From 731e882d222c54e95836c9b4b816a9cc40318b90 Mon Sep 17 00:00:00 2001 From: yanliang567 <82361606+yanliang567@users.noreply.github.com> Date: Mon, 6 Jan 2025 11:24:55 +0800 Subject: [PATCH] test: Refactor pymilvus client v2 testbase and add a test for search hint (#38939) issue: https://github.com/milvus-io/milvus/issues/38877 1. refactor pymilvus client v2 testcasebase 2. add a test for search hint 3. update pymilvus to 2.6 --------- Signed-off-by: yanliang567 --- tests/python_client/base/client_base.py | 31 +- tests/python_client/base/client_v2_base.py | 867 +++++++++ .../base/high_level_api_wrapper.py | 1700 ++++++++--------- tests/python_client/check/func_check.py | 2 +- .../milvus_client/test_milvus_client_alias.py | 255 ++- .../milvus_client/test_milvus_client_alter.py | 306 ++- .../test_milvus_client_collection.py | 680 ++++--- .../test_milvus_client_delete.py | 163 +- .../milvus_client/test_milvus_client_index.py | 471 +++-- .../test_milvus_client_insert.py | 558 +++--- .../test_milvus_client_partition.py | 522 +++-- .../milvus_client/test_milvus_client_query.py | 258 ++- .../milvus_client/test_milvus_client_rbac.py | 386 ++-- .../test_milvus_client_search.py | 416 ++-- tests/python_client/requirements.txt | 4 +- .../async_milvus_client/test_e2e_async.py | 48 +- .../testcases/test_high_level_api.py | 223 +-- 17 files changed, 3830 insertions(+), 3060 deletions(-) create mode 100644 tests/python_client/base/client_v2_base.py diff --git a/tests/python_client/base/client_base.py b/tests/python_client/base/client_base.py index 2a708f100d68c..8e668dff529c4 100644 --- a/tests/python_client/base/client_base.py +++ b/tests/python_client/base/client_base.py @@ -12,7 +12,6 @@ from base.index_wrapper import ApiIndexWrapper from base.utility_wrapper import ApiUtilityWrapper from base.schema_wrapper import ApiCollectionSchemaWrapper, ApiFieldSchemaWrapper -from base.high_level_api_wrapper import HighLevelApiWrapper from base.async_milvus_client_wrapper import 
AsyncMilvusClientWrapper from utils.util_log import test_log as log from common import common_func as cf @@ -33,9 +32,8 @@ class Base: collection_schema_wrap = None field_schema_wrap = None database_wrap = None - collection_object_list = [] + tear_down_collection_names = [] resource_group_list = [] - high_level_api_wrap = None async_milvus_client_wrap = None skip_connection = False @@ -60,7 +58,6 @@ def _setup_objects(self): self.collection_schema_wrap = ApiCollectionSchemaWrapper() self.field_schema_wrap = ApiFieldSchemaWrapper() self.database_wrap = ApiDatabaseWrapper() - self.high_level_api_wrap = HighLevelApiWrapper() self.async_milvus_client_wrap = AsyncMilvusClientWrapper() def teardown_method(self, method): @@ -83,9 +80,9 @@ def _teardown_objects(self): self.collection_wrap.drop(check_task=ct.CheckTasks.check_nothing) collection_list = self.utility_wrap.list_collections()[0] - for collection_object in self.collection_object_list: - if collection_object.collection is not None and collection_object.name in collection_list: - collection_object.drop(check_task=ct.CheckTasks.check_nothing) + for collection_name in self.tear_down_collection_names: + if collection_name is not None and collection_name in collection_list: + self.collection_wrap.init_collection(name=collection_name)[0].drop() """ Clean up the rgs before disconnect """ rgs_list = self.utility_wrap.list_resource_groups()[0] @@ -169,15 +166,15 @@ def _connect(self, enable_milvus_client_api=False): log.info(f"server version: {server_version}") return res - def init_async_milvus_client(self): - uri = cf.param_info.param_uri or f"http://{cf.param_info.param_host}:{cf.param_info.param_port}" - kwargs = { - "uri": uri, - "user": cf.param_info.param_user, - "password": cf.param_info.param_password, - "token": cf.param_info.param_token, - } - self.async_milvus_client_wrap.init_async_client(**kwargs) + # def init_async_milvus_client(self): + # uri = cf.param_info.param_uri or 
f"http://{cf.param_info.param_host}:{cf.param_info.param_port}" + # kwargs = { + # "uri": uri, + # "user": cf.param_info.param_user, + # "password": cf.param_info.param_password, + # "token": cf.param_info.param_token, + # } + # self.async_milvus_client_wrap.init_async_client(**kwargs) def init_collection_wrap(self, name=None, schema=None, check_task=None, check_items=None, enable_dynamic_field=False, with_json=True, **kwargs): @@ -189,7 +186,7 @@ def init_collection_wrap(self, name=None, schema=None, check_task=None, check_it collection_w = ApiCollectionWrapper() collection_w.init_collection(name=name, schema=schema, check_task=check_task, check_items=check_items, **kwargs) - self.collection_object_list.append(collection_w) + self.tear_down_collection_names.append(name) return collection_w def init_multi_fields_collection_wrap(self, name=cf.gen_unique_str()): diff --git a/tests/python_client/base/client_v2_base.py b/tests/python_client/base/client_v2_base.py new file mode 100644 index 0000000000000..4dfaf9b33051a --- /dev/null +++ b/tests/python_client/base/client_v2_base.py @@ -0,0 +1,867 @@ +import sys +from typing import Optional +from pymilvus import MilvusClient + +sys.path.append("..") +from check.func_check import ResponseChecker +from utils.api_request import api_request +from utils.wrapper import trace +from utils.util_log import test_log as log +from common import common_func as cf +from base.client_base import Base + +TIMEOUT = 120 +INDEX_NAME = "" + + +class TestMilvusClientV2Base(Base): + + # milvus_client = None + active_trace = False + + def init_async_milvus_client(self): + uri = cf.param_info.param_uri or f"http://{cf.param_info.param_host}:{cf.param_info.param_port}" + kwargs = { + "uri": uri, + "user": cf.param_info.param_user, + "password": cf.param_info.param_password, + "token": cf.param_info.param_token, + } + self.async_milvus_client_wrap.init_async_client(**kwargs) + + def _client(self, active_trace=False): + """ return MilvusClient 
instance if connected successfully, otherwise return None""" + if self.skip_connection: + return None + if cf.param_info.param_uri: + uri = cf.param_info.param_uri + else: + uri = "http://" + cf.param_info.param_host + ":" + str(cf.param_info.param_port) + res, is_succ = self.init_milvus_client(uri=uri, token=cf.param_info.param_token, active_trace=active_trace) + if is_succ: + # self.milvus_client = res + log.info(f"server version: {res.get_server_version()}") + return res + + def init_milvus_client(self, uri, user="", password="", db_name="", token="", timeout=None, + check_task=None, check_items=None, active_trace=False, **kwargs): + self.active_trace = active_trace + func_name = sys._getframe().f_code.co_name + res, is_succ = api_request([MilvusClient, uri, user, password, db_name, token, timeout], **kwargs) + # self.milvus_client = res if is_succ else None + check_result = ResponseChecker(res, func_name, check_task, check_items, is_succ, + uri=uri, user=user, password=password, db_name=db_name, token=token, + timeout=timeout, **kwargs).run() + return res, check_result + + @trace() + def close(self, client, check_task=None, check_items=None): + func_name = sys._getframe().f_code.co_name + res, is_succ = api_request([client.close]) + check_result = ResponseChecker(res, func_name, check_task, check_items, is_succ).run() + return res, check_result + + @trace() + def create_schema(self, client, timeout=None, check_task=None, + check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.create_schema], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, + **kwargs).run() + return res, check_result + + @trace() + def create_collection(self, client, collection_name, dimension=None, primary_field_name='id', + id_type='int', vector_field_name='vector', metric_type='COSINE', + auto_id=False, 
schema=None, index_params=None, timeout=None, check_task=None, + check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + consistency_level = kwargs.get("consistency_level", "Strong") + kwargs.update({"consistency_level": consistency_level}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.create_collection, collection_name, dimension, primary_field_name, + id_type, vector_field_name, metric_type, auto_id, timeout, schema, + index_params], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, + collection_name=collection_name, dimension=dimension, + **kwargs).run() + + self.tear_down_collection_names.append(collection_name) + return res, check_result + + def has_collection(self, client, collection_name, timeout=None, check_task=None, + check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.has_collection, collection_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, + collection_name=collection_name, + **kwargs).run() + return res, check_result + + @trace() + def insert(self, client, collection_name, data, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.insert, collection_name, data], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, + **kwargs).run() + return res, check_result + + @trace() + def upsert(self, client, collection_name, data, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = 
api_request([client.upsert, collection_name, data], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, + collection_name=collection_name, data=data, + **kwargs).run() + return res, check_result + + @trace() + def get_collection_stats(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.get_collection_stats, collection_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, + collection_name=collection_name, **kwargs).run() + return res, check_result + + @trace() + def search(self, client, collection_name, data, limit=10, filter=None, output_fields=None, search_params=None, + timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.search, collection_name, data, filter, limit, + output_fields, search_params], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, + collection_name=collection_name, data=data, limit=limit, filter=filter, + output_fields=output_fields, search_params=search_params, + **kwargs).run() + return res, check_result + + @trace() + def query(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.query, collection_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, + collection_name=collection_name, + **kwargs).run() + return res, check_result + + @trace() + def get(self, client, collection_name, ids, 
output_fields=None, + timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.get, collection_name, ids, output_fields], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, + collection_name=collection_name, ids=ids, + output_fields=output_fields, + **kwargs).run() + return res, check_result + + @trace() + def num_entities(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.num_entities, collection_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, + collection_name=collection_name, + **kwargs).run() + return res, check_result + + @trace() + def delete(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.delete, collection_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, + collection_name=collection_name, + **kwargs).run() + return res, check_result + + @trace() + def flush(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.flush, collection_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, + collection_name=collection_name, + **kwargs).run() + return res, check_result + + @trace() + def 
describe_collection(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.describe_collection, collection_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, + collection_name=collection_name, + **kwargs).run() + return res, check_result + + @trace() + def list_collections(self, client, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.list_collections], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, + **kwargs).run() + return res, check_result + + @trace() + def drop_collection(self, client, collection_name, check_task=None, check_items=None, **kwargs): + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.drop_collection, collection_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, + collection_name=collection_name, + **kwargs).run() + return res, check_result + + @trace() + def list_partitions(self, client, collection_name, check_task=None, check_items=None, **kwargs): + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.list_partitions, collection_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, + collection_name=collection_name, + **kwargs).run() + return res, check_result + + @trace() + def list_indexes(self, client, collection_name, field_name=None, check_task=None, check_items=None, **kwargs): + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.list_indexes, collection_name, field_name], **kwargs) + 
check_result = ResponseChecker(res, func_name, check_task, check_items, check, + collection_name=collection_name, + **kwargs).run() + return res, check_result + + @trace() + def get_load_state(self, client, collection_name, check_task=None, check_items=None, **kwargs): + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.get_load_state, collection_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, + collection_name=collection_name, + **kwargs).run() + return res, check_result + + @trace() + def prepare_index_params(self, client, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.prepare_index_params], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, + **kwargs).run() + return res, check_result + + @trace() + def load_collection(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.load_collection, collection_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, + check_items, check, + collection_name=collection_name, **kwargs).run() + return res, check_result + + @trace() + def release_collection(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.release_collection, collection_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, + check_items, check, + collection_name=collection_name, **kwargs).run() + return 
res, check_result + + @trace() + def load_partitions(self, client, collection_name, partition_names, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.load_partitions, collection_name, partition_names], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, + check_items, check, + collection_name=collection_name, + partition_names=partition_names, + **kwargs).run() + return res, check_result + + @trace() + def release_partitions(self, client, collection_name, partition_names, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.release_partitions, collection_name, partition_names], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, + check_items, check, + collection_name=collection_name, + partition_names=partition_names, + **kwargs).run() + return res, check_result + + @trace() + def rename_collection(self, client, old_name, new_name, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.rename_collection, old_name, new_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, + check_items, check, + old_name=old_name, + new_name=new_name, + **kwargs).run() + return res, check_result + + @trace() + def create_database(self, client, db_name, properties: Optional[dict] = None, check_task=None, check_items=None, **kwargs): + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.create_database, db_name, properties], **kwargs) + check_result = 
ResponseChecker(res, func_name, check_task, + check_items, check, + db_name=db_name, properties=properties, + **kwargs).run() + return res, check_result + + @trace() + def create_partition(self, client, collection_name, partition_name, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.create_partition, collection_name, partition_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, + check_items, check, + collection_name=collection_name, + partition_name=partition_name, + **kwargs).run() + return res, check_result + + @trace() + def list_partitions(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.list_partitions, collection_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, + check_items, check, + collection_name=collection_name, + **kwargs).run() + return res, check_result + + @trace() + def drop_partition(self, client, collection_name, partition_name, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.drop_partition, collection_name, partition_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, + check_items, check, + collection_name=collection_name, + partition_name=partition_name, + **kwargs).run() + return res, check_result + + @trace() + def has_partition(self, client, collection_name, partition_name, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + 
kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.has_partition, collection_name, partition_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, + check_items, check, + collection_name=collection_name, + partition_name=partition_name, + **kwargs).run() + return res, check_result + + @trace() + def get_partition_stats(self, client, collection_name, partition_name, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.get_partition_stats, collection_name, partition_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, + check_items, check, + collection_name=collection_name, + partition_name=partition_name, + **kwargs).run() + return res, check_result + + @trace() + def prepare_index_params(self, client, check_task=None, check_items=None, **kwargs): + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.prepare_index_params], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, + check_items, check, + **kwargs).run() + return res, check_result + + @trace() + def create_index(self, client, collection_name, index_params, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.create_index, collection_name, index_params], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, + check_items, check, + collection_name=collection_name, + index_params=index_params, + **kwargs).run() + return res, check_result + + @trace() + def drop_index(self, client, collection_name, index_name, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = 
TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.drop_index, collection_name, index_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, + check_items, check, + collection_name=collection_name, + index_name=index_name, + **kwargs).run() + return res, check_result + + @trace() + def describe_index(self, client, collection_name, index_name, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.describe_index, collection_name, index_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, + check_items, check, + collection_name=collection_name, + index_name=index_name, + **kwargs).run() + return res, check_result + + @trace() + def list_indexes(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.list_indexes, collection_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, + check_items, check, + collection_name=collection_name, + **kwargs).run() + return res, check_result + + @trace() + def create_alias(self, client, collection_name, alias, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.create_alias, collection_name, alias], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, + check_items, check, + collection_name=collection_name, + alias=alias, + **kwargs).run() + return res, check_result + + @trace() + 
def drop_alias(self, client, alias, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.drop_alias, alias], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, + check_items, check, + alias=alias, + **kwargs).run() + return res, check_result + + @trace() + def alter_alias(self, client, collection_name, alias, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.alter_alias, collection_name, alias], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, + check_items, check, + collection_name=collection_name, + alias=alias, + **kwargs).run() + return res, check_result + + @trace() + def describe_alias(self, client, alias, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.describe_alias, alias], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, + check_items, check, + alias=alias, + **kwargs).run() + return res, check_result + + @trace() + def list_aliases(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.list_aliases, collection_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, + check_items, check, collection_name=collection_name, + **kwargs).run() + return res, check_result + + @trace() + def using_database(self, client, db_name, 
timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.using_database, db_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() + return res, check_result + + def create_user(self, client, user_name, password, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.create_user, user_name, password], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, + check_items, check, user_name=user_name, + password=password, **kwargs).run() + return res, check_result + + @trace() + def drop_user(self, client, user_name, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.drop_user, user_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, + check_items, check, user_name=user_name, **kwargs).run() + return res, check_result + + @trace() + def update_password(self, client, user_name, old_password, new_password, reset_connection=False, + timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.update_password, user_name, old_password, new_password, + reset_connection], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, + check_items, check, user_name=user_name, old_password=old_password, + new_password=new_password, reset_connection=reset_connection, + **kwargs).run() + 
return res, check_result + + @trace() + def list_users(self, client, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.list_users], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, + check_items, check, **kwargs).run() + return res, check_result + + @trace() + def describe_user(self, client, user_name, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.describe_user, user_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, + check_items, check, user_name=user_name, **kwargs).run() + return res, check_result + + @trace() + def create_role(self, client, role_name, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.create_role, role_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, + check_items, check, role_name=role_name, **kwargs).run() + return res, check_result + + @trace() + def drop_role(self, client, role_name, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.drop_role, role_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, + check_items, check, role_name=role_name, **kwargs).run() + return res, check_result + + @trace() + def describe_role(self, client, role_name, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if 
timeout is None else timeout + kwargs.update({"timeout": timeout}) + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.describe_role, role_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, + check_items, check, role_name=role_name, **kwargs).run() + return res, check_result + + @trace() + def list_roles(self, client, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.list_roles], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, + check_items, check, **kwargs).run() + return res, check_result + + @trace() + def grant_role(self, client, user_name, role_name, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.grant_role, user_name, role_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, + user_name=user_name, role_name=role_name, **kwargs).run() + return res, check_result + + @trace() + def revoke_role(self, client, user_name, role_name, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.revoke_role, user_name, role_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, + user_name=user_name, role_name=role_name, **kwargs).run() + return res, check_result + + @trace() + def grant_privilege(self, client, role_name, object_type, privilege, object_name, db_name="", + timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + 
kwargs.update({"timeout": timeout}) + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.grant_privilege, role_name, object_type, privilege, + object_name, db_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, + role_name=role_name, object_type=object_type, privilege=privilege, + object_name=object_name, db_name=db_name, **kwargs).run() + return res, check_result + + @trace() + def revoke_privilege(self, client, role_name, object_type, privilege, object_name, db_name="", + timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.revoke_privilege, role_name, object_type, privilege, + object_name, db_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, + role_name=role_name, object_type=object_type, privilege=privilege, + object_name=object_name, db_name=db_name, **kwargs).run() + return res, check_result + + def create_privilege_group(self, client, privilege_group: str, check_task=None, check_items=None, **kwargs): + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.create_privilege_group, privilege_group], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() + return res, check_result + + def drop_privilege_group(self, client, privilege_group: str, check_task=None, check_items=None, **kwargs): + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.drop_privilege_group, privilege_group], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() + return res, check_result + + def list_privilege_groups(self, client, check_task=None, check_items=None, **kwargs): + func_name = sys._getframe().f_code.co_name + res, check = 
api_request([client.list_privilege_groups], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() + return res, check_result + + def add_privileges_to_group(self, client, privilege_group: str, privileges: list, check_task=None, check_items=None, + **kwargs): + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.add_privileges_to_group, privilege_group, privileges], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() + return res, check_result + + def remove_privileges_from_group(self, client, privilege_group: str, privileges: list, check_task=None, check_items=None, + **kwargs): + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.remove_privileges_from_group, privilege_group, privileges], + **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() + return res, check_result + + @trace() + def alter_index_properties(self, client, collection_name, index_name, properties, timeout=None, + check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.alter_index_properties, collection_name, index_name, properties], + **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() + return res, check_result + + @trace() + def drop_index_properties(self, client, collection_name, index_name, property_keys, timeout=None, + check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.drop_index_properties, collection_name, index_name, property_keys], + **kwargs) + check_result = ResponseChecker(res, func_name, 
check_task, check_items, check, **kwargs).run() + return res, check_result + + @trace() + def alter_collection_properties(self, client, collection_name, properties, timeout=None, + check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.alter_collection_properties, collection_name, properties], + **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() + return res, check_result + + @trace() + def drop_collection_properties(self, client, collection_name, property_keys, timeout=None, + check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.drop_collection_properties, collection_name, property_keys, timeout], + **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() + return res, check_result + + @trace() + def alter_collection_field(self, client, collection_name, field_name, field_params, timeout=None, + check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.alter_collection_field, collection_name, field_name, field_params, timeout], + **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() + return res, check_result + + @trace() + def alter_database_properties(self, client, db_name, properties, timeout=None, + check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.alter_database_properties, db_name, properties], **kwargs) + check_result = 
ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() + return res, check_result + + @trace() + def drop_database_properties(self, client, db_name, property_keys, timeout=None, + check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.drop_database_properties, db_name, property_keys], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() + return res, check_result + + @trace() + def create_database(self, client, db_name, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.create_database, db_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() + return res, check_result + + @trace() + def describe_database(self, client, db_name, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.describe_database, db_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() + return res, check_result + + @trace() + def drop_database(self, client, db_name, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.drop_database, db_name], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() + return res, check_result + + @trace() + def 
list_databases(self, client, timeout=None, check_task=None, check_items=None, **kwargs): + timeout = TIMEOUT if timeout is None else timeout + kwargs.update({"timeout": timeout}) + + func_name = sys._getframe().f_code.co_name + res, check = api_request([client.list_databases], **kwargs) + check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() + return res, check_result + + diff --git a/tests/python_client/base/high_level_api_wrapper.py b/tests/python_client/base/high_level_api_wrapper.py index 10287a125a4f4..7fa3aec8dcae5 100644 --- a/tests/python_client/base/high_level_api_wrapper.py +++ b/tests/python_client/base/high_level_api_wrapper.py @@ -1,850 +1,850 @@ -import sys -import time -from typing import Optional - -import timeout_decorator -from numpy import NaN - -from pymilvus import Collection -from pymilvus import MilvusClient - -sys.path.append("..") -from check.func_check import ResponseChecker -from utils.api_request import api_request -from utils.wrapper import trace -from utils.util_log import test_log as log -from pymilvus.orm.types import CONSISTENCY_STRONG -from common.common_func import param_info - -TIMEOUT = 120 -INDEX_NAME = "" - - -# keep small timeout for stability tests -# TIMEOUT = 5 - - -class HighLevelApiWrapper: - - milvus_client = None - - def __init__(self, active_trace=False): - self.active_trace = active_trace - - def init_milvus_client(self, uri, user="", password="", db_name="", token="", timeout=None, - check_task=None, check_items=None, active_trace=False, **kwargs): - self.active_trace = active_trace - func_name = sys._getframe().f_code.co_name - res, is_succ = api_request([MilvusClient, uri, user, password, db_name, token, timeout], **kwargs) - self.milvus_client = res if is_succ else None - check_result = ResponseChecker(res, func_name, check_task, check_items, is_succ, - uri=uri, user=user, password=password, db_name=db_name, token=token, - timeout=timeout, **kwargs).run() - return res, 
check_result - - @trace() - def close(self, client, check_task=None, check_items=None): - func_name = sys._getframe().f_code.co_name - res, is_succ = api_request([client.close]) - check_result = ResponseChecker(res, func_name, check_task, check_items, is_succ).run() - return res, check_result - - @trace() - def create_schema(self, client, timeout=None, check_task=None, - check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.create_schema], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, - **kwargs).run() - return res, check_result - - @trace() - def create_collection(self, client, collection_name, dimension=None, primary_field_name='id', - id_type='int', vector_field_name='vector', metric_type='COSINE', - auto_id=False, schema=None, index_params=None, timeout=None, check_task=None, - check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.create_collection, collection_name, dimension, primary_field_name, - id_type, vector_field_name, metric_type, auto_id, timeout, schema, - index_params], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, - collection_name=collection_name, dimension=dimension, - **kwargs).run() - return res, check_result - - def has_collection(self, client, collection_name, timeout=None, check_task=None, - check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.has_collection, collection_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, - collection_name=collection_name, - **kwargs).run() - return res, check_result - - @trace() 
- def insert(self, client, collection_name, data, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.insert, collection_name, data], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, - **kwargs).run() - return res, check_result - - @trace() - def upsert(self, client, collection_name, data, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.upsert, collection_name, data], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, - collection_name=collection_name, data=data, - **kwargs).run() - return res, check_result - - @trace() - def get_collection_stats(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.get_collection_stats, collection_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, - collection_name=collection_name, **kwargs).run() - return res, check_result - - @trace() - def search(self, client, collection_name, data, limit=10, filter=None, output_fields=None, search_params=None, - timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.search, collection_name, data, filter, limit, - output_fields, search_params], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, 
- collection_name=collection_name, data=data, limit=limit, filter=filter, - output_fields=output_fields, search_params=search_params, - **kwargs).run() - return res, check_result - - @trace() - def query(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.query, collection_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, - collection_name=collection_name, - **kwargs).run() - return res, check_result - - @trace() - def get(self, client, collection_name, ids, output_fields=None, - timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.get, collection_name, ids, output_fields], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, - collection_name=collection_name, ids=ids, - output_fields=output_fields, - **kwargs).run() - return res, check_result - - @trace() - def num_entities(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.num_entities, collection_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, - collection_name=collection_name, - **kwargs).run() - return res, check_result - - @trace() - def delete(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = 
api_request([client.delete, collection_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, - collection_name=collection_name, - **kwargs).run() - return res, check_result - - @trace() - def flush(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.flush, collection_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, - collection_name=collection_name, - **kwargs).run() - return res, check_result - - @trace() - def describe_collection(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.describe_collection, collection_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, - collection_name=collection_name, - **kwargs).run() - return res, check_result - - @trace() - def list_collections(self, client, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.list_collections], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, - **kwargs).run() - return res, check_result - - @trace() - def drop_collection(self, client, collection_name, check_task=None, check_items=None, **kwargs): - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.drop_collection, collection_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, - 
collection_name=collection_name, - **kwargs).run() - return res, check_result - - @trace() - def list_partitions(self, client, collection_name, check_task=None, check_items=None, **kwargs): - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.list_partitions, collection_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, - collection_name=collection_name, - **kwargs).run() - return res, check_result - - @trace() - def list_indexes(self, client, collection_name, field_name=None, check_task=None, check_items=None, **kwargs): - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.list_indexes, collection_name, field_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, - collection_name=collection_name, - **kwargs).run() - return res, check_result - - @trace() - def get_load_state(self, client, collection_name, check_task=None, check_items=None, **kwargs): - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.get_load_state, collection_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, - collection_name=collection_name, - **kwargs).run() - return res, check_result - - @trace() - def prepare_index_params(self, client, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.prepare_index_params], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, - **kwargs).run() - return res, check_result - - @trace() - def load_collection(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = 
sys._getframe().f_code.co_name - res, check = api_request([client.load_collection, collection_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, - check_items, check, - collection_name=collection_name, **kwargs).run() - return res, check_result - - @trace() - def release_collection(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.release_collection, collection_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, - check_items, check, - collection_name=collection_name, **kwargs).run() - return res, check_result - - @trace() - def load_partitions(self, client, collection_name, partition_names, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.load_partitions, collection_name, partition_names], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, - check_items, check, - collection_name=collection_name, - partition_names=partition_names, - **kwargs).run() - return res, check_result - - @trace() - def release_partitions(self, client, collection_name, partition_names, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.release_partitions, collection_name, partition_names], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, - check_items, check, - collection_name=collection_name, - partition_names=partition_names, - **kwargs).run() - return res, check_result - - @trace() - def rename_collection(self, client, old_name, 
new_name, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.rename_collection, old_name, new_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, - check_items, check, - old_name=old_name, - new_name=new_name, - **kwargs).run() - return res, check_result - - @trace() - def create_database(self, client, db_name, properties: Optional[dict] = None, check_task=None, check_items=None, **kwargs): - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.create_database, db_name, properties], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, - check_items, check, - db_name=db_name, properties=properties, - **kwargs).run() - return res, check_result - - @trace() - def create_partition(self, client, collection_name, partition_name, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.create_partition, collection_name, partition_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, - check_items, check, - collection_name=collection_name, - partition_name=partition_name, - **kwargs).run() - return res, check_result - - @trace() - def list_partitions(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.list_partitions, collection_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, - check_items, check, - collection_name=collection_name, - **kwargs).run() - return res, check_result - - @trace() - def 
drop_partition(self, client, collection_name, partition_name, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.drop_partition, collection_name, partition_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, - check_items, check, - collection_name=collection_name, - partition_name=partition_name, - **kwargs).run() - return res, check_result - - @trace() - def has_partition(self, client, collection_name, partition_name, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.has_partition, collection_name, partition_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, - check_items, check, - collection_name=collection_name, - partition_name=partition_name, - **kwargs).run() - return res, check_result - - @trace() - def get_partition_stats(self, client, collection_name, partition_name, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.get_partition_stats, collection_name, partition_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, - check_items, check, - collection_name=collection_name, - partition_name=partition_name, - **kwargs).run() - return res, check_result - - @trace() - def prepare_index_params(self, client, check_task=None, check_items=None, **kwargs): - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.prepare_index_params], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, - check_items, check, - 
**kwargs).run() - return res, check_result - - @trace() - def create_index(self, client, collection_name, index_params, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.create_index, collection_name, index_params], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, - check_items, check, - collection_name=collection_name, - index_params=index_params, - **kwargs).run() - return res, check_result - - @trace() - def drop_index(self, client, collection_name, index_name, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.drop_index, collection_name, index_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, - check_items, check, - collection_name=collection_name, - index_name=index_name, - **kwargs).run() - return res, check_result - - @trace() - def describe_index(self, client, collection_name, index_name, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.describe_index, collection_name, index_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, - check_items, check, - collection_name=collection_name, - index_name=index_name, - **kwargs).run() - return res, check_result - - @trace() - def list_indexes(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = 
api_request([client.list_indexes, collection_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, - check_items, check, - collection_name=collection_name, - **kwargs).run() - return res, check_result - - @trace() - def create_alias(self, client, collection_name, alias, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.create_alias, collection_name, alias], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, - check_items, check, - collection_name=collection_name, - alias=alias, - **kwargs).run() - return res, check_result - - @trace() - def drop_alias(self, client, alias, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.drop_alias, alias], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, - check_items, check, - alias=alias, - **kwargs).run() - return res, check_result - - @trace() - def alter_alias(self, client, collection_name, alias, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.alter_alias, collection_name, alias], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, - check_items, check, - collection_name=collection_name, - alias=alias, - **kwargs).run() - return res, check_result - - @trace() - def describe_alias(self, client, alias, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, 
check = api_request([client.describe_alias, alias], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, - check_items, check, - alias=alias, - **kwargs).run() - return res, check_result - - @trace() - def list_aliases(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.list_aliases, collection_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, - check_items, check, collection_name=collection_name, - **kwargs).run() - return res, check_result - - @trace() - def using_database(self, client, db_name, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.using_database, db_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() - return res, check_result - - def create_user(self, user_name, password, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - func_name = sys._getframe().f_code.co_name - res, check = api_request([self.milvus_client.create_user, user_name, password], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, - check_items, check, user_name=user_name, - password=password, **kwargs).run() - return res, check_result - - @trace() - def drop_user(self, user_name, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - func_name = sys._getframe().f_code.co_name - res, check = api_request([self.milvus_client.drop_user, user_name], **kwargs) - check_result = 
ResponseChecker(res, func_name, check_task, - check_items, check, user_name=user_name, **kwargs).run() - return res, check_result - - @trace() - def update_password(self, user_name, old_password, new_password, reset_connection=False, - timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - func_name = sys._getframe().f_code.co_name - res, check = api_request([self.milvus_client.update_password, user_name, old_password, new_password, - reset_connection], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, - check_items, check, user_name=user_name, old_password=old_password, - new_password=new_password, reset_connection=reset_connection, - **kwargs).run() - return res, check_result - - @trace() - def list_users(self, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - func_name = sys._getframe().f_code.co_name - res, check = api_request([self.milvus_client.list_users], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, - check_items, check, **kwargs).run() - return res, check_result - - @trace() - def describe_user(self, user_name, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - func_name = sys._getframe().f_code.co_name - res, check = api_request([self.milvus_client.describe_user, user_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, - check_items, check, user_name=user_name, **kwargs).run() - return res, check_result - - @trace() - def create_role(self, role_name, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - func_name = sys._getframe().f_code.co_name - res, check = 
api_request([self.milvus_client.create_role, role_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, - check_items, check, role_name=role_name, **kwargs).run() - return res, check_result - - @trace() - def drop_role(self, role_name, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - func_name = sys._getframe().f_code.co_name - res, check = api_request([self.milvus_client.drop_role, role_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, - check_items, check, role_name=role_name, **kwargs).run() - return res, check_result - - @trace() - def describe_role(self, role_name, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - func_name = sys._getframe().f_code.co_name - res, check = api_request([self.milvus_client.describe_role, role_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, - check_items, check, role_name=role_name, **kwargs).run() - return res, check_result - - @trace() - def list_roles(self, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - func_name = sys._getframe().f_code.co_name - res, check = api_request([self.milvus_client.list_roles], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, - check_items, check, **kwargs).run() - return res, check_result - - @trace() - def grant_role(self, user_name, role_name, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - func_name = sys._getframe().f_code.co_name - res, check = api_request([self.milvus_client.grant_role, user_name, role_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, 
check_items, check, - user_name=user_name, role_name=role_name, **kwargs).run() - return res, check_result - - @trace() - def revoke_role(self, user_name, role_name, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - func_name = sys._getframe().f_code.co_name - res, check = api_request([self.milvus_client.revoke_role, user_name, role_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, - user_name=user_name, role_name=role_name, **kwargs).run() - return res, check_result - - @trace() - def grant_privilege(self, role_name, object_type, privilege, object_name, db_name="", - timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - func_name = sys._getframe().f_code.co_name - res, check = api_request([self.milvus_client.grant_privilege, role_name, object_type, privilege, - object_name, db_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, - role_name=role_name, object_type=object_type, privilege=privilege, - object_name=object_name, db_name=db_name, **kwargs).run() - return res, check_result - - @trace() - def revoke_privilege(self, role_name, object_type, privilege, object_name, db_name="", - timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - func_name = sys._getframe().f_code.co_name - res, check = api_request([self.milvus_client.revoke_privilege, role_name, object_type, privilege, - object_name, db_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, - role_name=role_name, object_type=object_type, privilege=privilege, - object_name=object_name, db_name=db_name, **kwargs).run() - return res, check_result - - def create_privilege_group(self, 
privilege_group: str, check_task=None, check_items=None, **kwargs): - func_name = sys._getframe().f_code.co_name - res, check = api_request([self.milvus_client.create_privilege_group, privilege_group], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() - return res, check_result - - def drop_privilege_group(self, privilege_group: str, check_task=None, check_items=None, **kwargs): - func_name = sys._getframe().f_code.co_name - res, check = api_request([self.milvus_client.drop_privilege_group, privilege_group], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() - return res, check_result - - def list_privilege_groups(self, check_task=None, check_items=None, **kwargs): - func_name = sys._getframe().f_code.co_name - res, check = api_request([self.milvus_client.list_privilege_groups], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() - return res, check_result - - def add_privileges_to_group(self, privilege_group: str, privileges: list, check_task=None, check_items=None, - **kwargs): - func_name = sys._getframe().f_code.co_name - res, check = api_request([self.milvus_client.add_privileges_to_group, privilege_group, privileges], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() - return res, check_result - - def remove_privileges_from_group(self, privilege_group: str, privileges: list, check_task=None, check_items=None, - **kwargs): - func_name = sys._getframe().f_code.co_name - res, check = api_request([self.milvus_client.remove_privileges_from_group, privilege_group, privileges], - **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() - return res, check_result - - @trace() - def alter_index_properties(self, client, collection_name, index_name, properties, timeout=None, - check_task=None, 
check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.alter_index_properties, collection_name, index_name, properties], - **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() - return res, check_result - - @trace() - def drop_index_properties(self, client, collection_name, index_name, property_keys, timeout=None, - check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.drop_index_properties, collection_name, index_name, property_keys], - **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() - return res, check_result - - @trace() - def alter_collection_properties(self, client, collection_name, properties, timeout=None, - check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.alter_collection_properties, collection_name, properties], - **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() - return res, check_result - - @trace() - def drop_collection_properties(self, client, collection_name, property_keys, timeout=None, - check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.drop_collection_properties, collection_name, property_keys, timeout], - **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() - return res, check_result - - @trace() - def 
alter_collection_field(self, client, collection_name, field_name, field_params, timeout=None, - check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.alter_collection_field, collection_name, field_name, field_params, timeout], - **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() - return res, check_result - - @trace() - def alter_database_properties(self, client, db_name, properties, timeout=None, - check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.alter_database_properties, db_name, properties], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() - return res, check_result - - @trace() - def drop_database_properties(self, client, db_name, property_keys, timeout=None, - check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.drop_database_properties, db_name, property_keys], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() - return res, check_result - - @trace() - def create_database(self, client, db_name, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.create_database, db_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() - return res, check_result - - @trace() - def describe_database(self, client, db_name, 
timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.describe_database, db_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() - return res, check_result - - @trace() - def drop_database(self, client, db_name, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.drop_database, db_name], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() - return res, check_result - - @trace() - def list_databases(self, client, timeout=None, check_task=None, check_items=None, **kwargs): - timeout = TIMEOUT if timeout is None else timeout - kwargs.update({"timeout": timeout}) - - func_name = sys._getframe().f_code.co_name - res, check = api_request([client.list_databases], **kwargs) - check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() - return res, check_result - +# import sys +# import time +# from typing import Optional +# +# import timeout_decorator +# from numpy import NaN +# +# from pymilvus import Collection +# from pymilvus import MilvusClient +# +# sys.path.append("..") +# from check.func_check import ResponseChecker +# from utils.api_request import api_request +# from utils.wrapper import trace +# from utils.util_log import test_log as log +# from pymilvus.orm.types import CONSISTENCY_STRONG +# from common.common_func import param_info +# +# TIMEOUT = 120 +# INDEX_NAME = "" +# +# +# # keep small timeout for stability tests +# # TIMEOUT = 5 +# +# +# class HighLevelApiWrapper: +# +# milvus_client = None +# +# def __init__(self, active_trace=False): +# 
self.active_trace = active_trace +# +# def init_milvus_client(self, uri, user="", password="", db_name="", token="", timeout=None, +# check_task=None, check_items=None, active_trace=False, **kwargs): +# self.active_trace = active_trace +# func_name = sys._getframe().f_code.co_name +# res, is_succ = api_request([MilvusClient, uri, user, password, db_name, token, timeout], **kwargs) +# self.milvus_client = res if is_succ else None +# check_result = ResponseChecker(res, func_name, check_task, check_items, is_succ, +# uri=uri, user=user, password=password, db_name=db_name, token=token, +# timeout=timeout, **kwargs).run() +# return res, check_result +# +# @trace() +# def close(self, client, check_task=None, check_items=None): +# func_name = sys._getframe().f_code.co_name +# res, is_succ = api_request([client.close]) +# check_result = ResponseChecker(res, func_name, check_task, check_items, is_succ).run() +# return res, check_result +# +# @trace() +# def create_schema(self, client, timeout=None, check_task=None, +# check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.create_schema], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def create_collection(self, client, collection_name, dimension=None, primary_field_name='id', +# id_type='int', vector_field_name='vector', metric_type='COSINE', +# auto_id=False, schema=None, index_params=None, timeout=None, check_task=None, +# check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.create_collection, collection_name, dimension, primary_field_name, +# id_type, vector_field_name, metric_type, auto_id, timeout, schema, +# index_params], **kwargs) +# check_result = 
ResponseChecker(res, func_name, check_task, check_items, check, +# collection_name=collection_name, dimension=dimension, +# **kwargs).run() +# return res, check_result +# +# def has_collection(self, client, collection_name, timeout=None, check_task=None, +# check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.has_collection, collection_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, +# collection_name=collection_name, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def insert(self, client, collection_name, data, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.insert, collection_name, data], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def upsert(self, client, collection_name, data, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.upsert, collection_name, data], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, +# collection_name=collection_name, data=data, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def get_collection_stats(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = 
api_request([client.get_collection_stats, collection_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, +# collection_name=collection_name, **kwargs).run() +# return res, check_result +# +# @trace() +# def search(self, client, collection_name, data, limit=10, filter=None, output_fields=None, search_params=None, +# timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.search, collection_name, data, filter, limit, +# output_fields, search_params], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, +# collection_name=collection_name, data=data, limit=limit, filter=filter, +# output_fields=output_fields, search_params=search_params, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def query(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.query, collection_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, +# collection_name=collection_name, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def get(self, client, collection_name, ids, output_fields=None, +# timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.get, collection_name, ids, output_fields], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, +# collection_name=collection_name, ids=ids, +# output_fields=output_fields, +# 
**kwargs).run() +# return res, check_result +# +# @trace() +# def num_entities(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.num_entities, collection_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, +# collection_name=collection_name, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def delete(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.delete, collection_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, +# collection_name=collection_name, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def flush(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.flush, collection_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, +# collection_name=collection_name, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def describe_collection(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.describe_collection, collection_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, +# 
collection_name=collection_name, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def list_collections(self, client, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.list_collections], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def drop_collection(self, client, collection_name, check_task=None, check_items=None, **kwargs): +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.drop_collection, collection_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, +# collection_name=collection_name, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def list_partitions(self, client, collection_name, check_task=None, check_items=None, **kwargs): +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.list_partitions, collection_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, +# collection_name=collection_name, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def list_indexes(self, client, collection_name, field_name=None, check_task=None, check_items=None, **kwargs): +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.list_indexes, collection_name, field_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, +# collection_name=collection_name, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def get_load_state(self, client, collection_name, check_task=None, check_items=None, **kwargs): +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.get_load_state, 
collection_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, +# collection_name=collection_name, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def prepare_index_params(self, client, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.prepare_index_params], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def load_collection(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.load_collection, collection_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, +# check_items, check, +# collection_name=collection_name, **kwargs).run() +# return res, check_result +# +# @trace() +# def release_collection(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.release_collection, collection_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, +# check_items, check, +# collection_name=collection_name, **kwargs).run() +# return res, check_result +# +# @trace() +# def load_partitions(self, client, collection_name, partition_names, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = 
api_request([client.load_partitions, collection_name, partition_names], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, +# check_items, check, +# collection_name=collection_name, +# partition_names=partition_names, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def release_partitions(self, client, collection_name, partition_names, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.release_partitions, collection_name, partition_names], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, +# check_items, check, +# collection_name=collection_name, +# partition_names=partition_names, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def rename_collection(self, client, old_name, new_name, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.rename_collection, old_name, new_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, +# check_items, check, +# old_name=old_name, +# new_name=new_name, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def create_database(self, client, db_name, properties: Optional[dict] = None, check_task=None, check_items=None, **kwargs): +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.create_database, db_name, properties], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, +# check_items, check, +# db_name=db_name, properties=properties, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def create_partition(self, client, collection_name, partition_name, timeout=None, check_task=None, 
check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.create_partition, collection_name, partition_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, +# check_items, check, +# collection_name=collection_name, +# partition_name=partition_name, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def list_partitions(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.list_partitions, collection_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, +# check_items, check, +# collection_name=collection_name, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def drop_partition(self, client, collection_name, partition_name, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.drop_partition, collection_name, partition_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, +# check_items, check, +# collection_name=collection_name, +# partition_name=partition_name, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def has_partition(self, client, collection_name, partition_name, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.has_partition, collection_name, partition_name], **kwargs) +# check_result = ResponseChecker(res, 
func_name, check_task, +# check_items, check, +# collection_name=collection_name, +# partition_name=partition_name, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def get_partition_stats(self, client, collection_name, partition_name, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.get_partition_stats, collection_name, partition_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, +# check_items, check, +# collection_name=collection_name, +# partition_name=partition_name, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def prepare_index_params(self, client, check_task=None, check_items=None, **kwargs): +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.prepare_index_params], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, +# check_items, check, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def create_index(self, client, collection_name, index_params, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.create_index, collection_name, index_params], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, +# check_items, check, +# collection_name=collection_name, +# index_params=index_params, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def drop_index(self, client, collection_name, index_name, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = 
api_request([client.drop_index, collection_name, index_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, +# check_items, check, +# collection_name=collection_name, +# index_name=index_name, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def describe_index(self, client, collection_name, index_name, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.describe_index, collection_name, index_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, +# check_items, check, +# collection_name=collection_name, +# index_name=index_name, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def list_indexes(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.list_indexes, collection_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, +# check_items, check, +# collection_name=collection_name, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def create_alias(self, client, collection_name, alias, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.create_alias, collection_name, alias], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, +# check_items, check, +# collection_name=collection_name, +# alias=alias, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def drop_alias(self, client, alias, timeout=None, check_task=None, check_items=None, 
**kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.drop_alias, alias], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, +# check_items, check, +# alias=alias, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def alter_alias(self, client, collection_name, alias, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.alter_alias, collection_name, alias], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, +# check_items, check, +# collection_name=collection_name, +# alias=alias, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def describe_alias(self, client, alias, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.describe_alias, alias], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, +# check_items, check, +# alias=alias, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def list_aliases(self, client, collection_name, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.list_aliases, collection_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, +# check_items, check, collection_name=collection_name, +# **kwargs).run() +# return res, check_result +# +# @trace() +# def using_database(self, client, db_name, timeout=None, check_task=None, 
check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.using_database, db_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() +# return res, check_result +# +# def create_user(self, user_name, password, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([self.milvus_client.create_user, user_name, password], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, +# check_items, check, user_name=user_name, +# password=password, **kwargs).run() +# return res, check_result +# +# @trace() +# def drop_user(self, user_name, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([self.milvus_client.drop_user, user_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, +# check_items, check, user_name=user_name, **kwargs).run() +# return res, check_result +# +# @trace() +# def update_password(self, user_name, old_password, new_password, reset_connection=False, +# timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([self.milvus_client.update_password, user_name, old_password, new_password, +# reset_connection], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, +# check_items, check, user_name=user_name, old_password=old_password, +# new_password=new_password, reset_connection=reset_connection, 
+# **kwargs).run() +# return res, check_result +# +# @trace() +# def list_users(self, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([self.milvus_client.list_users], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, +# check_items, check, **kwargs).run() +# return res, check_result +# +# @trace() +# def describe_user(self, user_name, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([self.milvus_client.describe_user, user_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, +# check_items, check, user_name=user_name, **kwargs).run() +# return res, check_result +# +# @trace() +# def create_role(self, role_name, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([self.milvus_client.create_role, role_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, +# check_items, check, role_name=role_name, **kwargs).run() +# return res, check_result +# +# @trace() +# def drop_role(self, role_name, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([self.milvus_client.drop_role, role_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, +# check_items, check, role_name=role_name, **kwargs).run() +# return res, check_result +# +# @trace() +# def describe_role(self, role_name, timeout=None, 
check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([self.milvus_client.describe_role, role_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, +# check_items, check, role_name=role_name, **kwargs).run() +# return res, check_result +# +# @trace() +# def list_roles(self, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([self.milvus_client.list_roles], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, +# check_items, check, **kwargs).run() +# return res, check_result +# +# @trace() +# def grant_role(self, user_name, role_name, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([self.milvus_client.grant_role, user_name, role_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, +# user_name=user_name, role_name=role_name, **kwargs).run() +# return res, check_result +# +# @trace() +# def revoke_role(self, user_name, role_name, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([self.milvus_client.revoke_role, user_name, role_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, +# user_name=user_name, role_name=role_name, **kwargs).run() +# return res, check_result +# +# @trace() +# def grant_privilege(self, role_name, object_type, privilege, object_name, 
db_name="", +# timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([self.milvus_client.grant_privilege, role_name, object_type, privilege, +# object_name, db_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, +# role_name=role_name, object_type=object_type, privilege=privilege, +# object_name=object_name, db_name=db_name, **kwargs).run() +# return res, check_result +# +# @trace() +# def revoke_privilege(self, role_name, object_type, privilege, object_name, db_name="", +# timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([self.milvus_client.revoke_privilege, role_name, object_type, privilege, +# object_name, db_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, +# role_name=role_name, object_type=object_type, privilege=privilege, +# object_name=object_name, db_name=db_name, **kwargs).run() +# return res, check_result +# +# def create_privilege_group(self, privilege_group: str, check_task=None, check_items=None, **kwargs): +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([self.milvus_client.create_privilege_group, privilege_group], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() +# return res, check_result +# +# def drop_privilege_group(self, privilege_group: str, check_task=None, check_items=None, **kwargs): +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([self.milvus_client.drop_privilege_group, privilege_group], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() 
+# return res, check_result +# +# def list_privilege_groups(self, check_task=None, check_items=None, **kwargs): +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([self.milvus_client.list_privilege_groups], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() +# return res, check_result +# +# def add_privileges_to_group(self, privilege_group: str, privileges: list, check_task=None, check_items=None, +# **kwargs): +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([self.milvus_client.add_privileges_to_group, privilege_group, privileges], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() +# return res, check_result +# +# def remove_privileges_from_group(self, privilege_group: str, privileges: list, check_task=None, check_items=None, +# **kwargs): +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([self.milvus_client.remove_privileges_from_group, privilege_group, privileges], +# **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() +# return res, check_result +# +# @trace() +# def alter_index_properties(self, client, collection_name, index_name, properties, timeout=None, +# check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.alter_index_properties, collection_name, index_name, properties], +# **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() +# return res, check_result +# +# @trace() +# def drop_index_properties(self, client, collection_name, index_name, property_keys, timeout=None, +# check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": 
timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.drop_index_properties, collection_name, index_name, property_keys], +# **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() +# return res, check_result +# +# @trace() +# def alter_collection_properties(self, client, collection_name, properties, timeout=None, +# check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.alter_collection_properties, collection_name, properties], +# **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() +# return res, check_result +# +# @trace() +# def drop_collection_properties(self, client, collection_name, property_keys, timeout=None, +# check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.drop_collection_properties, collection_name, property_keys, timeout], +# **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() +# return res, check_result +# +# @trace() +# def alter_collection_field(self, client, collection_name, field_name, field_params, timeout=None, +# check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.alter_collection_field, collection_name, field_name, field_params, timeout], +# **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() +# return res, check_result +# +# @trace() +# def alter_database_properties(self, client, db_name, properties, timeout=None, +# check_task=None, check_items=None, 
**kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.alter_database_properties, db_name, properties], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() +# return res, check_result +# +# @trace() +# def drop_database_properties(self, client, db_name, property_keys, timeout=None, +# check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.drop_database_properties, db_name, property_keys], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() +# return res, check_result +# +# @trace() +# def create_database(self, client, db_name, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.create_database, db_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() +# return res, check_result +# +# @trace() +# def describe_database(self, client, db_name, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.describe_database, db_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() +# return res, check_result +# +# @trace() +# def drop_database(self, client, db_name, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# 
kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.drop_database, db_name], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() +# return res, check_result +# +# @trace() +# def list_databases(self, client, timeout=None, check_task=None, check_items=None, **kwargs): +# timeout = TIMEOUT if timeout is None else timeout +# kwargs.update({"timeout": timeout}) +# +# func_name = sys._getframe().f_code.co_name +# res, check = api_request([client.list_databases], **kwargs) +# check_result = ResponseChecker(res, func_name, check_task, check_items, check, **kwargs).run() +# return res, check_result +# diff --git a/tests/python_client/check/func_check.py b/tests/python_client/check/func_check.py index d1b70206c2384..c4bdfae434985 100644 --- a/tests/python_client/check/func_check.py +++ b/tests/python_client/check/func_check.py @@ -657,7 +657,7 @@ def check_permission_deny(res, actual=True): def check_auth_failure(res, actual=True): assert actual is False if isinstance(res, Error): - assert "auth" in res.message + assert "auth check failure" in res.message else: log.error("[CheckFunc] Response of API is not an error: %s" % str(res)) assert False diff --git a/tests/python_client/milvus_client/test_milvus_client_alias.py b/tests/python_client/milvus_client/test_milvus_client_alias.py index 3690e6bbe87f4..2442bef2282a0 100644 --- a/tests/python_client/milvus_client/test_milvus_client_alias.py +++ b/tests/python_client/milvus_client/test_milvus_client_alias.py @@ -1,27 +1,14 @@ -import multiprocessing -import numbers -import random -import numpy -import threading import pytest -import pandas as pd -import decimal -from decimal import Decimal, getcontext -from time import sleep -import heapq -from base.client_base import TestcaseBase +from base.client_v2_base import TestMilvusClientV2Base + from utils.util_log import test_log as log from common import 
common_func as cf from common import common_type as ct from common.common_type import CaseLabel, CheckTasks from utils.util_pymilvus import * -from common.constants import * -from pymilvus.orm.types import CONSISTENCY_STRONG, CONSISTENCY_BOUNDED, CONSISTENCY_SESSION, CONSISTENCY_EVENTUALLY -from base.high_level_api_wrapper import HighLevelApiWrapper -client_w = HighLevelApiWrapper() -prefix = "milvus_client_api_alias" +prefix = "client_alias" epsilon = ct.epsilon default_nb = ct.default_nb default_nb_medium = ct.default_nb_medium @@ -46,7 +33,7 @@ default_string_array_field_name = ct.default_string_array_field_name -class TestMilvusClientAliasInvalid(TestcaseBase): +class TestMilvusClientAliasInvalid(TestMilvusClientV2Base): """ Test case of search interface """ @pytest.fixture(scope="function", params=[False, True]) @@ -72,13 +59,13 @@ def test_milvus_client_create_alias_invalid_collection_name(self, collection_nam is equal to alias according to partitions expected: create alias successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() alias = cf.gen_unique_str("collection_alias") # 2. create alias error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {collection_name}. 
the first character of a " f"collection name must be an underscore or letter: invalid parameter"} - client_w.create_alias(client, collection_name, alias, - check_task=CheckTasks.err_res, check_items=error) + self.create_alias(client, collection_name, alias, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_create_alias_collection_name_over_max_length(self): @@ -88,13 +75,13 @@ def test_milvus_client_create_alias_collection_name_over_max_length(self): is equal to alias according to partitions expected: create alias successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() alias = cf.gen_unique_str("collection_alias") collection_name = "a".join("a" for i in range(256)) # 2. create alias error = {ct.err_code: 1100, ct.err_msg: f"the length of a collection name must be less than 255 characters"} - client_w.create_alias(client, collection_name, alias, - check_task=CheckTasks.err_res, check_items=error) + self.create_alias(client, collection_name, alias, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_create_alias_not_exist_collection(self): @@ -104,12 +91,12 @@ def test_milvus_client_create_alias_not_exist_collection(self): is equal to alias according to partitions expected: create alias successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() alias = cf.gen_unique_str("collection_alias") collection_name = "not_exist_collection_alias" error = {ct.err_code: 100, ct.err_msg: f"collection not found[database=default][collection={collection_name}]"} - client_w.create_alias(client, collection_name, alias, - check_task=CheckTasks.err_res, check_items=error) + self.create_alias(client, collection_name, alias, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) @pytest.mark.parametrize("alias", ["12-s", "12 s", "(mn)", "中文", "%$#"]) @@ -120,17 +107,17 
@@ def test_milvus_client_create_alias_invalid_alias_name(self, alias): is equal to alias according to partitions expected: create alias successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. create alias error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection alias: {alias}. " f"the first character of a collection alias must be an underscore or letter"} - client_w.create_alias(client, collection_name, alias, - check_task=CheckTasks.err_res, check_items=error) - client_w.drop_collection(client, collection_name) + self.create_alias(client, collection_name, alias, + check_task=CheckTasks.err_res, check_items=error) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_create_alias_name_over_max_length(self): @@ -140,16 +127,16 @@ def test_milvus_client_create_alias_name_over_max_length(self): is equal to alias according to partitions expected: create alias successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) alias = "a".join("a" for i in range(256)) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. 
create alias error = {ct.err_code: 1100, ct.err_msg: f"the length of a collection alias must be less than 255 characters"} - client_w.create_alias(client, collection_name, alias, - check_task=CheckTasks.err_res, check_items=error) - client_w.drop_collection(client, collection_name) + self.create_alias(client, collection_name, alias, + check_task=CheckTasks.err_res, check_items=error) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_create_alias_same_collection_name(self): @@ -159,16 +146,16 @@ def test_milvus_client_create_alias_same_collection_name(self): is equal to alias according to partitions expected: create alias successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. create alias error = {ct.err_code: 1601, ct.err_msg: f"alias and collection name conflict[database=default]" f"[alias={collection_name}]"} - client_w.create_alias(client, collection_name, collection_name, - check_task=CheckTasks.err_res, check_items=error) - client_w.drop_collection(client, collection_name) + self.create_alias(client, collection_name, collection_name, + check_task=CheckTasks.err_res, check_items=error) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_create_same_alias_diff_collections(self): @@ -178,21 +165,21 @@ def test_milvus_client_create_same_alias_diff_collections(self): is equal to alias according to partitions expected: create alias successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) collection_name_1 = cf.gen_unique_str(prefix) alias = cf.gen_unique_str("collection_alias") # 1. 
create collection and alias - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") - client_w.create_alias(client, collection_name, alias) + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_alias(client, collection_name, alias) # 2. create another collection and same alias - client_w.create_collection(client, collection_name_1, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name_1, default_dim, consistency_level="Strong") error = {ct.err_code: 1602, ct.err_msg: f"{alias} is alias to another collection: " f"{collection_name}: alias already exist[database=default]" f"[alias={alias}]"} - client_w.create_alias(client, collection_name_1, alias, - check_task=CheckTasks.err_res, check_items=error) - client_w.drop_collection(client, collection_name) + self.create_alias(client, collection_name_1, alias, + check_task=CheckTasks.err_res, check_items=error) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_drop_alias_not_existed(self): @@ -202,9 +189,9 @@ def test_milvus_client_drop_alias_not_existed(self): is equal to alias according to partitions expected: create alias successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() alias = cf.gen_unique_str("not_existed_alias") - client_w.drop_alias(client, alias) + self.drop_alias(client, alias) @pytest.mark.tags(CaseLabel.L1) @pytest.mark.parametrize("alias_name", ["12-s", "12 s", "(mn)", "中文", "%$#"]) @@ -215,11 +202,11 @@ def test_milvus_client_drop_alias_invalid_alias_name(self, alias_name): is equal to alias according to partitions expected: create alias successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection alias: {alias_name}. 
the first character of a " f"collection alias must be an underscore or letter"} - client_w.drop_alias(client, alias_name, - check_task=CheckTasks.err_res, check_items=error) + self.drop_alias(client, alias_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_drop_alias_over_max_length(self): @@ -229,11 +216,11 @@ def test_milvus_client_drop_alias_over_max_length(self): is equal to alias according to partitions expected: create alias successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() alias = "a".join("a" for i in range(256)) error = {ct.err_code: 1100, ct.err_msg: f"the length of a collection alias must be less than 255 characters"} - client_w.drop_alias(client, alias, - check_task=CheckTasks.err_res, check_items=error) + self.drop_alias(client, alias, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) @pytest.mark.parametrize("collection_name", ["12-s", "12 s", "(mn)", "中文", "%$#"]) @@ -244,12 +231,12 @@ def test_milvus_client_alter_alias_invalid_collection_name(self, collection_name is equal to alias according to partitions expected: create alias successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() alias = cf.gen_unique_str("collection_alias") error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {collection_name}. 
the first character of a " f"collection name must be an underscore or letter: invalid parameter"} - client_w.alter_alias(client, collection_name, alias, - check_task=CheckTasks.err_res, check_items=error) + self.alter_alias(client, collection_name, alias, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_alter_alias_collection_name_over_max_length(self): @@ -259,13 +246,13 @@ def test_milvus_client_alter_alias_collection_name_over_max_length(self): is equal to alias according to partitions expected: create alias successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() alias = cf.gen_unique_str("collection_alias") collection_name = "a".join("a" for i in range(256)) # 2. create alias error = {ct.err_code: 1100, ct.err_msg: f"the length of a collection name must be less than 255 characters"} - client_w.alter_alias(client, collection_name, alias, - check_task=CheckTasks.err_res, check_items=error) + self.alter_alias(client, collection_name, alias, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_alter_alias_not_exist_collection(self): @@ -275,14 +262,14 @@ def test_milvus_client_alter_alias_not_exist_collection(self): is equal to alias according to partitions expected: create alias successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() alias = cf.gen_unique_str("collection_alias") collection_name = cf.gen_unique_str("not_exist_collection_alias") # 2. 
create alias error = {ct.err_code: 100, ct.err_msg: f"collection not found[collection={collection_name}]"} - client_w.alter_alias(client, collection_name, alias, - check_task=CheckTasks.err_res, check_items=error) - client_w.drop_collection(client, collection_name) + self.alter_alias(client, collection_name, alias, + check_task=CheckTasks.err_res, check_items=error) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) @pytest.mark.parametrize("alias", ["12-s", "12 s", "(mn)", "中文", "%$#"]) @@ -293,16 +280,16 @@ def test_milvus_client_alter_alias_invalid_alias_name(self, alias): is equal to alias according to partitions expected: create alias successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. create alias error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection alias: {alias}. the first character of a " f"collection alias must be an underscore or letter"} - client_w.alter_alias(client, collection_name, alias, - check_task=CheckTasks.err_res, check_items=error) - client_w.drop_collection(client, collection_name) + self.alter_alias(client, collection_name, alias, + check_task=CheckTasks.err_res, check_items=error) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_alter_alias_name_over_max_length(self): @@ -312,16 +299,16 @@ def test_milvus_client_alter_alias_name_over_max_length(self): is equal to alias according to partitions expected: create alias successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) alias = "a".join("a" for i in range(256)) # 1. 
create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. create alias error = {ct.err_code: 1100, ct.err_msg: f"the length of a collection alias must be less than 255 characters"} - client_w.alter_alias(client, collection_name, alias, - check_task=CheckTasks.err_res, check_items=error) - client_w.drop_collection(client, collection_name) + self.alter_alias(client, collection_name, alias, + check_task=CheckTasks.err_res, check_items=error) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_alter_alias_same_collection_name(self): @@ -331,16 +318,16 @@ def test_milvus_client_alter_alias_same_collection_name(self): is equal to alias according to partitions expected: create alias successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. 
create alias error = {ct.err_code: 1601, ct.err_msg: f"alias and collection name conflict[database=default]" f"[alias={collection_name}"} - client_w.alter_alias(client, collection_name, collection_name, - check_task=CheckTasks.err_res, check_items=error) - client_w.drop_collection(client, collection_name) + self.alter_alias(client, collection_name, collection_name, + check_task=CheckTasks.err_res, check_items=error) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_alter_non_exists_alias(self): @@ -350,22 +337,22 @@ def test_milvus_client_alter_non_exists_alias(self): is equal to alias according to partitions expected: alter alias successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str("coll") alias = cf.gen_unique_str("alias") another_alias = cf.gen_unique_str("another_alias") # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. create alias - client_w.create_alias(client, collection_name, alias) + self.create_alias(client, collection_name, alias) # 3. 
alter alias error = {ct.err_code: 1600, ct.err_msg: f"alias not found[database=default][alias={another_alias}]"} - client_w.alter_alias(client, collection_name, another_alias, - check_task=CheckTasks.err_res, check_items=error) - client_w.drop_collection(client, collection_name) + self.alter_alias(client, collection_name, another_alias, + check_task=CheckTasks.err_res, check_items=error) + self.drop_collection(client, collection_name) -class TestMilvusClientAliasValid(TestcaseBase): +class TestMilvusClientAliasValid(TestMilvusClientV2Base): """ Test case of search interface """ @pytest.fixture(scope="function", params=[False, True]) @@ -389,42 +376,42 @@ def test_milvus_client_alias_search_query(self): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) alias = "collection_alias" # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. create alias - client_w.drop_alias(client, alias) - client_w.create_alias(client, collection_name, alias) + self.drop_alias(client, alias) + self.create_alias(client, collection_name, alias) collection_name = alias # 2. insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - client_w.insert(client, collection_name, rows) - # client_w.flush(client, collection_name) - # assert client_w.num_entities(client, collection_name)[0] == default_nb + self.insert(client, collection_name, rows) + # self.flush(client, collection_name) + # assert self.num_entities(client, collection_name)[0] == default_nb # 3. 
search vectors_to_search = rng.random((1, default_dim)) insert_ids = [i for i in range(default_nb)] - client_w.search(client, collection_name, vectors_to_search, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": insert_ids, - "limit": default_limit}) + self.search(client, collection_name, vectors_to_search, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": insert_ids, + "limit": default_limit}) # 4. query - client_w.query(client, collection_name, filter=default_search_exp, - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows, - "with_vec": True, - "primary_field": default_primary_key_field_name}) - client_w.release_collection(client, collection_name) - client_w.drop_collection(client, collection_name, check_task=CheckTasks.err_res, - check_items={ct.err_code: 65535, - ct.err_msg: "cannot drop the collection via alias = collection_alias"}) - client_w.drop_alias(client, alias) + self.query(client, collection_name, filter=default_search_exp, + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows, + "with_vec": True, + "primary_field": default_primary_key_field_name}) + self.release_collection(client, collection_name) + self.drop_collection(client, collection_name, check_task=CheckTasks.err_res, + check_items={ct.err_code: 65535, + ct.err_msg: "cannot drop the collection via alias = collection_alias"}) + self.drop_alias(client, alias) @pytest.mark.tags(CaseLabel.L1) @pytest.mark.xfail(reason="pymilvus issue 1891, 1892") @@ -435,28 +422,28 @@ def test_milvus_client_alias_default(self): is equal to alias according to partitions expected: create alias successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) partition_name = cf.gen_unique_str("partition") alias = 
cf.gen_unique_str("collection_alias") # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") - client_w.create_partition(client, collection_name, partition_name) - partition_name_list = client_w.list_partitions(client, collection_name)[0] + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_partition(client, collection_name, partition_name) + partition_name_list = self.list_partitions(client, collection_name)[0] # 2. create alias - client_w.create_alias(client, collection_name, alias) - client_w.describe_alias(client, alias) + self.create_alias(client, collection_name, alias) + self.describe_alias(client, alias) # 3. list alias - aliases = client_w.list_aliases(client)[0] + aliases = self.list_aliases(client)[0] # assert alias in aliases # 4. assert collection is equal to alias according to partitions - partition_name_list_alias = client_w.list_partitions(client, alias)[0] + partition_name_list_alias = self.list_partitions(client, alias)[0] assert partition_name_list == partition_name_list_alias # 5. 
drop alias - client_w.drop_alias(client, alias) - aliases = client_w.list_aliases(client)[0] + self.drop_alias(client, alias) + aliases = self.list_aliases(client)[0] # assert alias not in aliases - client_w.drop_collection(client, collection_name) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_alter_alias_default(self): @@ -466,28 +453,28 @@ def test_milvus_client_alter_alias_default(self): is equal to alias according to partitions expected: alter alias successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) another_collectinon_name = cf.gen_unique_str(prefix) partition_name = cf.gen_unique_str("partition") alias = cf.gen_unique_str("collection_alias") another_alias = cf.gen_unique_str("collection_alias_another") # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") - client_w.create_partition(client, collection_name, partition_name) - partition_name_list = client_w.list_partitions(client, collection_name)[0] - client_w.create_collection(client, another_collectinon_name, default_dim, consistency_level="Strong") - client_w.create_alias(client, another_collectinon_name, another_alias) + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_partition(client, collection_name, partition_name) + partition_name_list = self.list_partitions(client, collection_name)[0] + self.create_collection(client, another_collectinon_name, default_dim, consistency_level="Strong") + self.create_alias(client, another_collectinon_name, another_alias) # 2. create alias - client_w.create_alias(client, collection_name, alias) + self.create_alias(client, collection_name, alias) # 3. 
alter alias - client_w.alter_alias(client, collection_name, another_alias) - client_w.describe_alias(client, alias) + self.alter_alias(client, collection_name, another_alias) + self.describe_alias(client, alias) # 3. list alias - aliases = client_w.list_aliases(client, collection_name)[0] + aliases = self.list_aliases(client, collection_name)[0] # assert alias in aliases # assert another_alias in aliases # 4. assert collection is equal to alias according to partitions - partition_name_list_alias = client_w.list_partitions(client, another_alias)[0] + partition_name_list_alias = self.list_partitions(client, another_alias)[0] assert partition_name_list == partition_name_list_alias - client_w.drop_collection(client, collection_name) + self.drop_collection(client, collection_name) diff --git a/tests/python_client/milvus_client/test_milvus_client_alter.py b/tests/python_client/milvus_client/test_milvus_client_alter.py index 9a15700bfdc6f..7af1eb1559b5d 100644 --- a/tests/python_client/milvus_client/test_milvus_client_alter.py +++ b/tests/python_client/milvus_client/test_milvus_client_alter.py @@ -1,53 +1,18 @@ -import multiprocessing -import numbers -import random -import numpy -import threading import pytest -import pandas as pd -import decimal -from decimal import Decimal, getcontext -from time import sleep -import heapq - -from base.client_base import TestcaseBase +import numbers +from base.client_v2_base import TestMilvusClientV2Base from utils.util_log import test_log as log from common import common_func as cf from common import common_type as ct from common.common_type import CaseLabel, CheckTasks -from utils.util_pymilvus import * -from common.constants import * -from pymilvus.orm.types import CONSISTENCY_STRONG, CONSISTENCY_BOUNDED, CONSISTENCY_SESSION, CONSISTENCY_EVENTUALLY -from base.high_level_api_wrapper import HighLevelApiWrapper -client_w = HighLevelApiWrapper() +from utils.util_pymilvus import DataType +import numpy as np -prefix = 
"milvus_client_api_index" -epsilon = ct.epsilon -default_nb = ct.default_nb -default_nb_medium = ct.default_nb_medium -default_nq = ct.default_nq -default_dim = ct.default_dim -default_limit = ct.default_limit -default_search_exp = "id >= 0" -exp_res = "exp_res" -default_search_string_exp = "varchar >= \"0\"" -default_search_mix_exp = "int64 >= 0 && varchar >= \"0\"" -default_invaild_string_exp = "varchar >= 0" -default_json_search_exp = "json_field[\"number\"] >= 0" -perfix_expr = 'varchar like "0%"' -default_search_field = ct.default_float_vec_field_name -default_search_params = ct.default_search_params -default_primary_key_field_name = "id" +prefix = "alter" default_vector_field_name = "vector" -default_multiple_vector_field_name = "vector_new" -default_float_field_name = ct.default_float_field_name -default_bool_field_name = ct.default_bool_field_name -default_string_field_name = ct.default_string_field_name -default_int32_array_field_name = ct.default_int32_array_field_name -default_string_array_field_name = ct.default_string_array_field_name -class TestMilvusClientAlterIndex(TestcaseBase): +class TestMilvusClientAlterIndex(TestMilvusClientV2Base): @pytest.mark.tags(CaseLabel.L0) def test_milvus_client_alter_index_default(self): @@ -63,27 +28,28 @@ def test_milvus_client_alter_index_default(self): verify drop successfully expected: alter successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") - idx_names, _ = client_w.list_indexes(client, collection_name, field_name=default_vector_field_name) - client_w.load_collection(client, collection_name) - res1 = client_w.describe_index(client, collection_name, index_name=idx_names[0])[0] + self.create_collection(client, collection_name, ct.default_dim, consistency_level="Strong") + idx_names, _ = self.list_indexes(client, collection_name, 
field_name=default_vector_field_name) + self.load_collection(client, collection_name) + res1 = self.describe_index(client, collection_name, index_name=idx_names[0])[0] assert res1.get('mmap.enabled', None) is None error = {ct.err_code: 104, - ct.err_msg: f"can't alter index on loaded collection, please release the collection first: collection already loaded[collection={collection_name}]"} + ct.err_msg: f"can't alter index on loaded collection, " + f"please release the collection first: collection already loaded[collection={collection_name}]"} # 1. alter index after load - client_w.alter_index_properties(client, collection_name, idx_names[0], properties={"mmap.enabled": True}, - check_task=CheckTasks.err_res, check_items=error) - client_w.drop_index_properties(client, collection_name, idx_names[0], property_keys=["mmap.enabled"], - check_task=CheckTasks.err_res, check_items=error) - client_w.release_collection(client, collection_name) + self.alter_index_properties(client, collection_name, idx_names[0], properties={"mmap.enabled": True}, + check_task=CheckTasks.err_res, check_items=error) + self.drop_index_properties(client, collection_name, idx_names[0], property_keys=["mmap.enabled"], + check_task=CheckTasks.err_res, check_items=error) + self.release_collection(client, collection_name) # 2. 
alter index after release - client_w.alter_index_properties(client, collection_name, idx_names[0], properties={"mmap.enabled": True}) - res2 = client_w.describe_index(client, collection_name, index_name=idx_names[0])[0] + self.alter_index_properties(client, collection_name, idx_names[0], properties={"mmap.enabled": True}) + res2 = self.describe_index(client, collection_name, index_name=idx_names[0])[0] assert res2.get('mmap.enabled', None) == 'True' - client_w.drop_index_properties(client, collection_name, idx_names[0], property_keys=["mmap.enabled"]) - res3 = client_w.describe_index(client, collection_name, index_name=idx_names[0])[0] + self.drop_index_properties(client, collection_name, idx_names[0], property_keys=["mmap.enabled"]) + res3 = self.describe_index(client, collection_name, index_name=idx_names[0])[0] assert res3.get('mmap.enabled', None) is None @pytest.mark.tags(CaseLabel.L1) @@ -93,10 +59,10 @@ def test_milvus_client_alter_index_unsupported_properties(self): method: 1. alter index with unsupported properties expected: raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. 
create collection - schema = client_w.create_schema(client, enable_dynamic_field=False)[0] + schema = self.create_schema(client, enable_dynamic_field=False)[0] dim = 32 pk_field_name = 'id_string' vector_field_name = 'embeddings' @@ -106,24 +72,24 @@ def test_milvus_client_alter_index_unsupported_properties(self): schema.add_field(vector_field_name, DataType.FLOAT_VECTOR, dim=dim, mmap_enabled=True) schema.add_field(str_field_name, DataType.VARCHAR, max_length=max_length, mmap_enabled=True) - index_params = client_w.prepare_index_params(client)[0] + index_params = self.prepare_index_params(client)[0] index_params.add_index(field_name=vector_field_name, metric_type="COSINE", index_type="HNSW", params={"M": 16, "efConstruction": 100, "mmap.enabled": True}) index_params.add_index(field_name=str_field_name) - client_w.create_collection(client, collection_name, schema=schema, index_params=index_params) - client_w.describe_collection(client, collection_name, check_task=CheckTasks.check_collection_fields_properties, - check_items={str_field_name: {"max_length": max_length, "mmap_enabled": True}, - vector_field_name: {"mmap_enabled": True}}) - client_w.release_collection(client, collection_name) - properties = client_w.describe_index(client, collection_name, index_name=vector_field_name)[0] + self.create_collection(client, collection_name, schema=schema, index_params=index_params) + self.describe_collection(client, collection_name, check_task=CheckTasks.check_collection_fields_properties, + check_items={str_field_name: {"max_length": max_length, "mmap_enabled": True}, + vector_field_name: {"mmap_enabled": True}}) + self.release_collection(client, collection_name) + properties = self.describe_index(client, collection_name, index_name=vector_field_name)[0] for p in properties.items(): if p[0] not in ["mmap.enabled"]: log.debug(f"try to alter index property: {p[0]}") error = {ct.err_code: 1, ct.err_msg: f"{p[0]} is not a configable index proptery"} new_value = p[1] + 1 if 
isinstance(p[1], numbers.Number) else "new_value" - client_w.alter_index_properties(client, collection_name, vector_field_name, - properties={p[0]: new_value}, - check_task=CheckTasks.err_res, check_items=error) + self.alter_index_properties(client, collection_name, vector_field_name, + properties={p[0]: new_value}, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_alter_index_unsupported_value(self): @@ -132,22 +98,22 @@ def test_milvus_client_alter_index_unsupported_value(self): method: 1. alter index with unsupported properties expected: raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") - idx_names, _ = client_w.list_indexes(client, collection_name, field_name=default_vector_field_name) - client_w.release_collection(client, collection_name) - res1 = client_w.describe_index(client, collection_name, index_name=idx_names[0])[0] + self.create_collection(client, collection_name, ct.default_dim, consistency_level="Strong") + idx_names, _ = self.list_indexes(client, collection_name, field_name=default_vector_field_name) + self.release_collection(client, collection_name) + res1 = self.describe_index(client, collection_name, index_name=idx_names[0])[0] assert res1.get('mmap.enabled', None) is None unsupported_values = [None, [], '', 20, ' ', 0.01, "new_value"] for value in unsupported_values: error = {ct.err_code: 1, ct.err_msg: f"invalid mmap.enabled value: {value}, expected: true, false"} - client_w.alter_index_properties(client, collection_name, idx_names[0], - properties={"mmap.enabled": value}, - check_task=CheckTasks.err_res, check_items=error) + self.alter_index_properties(client, collection_name, idx_names[0], + properties={"mmap.enabled": value}, + check_task=CheckTasks.err_res, check_items=error) -class 
TestMilvusClientAlterCollection(TestcaseBase): +class TestMilvusClientAlterCollection(TestMilvusClientV2Base): @pytest.mark.tags(CaseLabel.L0) def test_milvus_client_alter_collection_default(self): """ @@ -161,48 +127,48 @@ def test_milvus_client_alter_collection_default(self): verify drop successfully expected: alter successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") - client_w.load_collection(client, collection_name) - res1 = client_w.describe_collection(client, collection_name)[0] + self.create_collection(client, collection_name, ct.default_dim, consistency_level="Strong") + self.load_collection(client, collection_name) + res1 = self.describe_collection(client, collection_name)[0] assert res1.get('properties', None) == {} # 1. alter collection properties after load - client_w.load_collection(client, collection_name) + self.load_collection(client, collection_name) error = {ct.err_code: 999, ct.err_msg: "can not alter mmap properties if collection loaded"} - client_w.alter_collection_properties(client, collection_name, properties={"mmap.enabled": True}, - check_task=CheckTasks.err_res, check_items=error) - client_w.alter_collection_properties(client, collection_name, properties={"lazyload.enabled": True}, - check_task=CheckTasks.err_res, check_items=error) + self.alter_collection_properties(client, collection_name, properties={"mmap.enabled": True}, + check_task=CheckTasks.err_res, check_items=error) + self.alter_collection_properties(client, collection_name, properties={"lazyload.enabled": True}, + check_task=CheckTasks.err_res, check_items=error) error = {ct.err_code: 999, ct.err_msg: "can not delete mmap properties if collection loaded"} - client_w.drop_collection_properties(client, collection_name, property_keys=["mmap.enabled"], - check_task=CheckTasks.err_res, 
check_items=error) - client_w.drop_collection_properties(client, collection_name, property_keys=["lazyload.enabled"], - check_task=CheckTasks.err_res, check_items=error) - res3 = client_w.describe_collection(client, collection_name)[0] + self.drop_collection_properties(client, collection_name, property_keys=["mmap.enabled"], + check_task=CheckTasks.err_res, check_items=error) + self.drop_collection_properties(client, collection_name, property_keys=["lazyload.enabled"], + check_task=CheckTasks.err_res, check_items=error) + res3 = self.describe_collection(client, collection_name)[0] assert res3.get('properties', None) == {} - client_w.drop_collection_properties(client, collection_name, property_keys=["collection.ttl.seconds"]) + self.drop_collection_properties(client, collection_name, property_keys=["collection.ttl.seconds"]) assert res3.get('properties', None) == {} # 2. alter collection properties after release - client_w.release_collection(client, collection_name) - client_w.alter_collection_properties(client, collection_name, properties={"mmap.enabled": True}) - res2 = client_w.describe_collection(client, collection_name)[0] + self.release_collection(client, collection_name) + self.alter_collection_properties(client, collection_name, properties={"mmap.enabled": True}) + res2 = self.describe_collection(client, collection_name)[0] assert res2.get('properties', None) == {'mmap.enabled': 'True'} - client_w.alter_collection_properties(client, collection_name, - properties={"collection.ttl.seconds": 100, "lazyload.enabled": True}) - res2 = client_w.describe_collection(client, collection_name)[0] + self.alter_collection_properties(client, collection_name, + properties={"collection.ttl.seconds": 100, "lazyload.enabled": True}) + res2 = self.describe_collection(client, collection_name)[0] assert res2.get('properties', None) == {'mmap.enabled': 'True', 'collection.ttl.seconds': '100', 'lazyload.enabled': 'True'} - client_w.drop_collection_properties(client, 
collection_name, - property_keys=["mmap.enabled", "lazyload.enabled", - "collection.ttl.seconds"]) - res3 = client_w.describe_collection(client, collection_name)[0] + self.drop_collection_properties(client, collection_name, + property_keys=["mmap.enabled", "lazyload.enabled", + "collection.ttl.seconds"]) + res3 = self.describe_collection(client, collection_name)[0] assert res3.get('properties', None) == {} -class TestMilvusClientAlterCollectionField(TestcaseBase): +class TestMilvusClientAlterCollectionField(TestMilvusClientV2Base): @pytest.mark.tags(CaseLabel.L0) def test_milvus_client_alter_collection_field_default(self): """ @@ -210,10 +176,10 @@ def test_milvus_client_alter_collection_field_default(self): method: alter varchar field max length expected: alter successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - schema = client_w.create_schema(client, enable_dynamic_field=False)[0] + schema = self.create_schema(client, enable_dynamic_field=False)[0] dim = 32 pk_field_name = 'id_string' vector_field_name = 'embeddings' @@ -225,15 +191,15 @@ def test_milvus_client_alter_collection_field_default(self): schema.add_field(str_field_name, DataType.VARCHAR, max_length=max_length, mmap_enabled=True) schema.add_field(json_field_name, DataType.JSON, mmap_enabled=False) - index_params = client_w.prepare_index_params(client)[0] + index_params = self.prepare_index_params(client)[0] index_params.add_index(field_name=vector_field_name, metric_type="COSINE", index_type="IVF_FLAT", params={"nlist": 128}) index_params.add_index(field_name=str_field_name) - client_w.create_collection(client, collection_name, schema=schema, index_params=index_params) - client_w.describe_collection(client, collection_name, check_task=CheckTasks.check_collection_fields_properties, - check_items={str_field_name: {"max_length": max_length, "mmap_enabled": True}, - vector_field_name: 
{"mmap_enabled": True}, - json_field_name: {"mmap_enabled": False}}) + self.create_collection(client, collection_name, schema=schema, index_params=index_params) + self.describe_collection(client, collection_name, check_task=CheckTasks.check_collection_fields_properties, + check_items={str_field_name: {"max_length": max_length, "mmap_enabled": True}, + vector_field_name: {"mmap_enabled": True}, + json_field_name: {"mmap_enabled": False}}) rng = np.random.default_rng(seed=19530) rows = [{ @@ -241,29 +207,29 @@ def test_milvus_client_alter_collection_field_default(self): vector_field_name: list(rng.random((1, dim))[0]), str_field_name: cf.gen_str_by_length(max_length), json_field_name: {"number": i} - } for i in range(default_nb)] - client_w.insert(client, collection_name, rows) + } for i in range(ct.default_nb)] + self.insert(client, collection_name, rows) # 1. alter collection field before load - client_w.release_collection(client, collection_name) - new_max_length = max_length//2 + self.release_collection(client, collection_name) + new_max_length = max_length // 2 # TODO: use one format of mmap_enabled after #38443 fixed - client_w.alter_collection_field(client, collection_name, field_name=str_field_name, - field_params={"max_length": new_max_length, "mmap.enabled": False}) - client_w.alter_collection_field(client, collection_name, field_name=pk_field_name, - field_params={"max_length": new_max_length}) - client_w.alter_collection_field(client, collection_name, field_name=json_field_name, - field_params={"mmap.enabled": True}) - client_w.alter_collection_field(client, collection_name, field_name=vector_field_name, - field_params={"mmap.enabled": False}) + self.alter_collection_field(client, collection_name, field_name=str_field_name, + field_params={"max_length": new_max_length, "mmap.enabled": False}) + self.alter_collection_field(client, collection_name, field_name=pk_field_name, + field_params={"max_length": new_max_length}) + self.alter_collection_field(client, 
collection_name, field_name=json_field_name, + field_params={"mmap.enabled": True}) + self.alter_collection_field(client, collection_name, field_name=vector_field_name, + field_params={"mmap.enabled": False}) error = {ct.err_code: 999, ct.err_msg: f"can not modify the maxlength for non-string types"} - client_w.alter_collection_field(client, collection_name, field_name=vector_field_name, - field_params={"max_length": new_max_length}, - check_task=CheckTasks.err_res, check_items=error) - client_w.describe_collection(client, collection_name, check_task=CheckTasks.check_collection_fields_properties, - check_items={str_field_name: {"max_length": new_max_length, "mmap_enabled": False}, - vector_field_name: {"mmap_enabled": False}, - json_field_name: {"mmap_enabled": True}}) + self.alter_collection_field(client, collection_name, field_name=vector_field_name, + field_params={"max_length": new_max_length}, + check_task=CheckTasks.err_res, check_items=error) + self.describe_collection(client, collection_name, check_task=CheckTasks.check_collection_fields_properties, + check_items={str_field_name: {"max_length": new_max_length, "mmap_enabled": False}, + vector_field_name: {"mmap_enabled": False}, + json_field_name: {"mmap_enabled": True}}) # verify that cannot insert data with the old max_length for alter_field in [pk_field_name, str_field_name]: @@ -273,42 +239,42 @@ def test_milvus_client_alter_collection_field_default(self): vector_field_name: list(rng.random((1, dim))[0]), str_field_name: cf.gen_str_by_length(max_length) if alter_field == str_field_name else f'title_{i}', json_field_name: {"number": i} - } for i in range(default_nb, default_nb+10)] - client_w.insert(client, collection_name, rows, check_task=CheckTasks.err_res, check_items=error) + } for i in range(ct.default_nb, ct.default_nb + 10)] + self.insert(client, collection_name, rows, check_task=CheckTasks.err_res, check_items=error) # verify that can insert data with the new max_length rows = [{ - 
pk_field_name: f"new_{cf.gen_str_by_length(new_max_length-4)}", + pk_field_name: f"new_{cf.gen_str_by_length(new_max_length - 4)}", vector_field_name: list(rng.random((1, dim))[0]), str_field_name: cf.gen_str_by_length(new_max_length), json_field_name: {"number": i} - } for i in range(default_nb, default_nb+10)] - client_w.insert(client, collection_name, rows) + } for i in range(ct.default_nb, ct.default_nb + 10)] + self.insert(client, collection_name, rows) # 2. alter collection field after load - client_w.load_collection(client, collection_name) + self.load_collection(client, collection_name) error = {ct.err_code: 999, ct.err_msg: "can not alter collection field properties if collection loaded"} - client_w.alter_collection_field(client, collection_name, field_name=str_field_name, - field_params={"max_length": max_length, "mmap.enabled": True}, - check_task=CheckTasks.err_res, check_items=error) - client_w.alter_collection_field(client, collection_name, field_name=vector_field_name, - field_params={"mmap.enabled": True}, - check_task=CheckTasks.err_res, check_items=error) - client_w.alter_collection_field(client, collection_name, field_name=pk_field_name, - field_params={"max_length": max_length}) + self.alter_collection_field(client, collection_name, field_name=str_field_name, + field_params={"max_length": max_length, "mmap.enabled": True}, + check_task=CheckTasks.err_res, check_items=error) + self.alter_collection_field(client, collection_name, field_name=vector_field_name, + field_params={"mmap.enabled": True}, + check_task=CheckTasks.err_res, check_items=error) + self.alter_collection_field(client, collection_name, field_name=pk_field_name, + field_params={"max_length": max_length}) - res = client_w.query(client, collection_name, filter=f"{pk_field_name} in ['id_10', 'id_20']", - output_fields=["*"])[0] + res = self.query(client, collection_name, filter=f"{pk_field_name} in ['id_10', 'id_20']", + output_fields=["*"])[0] assert (len(res)) == 2 - res = 
client_w.query(client, collection_name, filter=f"{pk_field_name} like 'new_%'", - output_fields=["*"])[0] - assert(len(res)) == 10 + res = self.query(client, collection_name, filter=f"{pk_field_name} like 'new_%'", + output_fields=["*"])[0] + assert (len(res)) == 10 -class TestMilvusClientAlterDatabase(TestcaseBase): +class TestMilvusClientAlterDatabase(TestMilvusClientV2Base): @pytest.mark.tags(CaseLabel.L0) - # @pytest.mark.skip("reason: need to fix #38469, #38471") + # @pytest.mark.skip("reason: need to fix #38469") def test_milvus_client_alter_database_default(self): """ target: test alter database @@ -319,45 +285,43 @@ def test_milvus_client_alter_database_default(self): alter successfully expected: alter successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") - client_w.release_collection(client, collection_name) + self.create_collection(client, collection_name, ct.default_dim, consistency_level="Strong") + self.release_collection(client, collection_name) default_db = 'default' - res1 = client_w.describe_database(client, db_name=default_db)[0] + res1 = self.describe_database(client, db_name=default_db)[0] if len(res1.keys()) != 1: - client_w.drop_database_properties(client, db_name=default_db, property_keys=res1.keys()) - assert len(client_w.describe_database(client, default_db)[0].keys()) == 1 + self.drop_database_properties(client, db_name=default_db, property_keys=res1.keys()) + assert len(self.describe_database(client, default_db)[0].keys()) == 1 for need_load in [True, False]: if need_load: log.debug("alter database after load collection") - client_w.load_collection(client, collection_name) + self.load_collection(client, collection_name) # 1. 
alter default database properties before load properties = {"key1": 1, "key2": "value2", "key3": [1, 2, 3], } - client_w.alter_database_properties(client, db_name=default_db, properties=properties) - res1 = client_w.describe_database(client, db_name=default_db)[0] + self.alter_database_properties(client, db_name=default_db, properties=properties) + res1 = self.describe_database(client, db_name=default_db)[0] # assert res1.properties.items() >= properties.items() assert len(res1.keys()) == 4 my_db = cf.gen_unique_str(prefix) - client_w.create_database(client, my_db, properties=properties) - res1 = client_w.describe_database(client, db_name=my_db)[0] + self.create_database(client, my_db, properties=properties) + res1 = self.describe_database(client, db_name=my_db)[0] # assert res1.properties.items() >= properties.items() assert len(res1.keys()) == 4 properties = {"key1": 2, "key2": "value3", "key3": [1, 2, 3], 'key4': 0.123} - client_w.alter_database_properties(client, db_name=my_db, properties=properties) - res1 = client_w.describe_database(client, db_name=my_db)[0] + self.alter_database_properties(client, db_name=my_db, properties=properties) + res1 = self.describe_database(client, db_name=my_db)[0] # assert res1.properties.items() >= properties.items() assert len(res1.keys()) == 5 # drop the default database properties - client_w.drop_database_properties(client, db_name=default_db, property_keys=["key1", "key2"]) - res1 = client_w.describe_database(client, db_name=default_db)[0] + self.drop_database_properties(client, db_name=default_db, property_keys=["key1", "key2"]) + res1 = self.describe_database(client, db_name=default_db)[0] assert len(res1.keys()) == 2 - client_w.drop_database_properties(client, db_name=default_db, property_keys=["key3", "key_non_exist"]) - res1 = client_w.describe_database(client, db_name=default_db)[0] + self.drop_database_properties(client, db_name=default_db, property_keys=["key3", "key_non_exist"]) + res1 = 
self.describe_database(client, db_name=default_db)[0] assert len(res1.keys()) == 1 # drop the user database - client_w.drop_database(client, my_db) - - + self.drop_database(client, my_db) diff --git a/tests/python_client/milvus_client/test_milvus_client_collection.py b/tests/python_client/milvus_client/test_milvus_client_collection.py index cf503895a6997..da5f5fcd88f72 100644 --- a/tests/python_client/milvus_client/test_milvus_client_collection.py +++ b/tests/python_client/milvus_client/test_milvus_client_collection.py @@ -1,28 +1,13 @@ -import multiprocessing -import numbers -import random -import numpy -import threading import pytest -import pandas as pd -import decimal -from decimal import Decimal, getcontext -from time import sleep -import heapq -from pymilvus import DataType - -from base.client_base import TestcaseBase + +from base.client_v2_base import TestMilvusClientV2Base from utils.util_log import test_log as log from common import common_func as cf from common import common_type as ct from common.common_type import CaseLabel, CheckTasks from utils.util_pymilvus import * -from common.constants import * -from pymilvus.orm.types import CONSISTENCY_STRONG, CONSISTENCY_BOUNDED, CONSISTENCY_SESSION, CONSISTENCY_EVENTUALLY -from base.high_level_api_wrapper import HighLevelApiWrapper -client_w = HighLevelApiWrapper() -prefix = "milvus_client_api_collection" +prefix = "client_collection" epsilon = ct.epsilon default_nb = ct.default_nb default_nb_medium = ct.default_nb_medium @@ -47,7 +32,7 @@ default_string_array_field_name = ct.default_string_array_field_name -class TestMilvusClientCollectionInvalid(TestcaseBase): +class TestMilvusClientCollectionInvalid(TestMilvusClientV2Base): """ Test case of create collection interface """ @pytest.fixture(scope="function", params=[False, True]) @@ -72,12 +57,12 @@ def test_milvus_client_collection_invalid_collection_name(self, collection_name) method: create collection with invalid collection expected: raise exception """ - 
client = self._connect(enable_milvus_client_api=True) + client = self._client() # 1. create collection error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {collection_name}. the first character of a " f"collection name must be an underscore or letter: invalid parameter"} - client_w.create_collection(client, collection_name, default_dim, - check_task=CheckTasks.err_res, check_items=error) + self.create_collection(client, collection_name, default_dim, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_collection_name_over_max_length(self): @@ -86,12 +71,12 @@ def test_milvus_client_collection_name_over_max_length(self): method: create collection with over max collection name length expected: raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() # 1. create collection collection_name = "a".join("a" for i in range(256)) error = {ct.err_code: 1100, ct.err_msg: f"the length of a collection name must be less than 255 characters"} - client_w.create_collection(client, collection_name, default_dim, - check_task=CheckTasks.err_res, check_items=error) + self.create_collection(client, collection_name, default_dim, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_collection_name_empty(self): @@ -100,22 +85,22 @@ def test_milvus_client_collection_name_empty(self): method: create collection name with empty expected: raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() # 1. 
create collection collection_name = " " error = {ct.err_code: 0, ct.err_msg: "collection name should not be empty: invalid parameter"} - client_w.create_collection(client, collection_name, default_dim, - check_task=CheckTasks.err_res, check_items=error) + self.create_collection(client, collection_name, default_dim, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) - @pytest.mark.parametrize("dim", [ct.min_dim-1, ct.max_dim+1]) + @pytest.mark.parametrize("dim", [ct.min_dim - 1, ct.max_dim + 1]) def test_milvus_client_collection_invalid_dim(self, dim): """ target: test fast create collection name with invalid dim method: create collection name with invalid dim expected: raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection error = {ct.err_code: 65535, ct.err_msg: f"invalid dimension: {dim}. " @@ -123,9 +108,9 @@ def test_milvus_client_collection_invalid_dim(self, dim): if dim < ct.min_dim: error = {ct.err_code: 65535, ct.err_msg: f"invalid dimension: {dim}. " f"should be in range 2 ~ 32768"} - client_w.create_collection(client, collection_name, dim, - check_task=CheckTasks.err_res, check_items=error) - client_w.drop_collection(client, collection_name) + self.create_collection(client, collection_name, dim, + check_task=CheckTasks.err_res, check_items=error) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L2) @pytest.mark.xfail(reason="pymilvus issue 1554") @@ -135,12 +120,12 @@ def test_milvus_client_collection_invalid_primary_field(self): method: create collection name with invalid primary field expected: raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. 
create collection error = {ct.err_code: 1, ct.err_msg: f"Param id_type must be int or string"} - client_w.create_collection(client, collection_name, default_dim, id_type="invalid", - check_task=CheckTasks.err_res, check_items=error) + self.create_collection(client, collection_name, default_dim, id_type="invalid", + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_collection_string_auto_id(self): @@ -149,13 +134,13 @@ def test_milvus_client_collection_string_auto_id(self): method: create collection name with invalid primary field expected: raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection error = {ct.err_code: 65535, ct.err_msg: f"type param(max_length) should be specified for varChar " f"field of collection {collection_name}"} - client_w.create_collection(client, collection_name, default_dim, id_type="string", auto_id=True, - check_task=CheckTasks.err_res, check_items=error) + self.create_collection(client, collection_name, default_dim, id_type="string", auto_id=True, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_create_same_collection_different_params(self): @@ -164,18 +149,18 @@ def test_milvus_client_create_same_collection_different_params(self): method: create same collection with different params expected: raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) # 2. create collection with same params - client_w.create_collection(client, collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) # 3. 
create collection with same name and different params error = {ct.err_code: 1, ct.err_msg: f"create duplicate collection with different parameters, " f"collection: {collection_name}"} - client_w.create_collection(client, collection_name, default_dim+1, - check_task=CheckTasks.err_res, check_items=error) - client_w.drop_collection(client, collection_name) + self.create_collection(client, collection_name, default_dim + 1, + check_task=CheckTasks.err_res, check_items=error) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L2) @pytest.mark.xfail(reason="pymilvus issue 1872") @@ -186,13 +171,13 @@ def test_milvus_client_collection_invalid_metric_type(self, metric_type): method: create same collection with invalid metric type expected: raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection error = {ct.err_code: 65535, ct.err_msg: "metric type not found or not supported, supported: [L2 IP COSINE HAMMING JACCARD]"} - client_w.create_collection(client, collection_name, default_dim, metric_type=metric_type, - check_task=CheckTasks.err_res, check_items=error) + self.create_collection(client, collection_name, default_dim, metric_type=metric_type, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) @pytest.mark.skip(reason="pymilvus issue 1864") @@ -202,20 +187,20 @@ def test_milvus_client_collection_invalid_schema_field_name(self): method: create collection with invalid schema field name expected: raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) - schema = client_w.create_schema(client, enable_dynamic_field=False)[0] + schema = self.create_schema(client, enable_dynamic_field=False)[0] schema.add_field("%$#", DataType.VARCHAR, max_length=64, - is_primary=True, auto_id = False) + is_primary=True, auto_id=False) 
schema.add_field("embeddings", DataType.FLOAT_VECTOR, dim=128) # 1. create collection error = {ct.err_code: 65535, ct.err_msg: "metric type not found or not supported, supported: [L2 IP COSINE HAMMING JACCARD]"} - client_w.create_collection(client, collection_name, schema=schema, - check_task=CheckTasks.err_res, check_items=error) + self.create_collection(client, collection_name, schema=schema, + check_task=CheckTasks.err_res, check_items=error) -class TestMilvusClientCollectionValid(TestcaseBase): +class TestMilvusClientCollectionValid(TestMilvusClientV2Base): """ Test case of create collection interface """ @pytest.fixture(scope="function", params=[False, True]) @@ -244,25 +229,25 @@ def test_milvus_client_collection_fast_creation_default(self, dim): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) - client_w.using_database(client, "default") + self.using_database(client, "default") # 1. 
create collection - client_w.create_collection(client, collection_name, dim) - collections = client_w.list_collections(client)[0] + self.create_collection(client, collection_name, dim) + collections = self.list_collections(client)[0] assert collection_name in collections - client_w.describe_collection(client, collection_name, - check_task=CheckTasks.check_describe_collection_property, - check_items={"collection_name": collection_name, - "dim": dim, - "consistency_level": 0}) - index = client_w.list_indexes(client, collection_name)[0] + self.describe_collection(client, collection_name, + check_task=CheckTasks.check_describe_collection_property, + check_items={"collection_name": collection_name, + "dim": dim, + "consistency_level": 0}) + index = self.list_indexes(client, collection_name)[0] assert index == ['vector'] - # load_state = client_w.get_load_state(collection_name)[0] - client_w.load_partitions(client, collection_name, "_default") - client_w.release_partitions(client, collection_name, "_default") - if client_w.has_collection(client, collection_name)[0]: - client_w.drop_collection(client, collection_name) + # load_state = self.get_load_state(collection_name)[0] + self.load_partitions(client, collection_name, "_default") + self.release_partitions(client, collection_name, "_default") + if self.has_collection(client, collection_name)[0]: + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) @pytest.mark.parametrize("dim", [ct.min_dim, default_dim, ct.max_dim]) @@ -272,24 +257,24 @@ def test_milvus_client_collection_fast_creation_all_params(self, dim, metric_typ method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) max_length = 100 # 1. 
create collection - client_w.create_collection(client, collection_name, dim, id_type=id_type, metric_type=metric_type, - auto_id=auto_id, max_length=max_length) - collections = client_w.list_collections(client)[0] + self.create_collection(client, collection_name, dim, id_type=id_type, metric_type=metric_type, + auto_id=auto_id, max_length=max_length) + collections = self.list_collections(client)[0] assert collection_name in collections - client_w.describe_collection(client, collection_name, - check_task=CheckTasks.check_describe_collection_property, - check_items={"collection_name": collection_name, - "dim": dim, - "consistency_level": 0}) - index = client_w.list_indexes(client, collection_name)[0] + self.describe_collection(client, collection_name, + check_task=CheckTasks.check_describe_collection_property, + check_items={"collection_name": collection_name, + "dim": dim, + "consistency_level": 0}) + index = self.list_indexes(client, collection_name)[0] assert index == ['vector'] - # load_state = client_w.get_load_state(collection_name)[0] - client_w.release_collection(client, collection_name) - client_w.drop_collection(client, collection_name) + # load_state = self.get_load_state(collection_name)[0] + self.release_collection(client, collection_name) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L0) @pytest.mark.parametrize("nullable", [True, False]) @@ -299,22 +284,22 @@ def test_milvus_client_collection_self_creation_default(self, nullable): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) dim = 128 # 1. 
create collection - schema = client_w.create_schema(client, enable_dynamic_field=False)[0] - schema.add_field("id_string", DataType.VARCHAR, max_length=64, is_primary=True, auto_id = False) + schema = self.create_schema(client, enable_dynamic_field=False)[0] + schema.add_field("id_string", DataType.VARCHAR, max_length=64, is_primary=True, auto_id=False) schema.add_field("embeddings", DataType.FLOAT_VECTOR, dim=dim) schema.add_field("title", DataType.VARCHAR, max_length=64, is_partition_key=True) schema.add_field("nullable_field", DataType.INT64, nullable=nullable, default_value=10) schema.add_field("array_field", DataType.ARRAY, element_type=DataType.INT64, max_capacity=12, max_length=64, nullable=nullable) - index_params = client_w.prepare_index_params(client)[0] + index_params = self.prepare_index_params(client)[0] index_params.add_index("embeddings", metric_type="COSINE") # index_params.add_index("title") - client_w.create_collection(client, collection_name, dimension=dim, schema=schema, index_params=index_params) - collections = client_w.list_collections(client)[0] + self.create_collection(client, collection_name, dimension=dim, schema=schema, index_params=index_params) + collections = self.list_collections(client)[0] assert collection_name in collections check_items = {"collection_name": collection_name, "dim": dim, @@ -325,13 +310,13 @@ def test_milvus_client_collection_self_creation_default(self, nullable): "vector_name": "embeddings"} if nullable: check_items["nullable_fields"] = ["nullable_field", "array_field"] - client_w.describe_collection(client, collection_name, - check_task=CheckTasks.check_describe_collection_property, - check_items=check_items) - index = client_w.list_indexes(client, collection_name)[0] + self.describe_collection(client, collection_name, + check_task=CheckTasks.check_describe_collection_property, + check_items=check_items) + index = self.list_indexes(client, collection_name)[0] assert index == ['embeddings'] - if 
client_w.has_collection(client, collection_name)[0]: - client_w.drop_collection(client, collection_name) + if self.has_collection(client, collection_name)[0]: + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_array_insert_search(self): @@ -340,11 +325,11 @@ def test_milvus_client_array_insert_search(self): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") - collections = client_w.list_collections(client)[0] + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") + collections = self.list_collections(client)[0] assert collection_name in collections # 2. insert rng = np.random.default_rng(seed=19530) @@ -352,19 +337,19 @@ def test_milvus_client_array_insert_search(self): default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, - default_int32_array_field_name: [i, i+1, i+2], + default_int32_array_field_name: [i, i + 1, i + 2], default_string_array_field_name: [str(i), str(i + 1), str(i + 2)] } for i in range(default_nb)] - client_w.insert(client, collection_name, rows) + self.insert(client, collection_name, rows) # 3. 
search vectors_to_search = rng.random((1, default_dim)) insert_ids = [i for i in range(default_nb)] - client_w.search(client, collection_name, vectors_to_search, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": insert_ids, - "limit": default_limit}) + self.search(client, collection_name, vectors_to_search, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": insert_ids, + "limit": default_limit}) @pytest.mark.tags(CaseLabel.L2) @pytest.mark.skip(reason="issue 25110") @@ -374,36 +359,37 @@ def test_milvus_client_search_query_string(self): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, id_type="string", max_length=ct.default_length) - client_w.describe_collection(client, collection_name, - check_task=CheckTasks.check_describe_collection_property, - check_items={"collection_name": collection_name, - "dim": default_dim, - "auto_id": auto_id}) + self.create_collection(client, collection_name, default_dim, id_type="string", max_length=ct.default_length) + self.describe_collection(client, collection_name, + check_task=CheckTasks.check_describe_collection_property, + check_items={"collection_name": collection_name, + "dim": default_dim, + "consistency_level": 0}) # 2. 
insert rng = np.random.default_rng(seed=19530) - rows = [{default_primary_key_field_name: str(i), default_vector_field_name: list(rng.random((1, default_dim))[0]), - default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - client_w.insert(client, collection_name, rows) - client_w.flush(client, collection_name) - assert client_w.num_entities(client, collection_name)[0] == default_nb + rows = [ + {default_primary_key_field_name: str(i), default_vector_field_name: list(rng.random((1, default_dim))[0]), + default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] + self.insert(client, collection_name, rows) + # self.flush(client, collection_name) + # assert self.num_entities(client, collection_name)[0] == default_nb # 3. search vectors_to_search = rng.random((1, default_dim)) - client_w.search(client, collection_name, vectors_to_search, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "limit": default_limit}) + self.search(client, collection_name, vectors_to_search, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "limit": default_limit}) # 4. 
query - client_w.query(client, collection_name, filter=default_search_exp, - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows, - "with_vec": True, - "primary_field": default_primary_key_field_name}) - client_w.drop_collection(client, collection_name) + self.query(client, collection_name, filter="id in [0, 1]", + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows, + "with_vec": True, + "primary_field": default_primary_key_field_name}) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_search_different_metric_types_not_specifying_in_search_params(self, metric_type, auto_id): @@ -412,11 +398,11 @@ def test_milvus_client_search_different_metric_types_not_specifying_in_search_pa method: create connection, collection, insert and search expected: search successfully with limit(topK) """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, metric_type=metric_type, auto_id=auto_id, - consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, metric_type=metric_type, auto_id=auto_id, + consistency_level="Strong") # 2. insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), @@ -424,17 +410,17 @@ def test_milvus_client_search_different_metric_types_not_specifying_in_search_pa if auto_id: for row in rows: row.pop(default_primary_key_field_name) - client_w.insert(client, collection_name, rows) + self.insert(client, collection_name, rows) # 3. 
search vectors_to_search = rng.random((1, default_dim)) # search_params = {"metric_type": metric_type} - client_w.search(client, collection_name, vectors_to_search, limit=default_limit, - output_fields=[default_primary_key_field_name], - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "limit": default_limit}) - client_w.drop_collection(client, collection_name) + self.search(client, collection_name, vectors_to_search, limit=default_limit, + output_fields=[default_primary_key_field_name], + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "limit": default_limit}) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L2) @pytest.mark.skip("pymilvus issue #1866") @@ -444,11 +430,11 @@ def test_milvus_client_search_different_metric_types_specifying_in_search_params method: create connection, collection, insert and search expected: search successfully with limit(topK) """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, metric_type=metric_type, auto_id=auto_id, - consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, metric_type=metric_type, auto_id=auto_id, + consistency_level="Strong") # 2. insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), @@ -456,18 +442,18 @@ def test_milvus_client_search_different_metric_types_specifying_in_search_params if auto_id: for row in rows: row.pop(default_primary_key_field_name) - client_w.insert(client, collection_name, rows) + self.insert(client, collection_name, rows) # 3. 
search vectors_to_search = rng.random((1, default_dim)) search_params = {"metric_type": metric_type} - client_w.search(client, collection_name, vectors_to_search, limit=default_limit, - search_params=search_params, - output_fields=[default_primary_key_field_name], - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "limit": default_limit}) - client_w.drop_collection(client, collection_name) + self.search(client, collection_name, vectors_to_search, limit=default_limit, + search_params=search_params, + output_fields=[default_primary_key_field_name], + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "limit": default_limit}) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_delete_with_ids(self): @@ -476,19 +462,19 @@ def test_milvus_client_delete_with_ids(self): method: create connection, collection, insert delete, and search expected: search/query successfully without deleted data """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. insert default_nb = 1000 rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - pks = client_w.insert(client, collection_name, rows)[0] + pks = self.insert(client, collection_name, rows)[0] # 3. 
delete delete_num = 3 - client_w.delete(client, collection_name, ids=[i for i in range(delete_num)]) + self.delete(client, collection_name, ids=[i for i in range(delete_num)]) # 4. search vectors_to_search = rng.random((1, default_dim)) insert_ids = [i for i in range(default_nb)] @@ -496,19 +482,19 @@ def test_milvus_client_delete_with_ids(self): if insert_id in insert_ids: insert_ids.remove(insert_id) limit = default_nb - delete_num - client_w.search(client, collection_name, vectors_to_search, limit=default_nb, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": insert_ids, - "limit": limit}) + self.search(client, collection_name, vectors_to_search, limit=default_nb, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": insert_ids, + "limit": limit}) # 5. query - client_w.query(client, collection_name, filter=default_search_exp, - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows[delete_num:], - "with_vec": True, - "primary_field": default_primary_key_field_name}) - client_w.drop_collection(client, collection_name) + self.query(client, collection_name, filter=default_search_exp, + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows[delete_num:], + "with_vec": True, + "primary_field": default_primary_key_field_name}) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_delete_with_filters(self): @@ -517,19 +503,19 @@ def test_milvus_client_delete_with_filters(self): method: create connection, collection, insert delete, and search expected: search/query successfully without deleted data """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. 
create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. insert default_nb = 1000 rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - pks = client_w.insert(client, collection_name, rows)[0] + pks = self.insert(client, collection_name, rows)[0] # 3. delete delete_num = 3 - client_w.delete(client, collection_name, filter=f"id < {delete_num}") + self.delete(client, collection_name, filter=f"id < {delete_num}") # 4. search vectors_to_search = rng.random((1, default_dim)) insert_ids = [i for i in range(default_nb)] @@ -537,19 +523,19 @@ def test_milvus_client_delete_with_filters(self): if insert_id in insert_ids: insert_ids.remove(insert_id) limit = default_nb - delete_num - client_w.search(client, collection_name, vectors_to_search, limit=default_nb, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": insert_ids, - "limit": limit}) + self.search(client, collection_name, vectors_to_search, limit=default_nb, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": insert_ids, + "limit": limit}) # 5. 
query - client_w.query(client, collection_name, filter=default_search_exp, - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows[delete_num:], - "with_vec": True, - "primary_field": default_primary_key_field_name}) - client_w.drop_collection(client, collection_name) + self.query(client, collection_name, filter=default_search_exp, + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows[delete_num:], + "with_vec": True, + "primary_field": default_primary_key_field_name}) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_collection_rename_collection(self): @@ -558,33 +544,33 @@ def test_milvus_client_collection_rename_collection(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim) - collections = client_w.list_collections(client)[0] + self.create_collection(client, collection_name, default_dim) + collections = self.list_collections(client)[0] assert collection_name in collections old_name = collection_name new_name = collection_name + "new" - client_w.rename_collection(client, old_name, new_name) - collections = client_w.list_collections(client)[0] + self.rename_collection(client, old_name, new_name) + collections = self.list_collections(client)[0] assert new_name in collections assert old_name not in collections - client_w.describe_collection(client, new_name, - check_task=CheckTasks.check_describe_collection_property, - check_items={"collection_name": new_name, - "dim": default_dim, - "consistency_level": 0}) - index = client_w.list_indexes(client, new_name)[0] + self.describe_collection(client, new_name, + check_task=CheckTasks.check_describe_collection_property, + check_items={"collection_name": 
new_name, + "dim": default_dim, + "consistency_level": 0}) + index = self.list_indexes(client, new_name)[0] assert index == ['vector'] - # load_state = client_w.get_load_state(collection_name)[0] + # load_state = self.get_load_state(collection_name)[0] error = {ct.err_code: 100, ct.err_msg: f"collection not found"} - client_w.load_partitions(client, old_name, "_default", - check_task=CheckTasks.err_res, check_items=error) - client_w.load_partitions(client, new_name, "_default") - client_w.release_partitions(client, new_name, "_default") - if client_w.has_collection(client, collection_name)[0]: - client_w.drop_collection(client, new_name) + self.load_partitions(client, old_name, "_default", + check_task=CheckTasks.err_res, check_items=error) + self.load_partitions(client, new_name, "_default") + self.release_partitions(client, new_name, "_default") + if self.has_collection(client, collection_name)[0]: + self.drop_collection(client, new_name) @pytest.mark.tags(CaseLabel.L1) @pytest.mark.skip(reason="db not ready") @@ -594,44 +580,45 @@ def test_milvus_client_collection_rename_collection_target_db(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. 
create collection - client_w.create_collection(client, collection_name, default_dim) - collections = client_w.list_collections(client)[0] + self.create_collection(client, collection_name, default_dim) + collections = self.list_collections(client)[0] assert collection_name in collections db_name = "new_db" - client_w.using_database(client, db_name) + self.using_database(client, db_name) old_name = collection_name new_name = collection_name + "new" - client_w.rename_collection(client, old_name, new_name, target_db=db_name) - collections = client_w.list_collections(client)[0] + self.rename_collection(client, old_name, new_name, target_db=db_name) + collections = self.list_collections(client)[0] assert new_name in collections assert old_name not in collections - client_w.describe_collection(client, new_name, - check_task=CheckTasks.check_describe_collection_property, - check_items={"collection_name": new_name, - "dim": default_dim, - "consistency_level": 0}) - index = client_w.list_indexes(client, new_name)[0] + self.describe_collection(client, new_name, + check_task=CheckTasks.check_describe_collection_property, + check_items={"collection_name": new_name, + "dim": default_dim, + "consistency_level": 0}) + index = self.list_indexes(client, new_name)[0] assert index == ['vector'] - # load_state = client_w.get_load_state(collection_name)[0] + # load_state = self.get_load_state(collection_name)[0] error = {ct.err_code: 100, ct.err_msg: f"collection not found"} - client_w.load_partitions(client, old_name, "_default", - check_task=CheckTasks.err_res, check_items=error) - client_w.load_partitions(client, new_name, "_default") - client_w.release_partitions(client, new_name, "_default") - if client_w.has_collection(client, collection_name)[0]: - client_w.drop_collection(client, new_name) + self.load_partitions(client, old_name, "_default", + check_task=CheckTasks.err_res, check_items=error) + self.load_partitions(client, new_name, "_default") + self.release_partitions(client, 
new_name, "_default") + if self.has_collection(client, collection_name)[0]: + self.drop_collection(client, new_name) -class TestMilvusClientDropCollectionInvalid(TestcaseBase): +class TestMilvusClientDropCollectionInvalid(TestMilvusClientV2Base): """ Test case of drop collection interface """ """ ****************************************************************** # The following are invalid base cases ****************************************************************** """ + @pytest.mark.tags(CaseLabel.L1) @pytest.mark.parametrize("name", ["12-s", "12 s", "(mn)", "中文", "%$#"]) def test_milvus_client_drop_collection_invalid_collection_name(self, name): @@ -640,11 +627,11 @@ def test_milvus_client_drop_collection_invalid_collection_name(self, name): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {name}. 
" f"the first character of a collection name must be an underscore or letter"} - client_w.drop_collection(client, name, - check_task=CheckTasks.err_res, check_items=error) + self.drop_collection(client, name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_drop_collection_not_existed(self): @@ -653,18 +640,19 @@ def test_milvus_client_drop_collection_not_existed(self): method: create collection expected: drop successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str("nonexisted") - client_w.drop_collection(client, collection_name) + self.drop_collection(client, collection_name) -class TestMilvusClientReleaseCollectionInvalid(TestcaseBase): +class TestMilvusClientReleaseCollectionInvalid(TestMilvusClientV2Base): """ Test case of release collection interface """ """ ****************************************************************** # The following are invalid base cases ****************************************************************** """ + @pytest.mark.tags(CaseLabel.L1) @pytest.mark.parametrize("name", ["12-s", "12 s", "(mn)", "中文", "%$#"]) def test_milvus_client_release_collection_invalid_collection_name(self, name): @@ -673,12 +661,12 @@ def test_milvus_client_release_collection_invalid_collection_name(self, name): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {name}. 
" f"the first character of a collection name must be an underscore or letter"} - client_w.release_collection(client, name, - check_task=CheckTasks.err_res, check_items=error) + self.release_collection(client, name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_release_collection_not_existed(self): @@ -687,12 +675,12 @@ def test_milvus_client_release_collection_not_existed(self): method: create collection expected: drop successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str("nonexisted") error = {ct.err_code: 1100, ct.err_msg: f"collection not found[database=default]" f"[collection={collection_name}]"} - client_w.release_collection(client, collection_name, - check_task=CheckTasks.err_res, check_items=error) + self.release_collection(client, collection_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_release_collection_name_over_max_length(self): @@ -701,15 +689,15 @@ def test_milvus_client_release_collection_name_over_max_length(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() # 1. 
create collection collection_name = "a".join("a" for i in range(256)) error = {ct.err_code: 1100, ct.err_msg: f"the length of a collection name must be less than 255 characters"} - client_w.release_collection(client, collection_name, default_dim, - check_task=CheckTasks.err_res, check_items=error) + self.release_collection(client, collection_name, default_dim, + check_task=CheckTasks.err_res, check_items=error) -class TestMilvusClientReleaseCollectionValid(TestcaseBase): +class TestMilvusClientReleaseCollectionValid(TestMilvusClientV2Base): """ Test case of release collection interface """ @pytest.fixture(scope="function", params=[False, True]) @@ -737,14 +725,14 @@ def test_milvus_client_release_unloaded_collection(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim) - client_w.release_collection(client, collection_name) - client_w.release_collection(client, collection_name) - if client_w.has_collection(client, collection_name)[0]: - client_w.drop_collection(client, collection_name) + self.create_collection(client, collection_name, default_dim) + self.release_collection(client, collection_name) + self.release_collection(client, collection_name) + if self.has_collection(client, collection_name)[0]: + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_load_partially_loaded_collection(self): @@ -753,28 +741,29 @@ def test_milvus_client_load_partially_loaded_collection(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) partition_name = 
cf.gen_unique_str("partition") # 1. create collection - client_w.create_collection(client, collection_name, default_dim) - client_w.create_partition(client, collection_name, partition_name) - client_w.release_partitions(client, collection_name, ["_default", partition_name]) - client_w.release_collection(client, collection_name) - client_w.load_collection(client, collection_name) - client_w.release_partitions(client, collection_name, [partition_name]) - client_w.release_collection(client, collection_name) - if client_w.has_collection(client, collection_name)[0]: - client_w.drop_collection(client, collection_name) - - -class TestMilvusClientLoadCollectionInvalid(TestcaseBase): + self.create_collection(client, collection_name, default_dim) + self.create_partition(client, collection_name, partition_name) + self.release_partitions(client, collection_name, ["_default", partition_name]) + self.release_collection(client, collection_name) + self.load_collection(client, collection_name) + self.release_partitions(client, collection_name, [partition_name]) + self.release_collection(client, collection_name) + if self.has_collection(client, collection_name)[0]: + self.drop_collection(client, collection_name) + + +class TestMilvusClientLoadCollectionInvalid(TestMilvusClientV2Base): """ Test case of search interface """ """ ****************************************************************** # The following are invalid base cases ****************************************************************** """ + @pytest.mark.tags(CaseLabel.L1) @pytest.mark.parametrize("name", ["12-s", "12 s", "(mn)", "中文", "%$#"]) def test_milvus_client_load_collection_invalid_collection_name(self, name): @@ -783,12 +772,12 @@ def test_milvus_client_load_collection_invalid_collection_name(self, name): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() error = {ct.err_code: 
1100, ct.err_msg: f"Invalid collection name: {name}. " f"the first character of a collection name must be an underscore or letter"} - client_w.load_collection(client, name, - check_task=CheckTasks.err_res, check_items=error) + self.load_collection(client, name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_load_collection_not_existed(self): @@ -797,12 +786,12 @@ def test_milvus_client_load_collection_not_existed(self): method: create collection expected: drop successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str("nonexisted") error = {ct.err_code: 1100, ct.err_msg: f"collection not found[database=default]" f"[collection={collection_name}]"} - client_w.load_collection(client, collection_name, - check_task=CheckTasks.err_res, check_items=error) + self.load_collection(client, collection_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_load_collection_over_max_length(self): @@ -811,13 +800,13 @@ def test_milvus_client_load_collection_over_max_length(self): method: create collection expected: drop successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = "a".join("a" for i in range(256)) error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {collection_name}. 
" f"the length of a collection name must be less than 255 characters: " f"invalid parameter"} - client_w.load_collection(client, collection_name, - check_task=CheckTasks.err_res, check_items=error) + self.load_collection(client, collection_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_load_collection_without_index(self): @@ -826,20 +815,20 @@ def test_milvus_client_load_collection_without_index(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim) - client_w.release_collection(client, collection_name) - client_w.drop_index(client, collection_name, "vector") + self.create_collection(client, collection_name, default_dim) + self.release_collection(client, collection_name) + self.drop_index(client, collection_name, "vector") error = {ct.err_code: 700, ct.err_msg: f"index not found[collection={collection_name}]"} - client_w.load_collection(client, collection_name, - check_task=CheckTasks.err_res, check_items=error) - if client_w.has_collection(client, collection_name)[0]: - client_w.drop_collection(client, collection_name) + self.load_collection(client, collection_name, + check_task=CheckTasks.err_res, check_items=error) + if self.has_collection(client, collection_name)[0]: + self.drop_collection(client, collection_name) -class TestMilvusClientLoadCollectionValid(TestcaseBase): +class TestMilvusClientLoadCollectionValid(TestMilvusClientV2Base): """ Test case of search interface """ @pytest.fixture(scope="function", params=[False, True]) @@ -867,13 +856,13 @@ def test_milvus_client_load_loaded_collection(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = 
self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim) - client_w.load_collection(client, collection_name) - if client_w.has_collection(client, collection_name)[0]: - client_w.drop_collection(client, collection_name) + self.create_collection(client, collection_name, default_dim) + self.load_collection(client, collection_name) + if self.has_collection(client, collection_name)[0]: + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_load_partially_loaded_collection(self): @@ -882,29 +871,30 @@ def test_milvus_client_load_partially_loaded_collection(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) partition_name = cf.gen_unique_str("partition") # 1. 
create collection - client_w.create_collection(client, collection_name, default_dim) - client_w.create_partition(client, collection_name, partition_name) - client_w.release_collection(client, collection_name) - client_w.load_partitions(client, collection_name, [partition_name]) - client_w.load_collection(client, collection_name) - client_w.release_collection(client, collection_name) - client_w.load_partitions(client, collection_name, ["_default", partition_name]) - client_w.load_collection(client, collection_name) - if client_w.has_collection(client, collection_name)[0]: - client_w.drop_collection(client, collection_name) - - -class TestMilvusClientDescribeCollectionInvalid(TestcaseBase): + self.create_collection(client, collection_name, default_dim) + self.create_partition(client, collection_name, partition_name) + self.release_collection(client, collection_name) + self.load_partitions(client, collection_name, [partition_name]) + self.load_collection(client, collection_name) + self.release_collection(client, collection_name) + self.load_partitions(client, collection_name, ["_default", partition_name]) + self.load_collection(client, collection_name) + if self.has_collection(client, collection_name)[0]: + self.drop_collection(client, collection_name) + + +class TestMilvusClientDescribeCollectionInvalid(TestMilvusClientV2Base): """ Test case of search interface """ """ ****************************************************************** # The following are invalid base cases ****************************************************************** """ + @pytest.mark.tags(CaseLabel.L1) @pytest.mark.parametrize("name", ["12-s", "12 s", "(mn)", "中文", "%$#"]) def test_milvus_client_describe_collection_invalid_collection_name(self, name): @@ -913,12 +903,12 @@ def test_milvus_client_describe_collection_invalid_collection_name(self, name): method: create collection expected: create collection with default schema, index, and load successfully """ - client = 
self._connect(enable_milvus_client_api=True) + client = self._client() error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {name}. " f"the first character of a collection name must be an underscore or letter"} - client_w.describe_collection(client, name, - check_task=CheckTasks.err_res, check_items=error) + self.describe_collection(client, name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_describe_collection_not_existed(self): @@ -927,11 +917,11 @@ def test_milvus_client_describe_collection_not_existed(self): method: create collection expected: drop successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = "nonexisted" error = {ct.err_code: 100, ct.err_msg: "can't find collection[database=default][collection=nonexisted]"} - client_w.describe_collection(client, collection_name, - check_task=CheckTasks.err_res, check_items=error) + self.describe_collection(client, collection_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_describe_collection_deleted_collection(self): @@ -940,23 +930,24 @@ def test_milvus_client_describe_collection_deleted_collection(self): method: create collection expected: drop successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. 
create collection - client_w.create_collection(client, collection_name, default_dim) - client_w.drop_collection(client, collection_name) + self.create_collection(client, collection_name, default_dim) + self.drop_collection(client, collection_name) error = {ct.err_code: 100, ct.err_msg: f"can't find collection[database=default][collection={collection_name}]"} - client_w.describe_collection(client, collection_name, - check_task=CheckTasks.err_res, check_items=error) + self.describe_collection(client, collection_name, + check_task=CheckTasks.err_res, check_items=error) -class TestMilvusClientHasCollectionInvalid(TestcaseBase): +class TestMilvusClientHasCollectionInvalid(TestMilvusClientV2Base): """ Test case of search interface """ """ ****************************************************************** # The following are invalid base cases ****************************************************************** """ + @pytest.mark.tags(CaseLabel.L1) @pytest.mark.parametrize("name", ["12-s", "12 s", "(mn)", "中文", "%$#"]) def test_milvus_client_has_collection_invalid_collection_name(self, name): @@ -965,12 +956,12 @@ def test_milvus_client_has_collection_invalid_collection_name(self, name): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {name}. 
" f"the first character of a collection name must be an underscore or letter"} - client_w.has_collection(client, name, - check_task=CheckTasks.err_res, check_items=error) + self.has_collection(client, name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_has_collection_not_existed(self): @@ -979,9 +970,9 @@ def test_milvus_client_has_collection_not_existed(self): method: create collection expected: drop successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = "nonexisted" - result = client_w.has_collection(client, collection_name)[0] + result = self.has_collection(client, collection_name)[0] assert result == False @pytest.mark.tags(CaseLabel.L2) @@ -991,16 +982,16 @@ def test_milvus_client_has_collection_deleted_collection(self): method: create collection expected: drop successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. 
create collection - client_w.create_collection(client, collection_name, default_dim) - client_w.drop_collection(client, collection_name) - result = client_w.has_collection(client, collection_name)[0] + self.create_collection(client, collection_name, default_dim) + self.drop_collection(client, collection_name) + result = self.has_collection(client, collection_name)[0] assert result == False -class TestMilvusClientRenameCollectionInValid(TestcaseBase): +class TestMilvusClientRenameCollectionInValid(TestMilvusClientV2Base): """ Test case of rename collection interface """ """ @@ -1017,10 +1008,10 @@ def test_milvus_client_rename_collection_invalid_collection_name(self, name): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() error = {ct.err_code: 100, ct.err_msg: f"collection not found[database=1][collection={name}]"} - client_w.rename_collection(client, name, "new_collection", - check_task=CheckTasks.err_res, check_items=error) + self.rename_collection(client, name, "new_collection", + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_rename_collection_not_existed_collection(self): @@ -1029,11 +1020,11 @@ def test_milvus_client_rename_collection_not_existed_collection(self): method: create collection expected: drop successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = "nonexisted" error = {ct.err_code: 100, ct.err_msg: f"collection not found[database=1][collection={collection_name}]"} - client_w.rename_collection(client, collection_name, "new_collection", - check_task=CheckTasks.err_res, check_items=error) + self.rename_collection(client, collection_name, "new_collection", + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) def 
test_milvus_client_rename_collection_duplicated_collection(self): @@ -1042,14 +1033,14 @@ def test_milvus_client_rename_collection_duplicated_collection(self): method: create collection expected: drop successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) error = {ct.err_code: 65535, ct.err_msg: f"duplicated new collection name default:{collection_name} " f"with other collection name or alias"} - client_w.rename_collection(client, collection_name, collection_name, - check_task=CheckTasks.err_res, check_items=error) + self.rename_collection(client, collection_name, collection_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_rename_deleted_collection(self): @@ -1058,17 +1049,17 @@ def test_milvus_client_rename_deleted_collection(self): method: create collection expected: drop successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. 
create collection - client_w.create_collection(client, collection_name, default_dim) - client_w.drop_collection(client, collection_name) + self.create_collection(client, collection_name, default_dim) + self.drop_collection(client, collection_name) error = {ct.err_code: 100, ct.err_msg: f"{collection_name}: collection not found[collection=default]"} - client_w.rename_collection(client, collection_name, "new_collection", - check_task=CheckTasks.err_res, check_items=error) + self.rename_collection(client, collection_name, "new_collection", + check_task=CheckTasks.err_res, check_items=error) -class TestMilvusClientRenameCollectionValid(TestcaseBase): +class TestMilvusClientRenameCollectionValid(TestMilvusClientV2Base): """ Test case of rename collection interface """ """ @@ -1084,15 +1075,15 @@ def test_milvus_client_rename_collection_multiple_times(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 2. 
rename with invalid new_name new_name = "new_name_rename" - client_w.create_collection(client, collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) times = 3 for _ in range(times): - client_w.rename_collection(client, collection_name, new_name) - client_w.rename_collection(client, new_name, collection_name) + self.rename_collection(client, collection_name, new_name) + self.rename_collection(client, new_name, collection_name) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_rename_collection_deleted_collection(self): @@ -1101,19 +1092,19 @@ def test_milvus_client_rename_collection_deleted_collection(self): method: create collection expected: drop successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) another_collection_name = cf.gen_unique_str("another_collection") # 1. create 2 collections - client_w.create_collection(client, collection_name, default_dim) - client_w.create_collection(client, another_collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) + self.create_collection(client, another_collection_name, default_dim) # 2. drop one collection - client_w.drop_collection(client, another_collection_name) + self.drop_collection(client, another_collection_name) # 3. 
rename to dropped collection - client_w.rename_collection(client, collection_name, another_collection_name) + self.rename_collection(client, collection_name, another_collection_name) -class TestMilvusClientUsingDatabaseInvalid(TestcaseBase): +class TestMilvusClientUsingDatabaseInvalid(TestMilvusClientV2Base): """ Test case of using database interface """ """ @@ -1121,6 +1112,7 @@ class TestMilvusClientUsingDatabaseInvalid(TestcaseBase): # The following are invalid base cases ****************************************************************** """ + @pytest.mark.tags(CaseLabel.L2) @pytest.mark.xfail(reason="pymilvus issue 1900") @pytest.mark.parametrize("db_name", ["12-s", "12 s", "(mn)", "中文", "%$#"]) @@ -1130,11 +1122,11 @@ def test_milvus_client_using_database_not_exist_db_name(self, db_name): method: create collection expected: drop successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() # db_name = cf.gen_unique_str("nonexisted") error = {ct.err_code: 999, ct.err_msg: f"database not found[database={db_name}]"} - client_w.using_database(client, db_name, - check_task=CheckTasks.err_res, check_items=error) + self.using_database(client, db_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) @pytest.mark.skip(reason="# this case is dup to using a non exist db name, try to add one for create database") diff --git a/tests/python_client/milvus_client/test_milvus_client_delete.py b/tests/python_client/milvus_client/test_milvus_client_delete.py index 6159668155788..96833bd33374e 100644 --- a/tests/python_client/milvus_client/test_milvus_client_delete.py +++ b/tests/python_client/milvus_client/test_milvus_client_delete.py @@ -1,27 +1,13 @@ -import multiprocessing -import numbers -import random -import numpy -import threading import pytest -import pandas as pd -import decimal -from decimal import Decimal, getcontext -from time import sleep -import heapq -from base.client_base import TestcaseBase 
+from base.client_v2_base import TestMilvusClientV2Base from utils.util_log import test_log as log from common import common_func as cf from common import common_type as ct from common.common_type import CaseLabel, CheckTasks from utils.util_pymilvus import * -from common.constants import * -from pymilvus.orm.types import CONSISTENCY_STRONG, CONSISTENCY_BOUNDED, CONSISTENCY_SESSION, CONSISTENCY_EVENTUALLY -from base.high_level_api_wrapper import HighLevelApiWrapper -client_w = HighLevelApiWrapper() -prefix = "milvus_client_api_delete" +prefix = "client_delete" epsilon = ct.epsilon default_nb = ct.default_nb default_nb_medium = ct.default_nb_medium @@ -46,7 +32,7 @@ default_string_array_field_name = ct.default_string_array_field_name -class TestMilvusClientDeleteInvalid(TestcaseBase): +class TestMilvusClientDeleteInvalid(TestMilvusClientV2Base): """ Test case of search interface """ @pytest.fixture(scope="function", params=[False, True]) @@ -62,6 +48,7 @@ def metric_type(self, request): # The following are invalid base cases ****************************************************************** """ + @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_delete_with_filters_and_ids(self): """ @@ -69,24 +56,24 @@ def test_milvus_client_delete_with_filters_and_ids(self): method: create connection, collection, insert, delete, and search expected: raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. 
insert default_nb = 1000 rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - pks = client_w.insert(client, collection_name, rows)[0] + pks = self.insert(client, collection_name, rows)[0] # 3. delete delete_num = 3 - client_w.delete(client, collection_name, ids=[i for i in range(delete_num)], filter=f"id < {delete_num}", - check_task=CheckTasks.err_res, - check_items={"err_code": 1, - "err_msg": "Ambiguous filter parameter, " - "only one deletion condition can be specified."}) - client_w.drop_collection(client, collection_name) + self.delete(client, collection_name, ids=[i for i in range(delete_num)], filter=f"id < {delete_num}", + check_task=CheckTasks.err_res, + check_items={"err_code": 1, + "err_msg": "Ambiguous filter parameter, " + "only one deletion condition can be specified."}) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) @pytest.mark.xfail(reason="pymilvus issue 1869") @@ -96,15 +83,15 @@ def test_milvus_client_delete_with_invalid_id_type(self): method: create connection, collection, insert delete, and search expected: search/query successfully without deleted data """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. 
delete - client_w.delete(client, collection_name, ids=0, - check_task=CheckTasks.err_res, - check_items={"err_code": 1, - "err_msg": "expr cannot be empty"}) + self.delete(client, collection_name, ids=0, + check_task=CheckTasks.err_res, + check_items={"err_code": 1, + "err_msg": "expr cannot be empty"}) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_delete_with_not_all_required_params(self): @@ -113,18 +100,18 @@ def test_milvus_client_delete_with_not_all_required_params(self): method: create connection, collection, insert delete, and search expected: search/query successfully without deleted data """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. delete - client_w.delete(client, collection_name, - check_task=CheckTasks.err_res, - check_items={"err_code": 999, - "err_msg": "The type of expr must be string ,but is given."}) + self.delete(client, collection_name, + check_task=CheckTasks.err_res, + check_items={"err_code": 999, + "err_msg": "The type of expr must be string ,but is given."}) -class TestMilvusClientDeleteValid(TestcaseBase): +class TestMilvusClientDeleteValid(TestMilvusClientV2Base): """ Test case of search interface """ @pytest.fixture(scope="function", params=[False, True]) @@ -148,19 +135,19 @@ def test_milvus_client_delete_with_ids(self): method: create connection, collection, insert delete, and search expected: search/query successfully without deleted data """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. 
create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. insert default_nb = 1000 rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - pks = client_w.insert(client, collection_name, rows)[0] + pks = self.insert(client, collection_name, rows)[0] # 3. delete delete_num = 3 - client_w.delete(client, collection_name, ids=[i for i in range(delete_num)]) + self.delete(client, collection_name, ids=[i for i in range(delete_num)]) # 4. search vectors_to_search = rng.random((1, default_dim)) insert_ids = [i for i in range(default_nb)] @@ -168,19 +155,19 @@ def test_milvus_client_delete_with_ids(self): if insert_id in insert_ids: insert_ids.remove(insert_id) limit = default_nb - delete_num - client_w.search(client, collection_name, vectors_to_search, limit=default_nb, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": insert_ids, - "limit": limit}) + self.search(client, collection_name, vectors_to_search, limit=default_nb, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": insert_ids, + "limit": limit}) # 5. 
query - client_w.query(client, collection_name, filter=default_search_exp, - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows[delete_num:], - "with_vec": True, - "primary_field": default_primary_key_field_name}) - client_w.drop_collection(client, collection_name) + self.query(client, collection_name, filter=default_search_exp, + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows[delete_num:], + "with_vec": True, + "primary_field": default_primary_key_field_name}) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_delete_with_filters(self): @@ -189,19 +176,19 @@ def test_milvus_client_delete_with_filters(self): method: create connection, collection, insert delete, and search expected: search/query successfully without deleted data """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. insert default_nb = 1000 rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - pks = client_w.insert(client, collection_name, rows)[0] + pks = self.insert(client, collection_name, rows)[0] # 3. delete delete_num = 3 - client_w.delete(client, collection_name, filter=f"id < {delete_num}") + self.delete(client, collection_name, filter=f"id < {delete_num}") # 4. 
search vectors_to_search = rng.random((1, default_dim)) insert_ids = [i for i in range(default_nb)] @@ -209,19 +196,19 @@ def test_milvus_client_delete_with_filters(self): if insert_id in insert_ids: insert_ids.remove(insert_id) limit = default_nb - delete_num - client_w.search(client, collection_name, vectors_to_search, limit=default_nb, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": insert_ids, - "limit": limit}) + self.search(client, collection_name, vectors_to_search, limit=default_nb, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": insert_ids, + "limit": limit}) # 5. query - client_w.query(client, collection_name, filter=default_search_exp, - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows[delete_num:], - "with_vec": True, - "primary_field": default_primary_key_field_name}) - client_w.drop_collection(client, collection_name) + self.query(client, collection_name, filter=default_search_exp, + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows[delete_num:], + "with_vec": True, + "primary_field": default_primary_key_field_name}) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_delete_with_filters_partition(self): @@ -230,21 +217,21 @@ def test_milvus_client_delete_with_filters_partition(self): method: create connection, collection, insert delete, and search expected: search/query successfully without deleted data """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. 
insert default_nb = 1000 rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - pks = client_w.insert(client, collection_name, rows)[0] + pks = self.insert(client, collection_name, rows)[0] # 3. get partition lists - partition_names = client_w.list_partitions(client, collection_name) + partition_names = self.list_partitions(client, collection_name) # 4. delete delete_num = 3 - client_w.delete(client, collection_name, filter=f"id < {delete_num}", partition_names=partition_names) + self.delete(client, collection_name, filter=f"id < {delete_num}", partition_names=partition_names) # 5. search vectors_to_search = rng.random((1, default_dim)) insert_ids = [i for i in range(default_nb)] @@ -252,16 +239,16 @@ def test_milvus_client_delete_with_filters_partition(self): if insert_id in insert_ids: insert_ids.remove(insert_id) limit = default_nb - delete_num - client_w.search(client, collection_name, vectors_to_search, limit=default_nb, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": insert_ids, - "limit": limit}) + self.search(client, collection_name, vectors_to_search, limit=default_nb, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": insert_ids, + "limit": limit}) # 6. 
query - client_w.query(client, collection_name, filter=default_search_exp, - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows[delete_num:], - "with_vec": True, - "primary_field": default_primary_key_field_name}) - client_w.drop_collection(client, collection_name) \ No newline at end of file + self.query(client, collection_name, filter=default_search_exp, + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows[delete_num:], + "with_vec": True, + "primary_field": default_primary_key_field_name}) + self.drop_collection(client, collection_name) diff --git a/tests/python_client/milvus_client/test_milvus_client_index.py b/tests/python_client/milvus_client/test_milvus_client_index.py index ac718528484e5..bc19b92e79ef5 100644 --- a/tests/python_client/milvus_client/test_milvus_client_index.py +++ b/tests/python_client/milvus_client/test_milvus_client_index.py @@ -1,27 +1,13 @@ -import multiprocessing -import numbers -import random -import numpy -import threading import pytest -import pandas as pd -import decimal -from decimal import Decimal, getcontext -from time import sleep -import heapq -from base.client_base import TestcaseBase +from base.client_v2_base import TestMilvusClientV2Base from utils.util_log import test_log as log from common import common_func as cf from common import common_type as ct from common.common_type import CaseLabel, CheckTasks from utils.util_pymilvus import * -from common.constants import * -from pymilvus.orm.types import CONSISTENCY_STRONG, CONSISTENCY_BOUNDED, CONSISTENCY_SESSION, CONSISTENCY_EVENTUALLY -from base.high_level_api_wrapper import HighLevelApiWrapper -client_w = HighLevelApiWrapper() -prefix = "milvus_client_api_index" +prefix = "client_index" epsilon = ct.epsilon default_nb = ct.default_nb default_nb_medium = ct.default_nb_medium @@ -47,7 +33,7 @@ default_string_array_field_name = ct.default_string_array_field_name -class TestMilvusClientIndexInvalid(TestcaseBase): +class 
TestMilvusClientIndexInvalid(TestMilvusClientV2Base): """ Test case of search interface """ @pytest.fixture(scope="function", params=[False, True]) @@ -72,21 +58,21 @@ def test_milvus_client_index_invalid_collection_name(self, name): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") - client_w.release_collection(client, collection_name) - client_w.drop_index(client, collection_name, "vector") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.release_collection(client, collection_name) + self.drop_index(client, collection_name, "vector") # 2. prepare index params - index_params = client_w.prepare_index_params(client)[0] - index_params.add_index(field_name = "vector") + index_params = self.prepare_index_params(client)[0] + index_params.add_index(field_name="vector") # 3. create index error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {name}. 
the first character of a collection " f"name must be an underscore or letter: invalid parameter"} - client_w.create_index(client, name, index_params, - check_task=CheckTasks.err_res, check_items=error) - client_w.drop_collection(client, collection_name) + self.create_index(client, name, index_params, + check_task=CheckTasks.err_res, check_items=error) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) @pytest.mark.parametrize("name", ["a".join("a" for i in range(256))]) @@ -96,21 +82,21 @@ def test_milvus_client_index_collection_name_over_max_length(self, name): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") - client_w.release_collection(client, collection_name) - client_w.drop_index(client, collection_name, "vector") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.release_collection(client, collection_name) + self.drop_index(client, collection_name, "vector") # 2. prepare index params - index_params = client_w.prepare_index_params(client)[0] - index_params.add_index(field_name = "vector") + index_params = self.prepare_index_params(client)[0] + index_params.add_index(field_name="vector") # 3. create index error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {name}. 
the length of a collection name " f"must be less than 255 characters: invalid parameter"} - client_w.create_index(client, name, index_params, - check_task=CheckTasks.err_res, check_items=error) - client_w.drop_collection(client, collection_name) + self.create_index(client, name, index_params, + check_task=CheckTasks.err_res, check_items=error) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_index_not_exist_collection_name(self): @@ -119,21 +105,22 @@ def test_milvus_client_index_not_exist_collection_name(self): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) not_existed_collection_name = cf.gen_unique_str("not_existed_collection") # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") - client_w.release_collection(client, collection_name) - client_w.drop_index(client, collection_name, "vector") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.release_collection(client, collection_name) + self.drop_index(client, collection_name, "vector") # 2. prepare index params - index_params = client_w.prepare_index_params(client)[0] + index_params = self.prepare_index_params(client)[0] index_params.add_index(field_name="vector") # 3. 
create index - error = {ct.err_code: 100, ct.err_msg: f"can't find collection[database=default][collection={not_existed_collection_name}]"} - client_w.create_index(client, not_existed_collection_name, index_params, - check_task=CheckTasks.err_res, check_items=error) - client_w.drop_collection(client, collection_name) + error = {ct.err_code: 100, + ct.err_msg: f"can't find collection[database=default][collection={not_existed_collection_name}]"} + self.create_index(client, not_existed_collection_name, index_params, + check_task=CheckTasks.err_res, check_items=error) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) @pytest.mark.skip(reason="pymilvus issue 1885") @@ -144,21 +131,21 @@ def test_milvus_client_index_invalid_index_type(self, index): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") - client_w.release_collection(client, collection_name) - client_w.drop_index(client, collection_name, "vector") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.release_collection(client, collection_name) + self.drop_index(client, collection_name, "vector") # 2. prepare index params - index_params = client_w.prepare_index_params(client)[0] - index_params.add_index(field_name = "vector", index_type=index) + index_params = self.prepare_index_params(client)[0] + index_params.add_index(field_name="vector", index_type=index) # 3. 
create index error = {ct.err_code: 100, ct.err_msg: f"can't find collection collection not " f"found[database=default][collection=not_existed]"} - client_w.create_index(client, collection_name, index_params, - check_task=CheckTasks.err_res, check_items=error) - client_w.drop_collection(client, collection_name) + self.create_index(client, collection_name, index_params, + check_task=CheckTasks.err_res, check_items=error) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) @pytest.mark.skip(reason="pymilvus issue 1885") @@ -169,21 +156,21 @@ def test_milvus_client_index_invalid_metric_type(self, metric): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") - client_w.release_collection(client, collection_name) - client_w.drop_index(client, collection_name, "vector") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.release_collection(client, collection_name) + self.drop_index(client, collection_name, "vector") # 2. prepare index params - index_params = client_w.prepare_index_params(client)[0] - index_params.add_index(field_name = "vector", metric_type = metric) + index_params = self.prepare_index_params(client)[0] + index_params.add_index(field_name="vector", metric_type=metric) # 3. 
create index error = {ct.err_code: 100, ct.err_msg: f"can't find collection collection not " f"found[database=default][collection=not_existed]"} - client_w.create_index(client, collection_name, index_params, - check_task=CheckTasks.err_res, check_items=error) - client_w.drop_collection(client, collection_name) + self.create_index(client, collection_name, index_params, + check_task=CheckTasks.err_res, check_items=error) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_index_drop_index_before_release(self): @@ -192,15 +179,15 @@ def test_milvus_client_index_drop_index_before_release(self): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") error = {ct.err_code: 65535, ct.err_msg: f"index cannot be dropped, collection is loaded, " f"please release it first"} - client_w.drop_index(client, collection_name, "vector", - check_task=CheckTasks.err_res, check_items=error) - client_w.drop_collection(client, collection_name) + self.drop_index(client, collection_name, "vector", + check_task=CheckTasks.err_res, check_items=error) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) @pytest.mark.skip(reason="pymilvus issue 1886") @@ -210,23 +197,23 @@ def test_milvus_client_index_multiple_indexes_one_field(self): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. 
create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. prepare index params - index_params = client_w.prepare_index_params(client)[0] - index_params.add_index(field_name = "vector", index_type="HNSW", metric_type="IP") + index_params = self.prepare_index_params(client)[0] + index_params.add_index(field_name="vector", index_type="HNSW", metric_type="IP") # 3. create index - client_w.create_index(client, collection_name, index_params) + self.create_index(client, collection_name, index_params) # 4. prepare index params - index_params = client_w.prepare_index_params(client)[0] + index_params = self.prepare_index_params(client)[0] index_params.add_index(field_name="vector", index_type="IVF_FLAT", metric_type="L2") error = {ct.err_code: 1100, ct.err_msg: f""} # 5. create another index - client_w.create_index(client, collection_name, index_params, - check_task=CheckTasks.err_res, check_items=error) - client_w.drop_collection(client, collection_name) + self.create_index(client, collection_name, index_params, + check_task=CheckTasks.err_res, check_items=error) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) @pytest.mark.skip(reason="pymilvus issue 1886") @@ -236,19 +223,19 @@ def test_milvus_client_create_diff_index_without_release(self): method: create connection, collection, insert and search expected: raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. 
prepare index params - index_params = client_w.prepare_index_params(client)[0] - index_params.add_index(field_name = "vector", index_type="HNSW", metric_type="L2") + index_params = self.prepare_index_params(client)[0] + index_params.add_index(field_name="vector", index_type="HNSW", metric_type="L2") # 3. create index - client_w.create_index(client, collection_name, index_params) - client_w.drop_collection(client, collection_name) + self.create_index(client, collection_name, index_params) + self.drop_collection(client, collection_name) -class TestMilvusClientIndexValid(TestcaseBase): +class TestMilvusClientIndexValid(TestMilvusClientV2Base): """ Test case of search interface """ @pytest.fixture(scope="function", params=[False, True]) @@ -280,44 +267,44 @@ def test_milvus_client_index_default(self, index, params, metric_type): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") - client_w.release_collection(client, collection_name) - client_w.drop_index(client, collection_name, "vector") - res = client_w.list_indexes(client, collection_name)[0] + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.release_collection(client, collection_name) + self.drop_index(client, collection_name, "vector") + res = self.list_indexes(client, collection_name)[0] assert res == [] # 2. prepare index params - index_params = client_w.prepare_index_params(client)[0] + index_params = self.prepare_index_params(client)[0] index_params.add_index(field_name="vector", index_type=index, metric_type=metric_type) # 3. create index - client_w.create_index(client, collection_name, index_params) + self.create_index(client, collection_name, index_params) # 4. 
create same index twice - client_w.create_index(client, collection_name, index_params) + self.create_index(client, collection_name, index_params) # 5. insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - client_w.insert(client, collection_name, rows) + self.insert(client, collection_name, rows) # 6. load collection - client_w.load_collection(client, collection_name) + self.load_collection(client, collection_name) # 7. search vectors_to_search = rng.random((1, default_dim)) insert_ids = [i for i in range(default_nb)] - client_w.search(client, collection_name, vectors_to_search, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": insert_ids, - "limit": default_limit}) + self.search(client, collection_name, vectors_to_search, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": insert_ids, + "limit": default_limit}) # 8. 
query - client_w.query(client, collection_name, filter=default_search_exp, - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows, - "with_vec": True, - "primary_field": default_primary_key_field_name}) - client_w.drop_collection(client, collection_name) + self.query(client, collection_name, filter=default_search_exp, + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows, + "with_vec": True, + "primary_field": default_primary_key_field_name}) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L2) @pytest.mark.skip(reason="pymilvus issue 1884") @@ -330,42 +317,42 @@ def test_milvus_client_index_with_params(self, index, params, metric_type): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") - client_w.release_collection(client, collection_name) - client_w.drop_index(client, collection_name, "vector") - res = client_w.list_indexes(client, collection_name)[0] + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.release_collection(client, collection_name) + self.drop_index(client, collection_name, "vector") + res = self.list_indexes(client, collection_name)[0] assert res == [] # 2. prepare index params - index_params = client_w.prepare_index_params(client)[0] - index_params.add_index(field_name = "vector", index_type=index, params=params,metric_type = metric_type) + index_params = self.prepare_index_params(client)[0] + index_params.add_index(field_name="vector", index_type=index, params=params, metric_type=metric_type) # 3. create index - client_w.create_index(client, collection_name, index_params) + self.create_index(client, collection_name, index_params) # 4. 
insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - client_w.insert(client, collection_name, rows) + self.insert(client, collection_name, rows) # 5. load collection - client_w.load_collection(client, collection_name) + self.load_collection(client, collection_name) # 6. search vectors_to_search = rng.random((1, default_dim)) insert_ids = [i for i in range(default_nb)] - client_w.search(client, collection_name, vectors_to_search, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": insert_ids, - "limit": default_limit}) + self.search(client, collection_name, vectors_to_search, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": insert_ids, + "limit": default_limit}) # 7. query - client_w.query(client, collection_name, filter=default_search_exp, - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows, - "with_vec": True, - "primary_field": default_primary_key_field_name}) - client_w.drop_collection(client, collection_name) + self.query(client, collection_name, filter=default_search_exp, + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows, + "with_vec": True, + "primary_field": default_primary_key_field_name}) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L2) @pytest.mark.skip("wait for modification") @@ -378,40 +365,40 @@ def test_milvus_client_index_after_insert(self, index, params, metric_type): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. 
create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") - client_w.release_collection(client, collection_name) - client_w.drop_index(client, collection_name, "vector") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.release_collection(client, collection_name) + self.drop_index(client, collection_name, "vector") # 2. insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - client_w.insert(client, collection_name, rows) + self.insert(client, collection_name, rows) # 3. prepare index params - index_params = client_w.prepare_index_params(client)[0] - index_params.add_index(field_name = "vector", index_type=index, metric_type = metric_type) + index_params = self.prepare_index_params(client)[0] + index_params.add_index(field_name="vector", index_type=index, metric_type=metric_type) # 4. create index - client_w.create_index(client, collection_name, index_params) + self.create_index(client, collection_name, index_params) # 5. load collection - client_w.load_collection(client, collection_name) + self.load_collection(client, collection_name) # 5. search vectors_to_search = rng.random((1, default_dim)) insert_ids = [i for i in range(default_nb)] - client_w.search(client, collection_name, vectors_to_search, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": insert_ids, - "limit": default_limit}) + self.search(client, collection_name, vectors_to_search, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": insert_ids, + "limit": default_limit}) # 4. 
query - client_w.query(client, collection_name, filter=default_search_exp, - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows, - "with_vec": True, - "primary_field": default_primary_key_field_name}) - client_w.drop_collection(client, collection_name) + self.query(client, collection_name, filter=default_search_exp, + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows, + "with_vec": True, + "primary_field": default_primary_key_field_name}) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) @pytest.mark.skip("wait for modification") @@ -421,49 +408,49 @@ def test_milvus_client_index_auto_index(self, scalar_index, metric_type): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") - client_w.release_collection(client, collection_name) - client_w.drop_index(client, collection_name, "vector") - res = client_w.list_indexes(client, collection_name)[0] + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.release_collection(client, collection_name) + self.drop_index(client, collection_name, "vector") + res = self.list_indexes(client, collection_name)[0] assert res == [] # 2. prepare index params index = "AUTOINDEX" - index_params = client_w.prepare_index_params(client)[0] - index_params.add_index(field_name = "vector", index_type=index, metric_type = metric_type) + index_params = self.prepare_index_params(client)[0] + index_params.add_index(field_name="vector", index_type=index, metric_type=metric_type) index_params.add_index(field_name="id", index_type=scalar_index, metric_type=metric_type) # 3. 
create index - client_w.create_index(client, collection_name, index_params) + self.create_index(client, collection_name, index_params) # 4. drop index - client_w.drop_index(client, collection_name, "vector") - client_w.drop_index(client, collection_name, "id") + self.drop_index(client, collection_name, "vector") + self.drop_index(client, collection_name, "id") # 5. create index - client_w.create_index(client, collection_name, index_params) + self.create_index(client, collection_name, index_params) # 6. insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - client_w.insert(client, collection_name, rows) + self.insert(client, collection_name, rows) # 7. load collection - client_w.load_collection(client, collection_name) + self.load_collection(client, collection_name) # 8. search vectors_to_search = rng.random((1, default_dim)) insert_ids = [i for i in range(default_nb)] - client_w.search(client, collection_name, vectors_to_search, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": insert_ids, - "limit": default_limit}) + self.search(client, collection_name, vectors_to_search, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": insert_ids, + "limit": default_limit}) # 9. 
query - client_w.query(client, collection_name, filter=default_search_exp, - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows, - "with_vec": True, - "primary_field": default_primary_key_field_name}) - client_w.drop_collection(client, collection_name) + self.query(client, collection_name, filter=default_search_exp, + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows, + "with_vec": True, + "primary_field": default_primary_key_field_name}) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L2) @pytest.mark.skip("wait for modification") @@ -473,45 +460,45 @@ def test_milvus_client_index_multiple_vectors(self, scalar_index, metric_type): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") - client_w.release_collection(client, collection_name) - client_w.drop_index(client, collection_name, "vector") - res = client_w.list_indexes(client, collection_name)[0] + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.release_collection(client, collection_name) + self.drop_index(client, collection_name, "vector") + res = self.list_indexes(client, collection_name)[0] assert res == [] # 2. prepare index params index = "AUTOINDEX" - index_params = client_w.prepare_index_params(client)[0] - index_params.add_index(field_name = "vector", index_type=index, metric_type = metric_type) + index_params = self.prepare_index_params(client)[0] + index_params.add_index(field_name="vector", index_type=index, metric_type=metric_type) index_params.add_index(field_name="id", index_type=scalar_index, metric_type=metric_type) # 3. 
create index - client_w.create_index(client, collection_name, index_params) + self.create_index(client, collection_name, index_params) # 4. insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i), default_multiple_vector_field_name: list(rng.random((1, default_dim))[0])} for i in range(default_nb)] - client_w.insert(client, collection_name, rows) + self.insert(client, collection_name, rows) # 5. load collection - client_w.load_collection(client, collection_name) + self.load_collection(client, collection_name) # 6. search vectors_to_search = rng.random((1, default_dim)) insert_ids = [i for i in range(default_nb)] - client_w.search(client, collection_name, vectors_to_search, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": insert_ids, - "limit": default_limit}) + self.search(client, collection_name, vectors_to_search, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": insert_ids, + "limit": default_limit}) # 7. 
query - client_w.query(client, collection_name, filter=default_search_exp, - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows, - "with_vec": True, - "primary_field": default_primary_key_field_name}) - client_w.drop_collection(client, collection_name) + self.query(client, collection_name, filter=default_search_exp, + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows, + "with_vec": True, + "primary_field": default_primary_key_field_name}) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L2) @pytest.mark.skip("wait for modification") @@ -524,46 +511,46 @@ def test_milvus_client_index_drop_create_same_index(self, index, params, metric_ method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") - client_w.release_collection(client, collection_name) - client_w.drop_index(client, collection_name, "vector") - res = client_w.list_indexes(client, collection_name)[0] + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.release_collection(client, collection_name) + self.drop_index(client, collection_name, "vector") + res = self.list_indexes(client, collection_name)[0] assert res == [] # 2. prepare index params - index_params = client_w.prepare_index_params(client)[0] - index_params.add_index(field_name = "vector", index_type=index, metric_type = metric_type) + index_params = self.prepare_index_params(client)[0] + index_params.add_index(field_name="vector", index_type=index, metric_type=metric_type) # 3. create index - client_w.create_index(client, collection_name, index_params) + self.create_index(client, collection_name, index_params) # 4. 
drop index - client_w.drop_index(client, collection_name, "vector") + self.drop_index(client, collection_name, "vector") # 4. create same index twice - client_w.create_index(client, collection_name, index_params) + self.create_index(client, collection_name, index_params) # 5. insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - client_w.insert(client, collection_name, rows) + self.insert(client, collection_name, rows) # 6. load collection - client_w.load_collection(client, collection_name) + self.load_collection(client, collection_name) # 7. search vectors_to_search = rng.random((1, default_dim)) insert_ids = [i for i in range(default_nb)] - client_w.search(client, collection_name, vectors_to_search, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": insert_ids, - "limit": default_limit}) + self.search(client, collection_name, vectors_to_search, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": insert_ids, + "limit": default_limit}) # 8. 
query - client_w.query(client, collection_name, filter=default_search_exp, - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows, - "with_vec": True, - "primary_field": default_primary_key_field_name}) - client_w.drop_collection(client, collection_name) + self.query(client, collection_name, filter=default_search_exp, + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows, + "with_vec": True, + "primary_field": default_primary_key_field_name}) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L2) @pytest.mark.skip("wait for modification") @@ -576,44 +563,44 @@ def test_milvus_client_index_drop_create_different_index(self, index, params, me method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") - client_w.release_collection(client, collection_name) - client_w.drop_index(client, collection_name, "vector") - res = client_w.list_indexes(client, collection_name)[0] + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.release_collection(client, collection_name) + self.drop_index(client, collection_name, "vector") + res = self.list_indexes(client, collection_name)[0] assert res == [] # 2. prepare index params - index_params = client_w.prepare_index_params(client)[0] + index_params = self.prepare_index_params(client)[0] index_params.add_index(field_name="vector", metric_type=metric_type) # 3. create index - client_w.create_index(client, collection_name, index_params) + self.create_index(client, collection_name, index_params) # 4. drop index - client_w.drop_index(client, collection_name, "vector") + self.drop_index(client, collection_name, "vector") # 4. 
create different index index_params.add_index(field_name="vector", index_type=index, metric_type=metric_type) - client_w.create_index(client, collection_name, index_params) + self.create_index(client, collection_name, index_params) # 5. insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - client_w.insert(client, collection_name, rows) + self.insert(client, collection_name, rows) # 6. load collection - client_w.load_collection(client, collection_name) + self.load_collection(client, collection_name) # 7. search vectors_to_search = rng.random((1, default_dim)) insert_ids = [i for i in range(default_nb)] - client_w.search(client, collection_name, vectors_to_search, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": insert_ids, - "limit": default_limit}) + self.search(client, collection_name, vectors_to_search, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": insert_ids, + "limit": default_limit}) # 8. 
query - client_w.query(client, collection_name, filter=default_search_exp, - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows, - "with_vec": True, - "primary_field": default_primary_key_field_name}) - client_w.drop_collection(client, collection_name) \ No newline at end of file + self.query(client, collection_name, filter=default_search_exp, + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows, + "with_vec": True, + "primary_field": default_primary_key_field_name}) + self.drop_collection(client, collection_name) diff --git a/tests/python_client/milvus_client/test_milvus_client_insert.py b/tests/python_client/milvus_client/test_milvus_client_insert.py index c42d68d16dbe4..65db0c9abe7bb 100644 --- a/tests/python_client/milvus_client/test_milvus_client_insert.py +++ b/tests/python_client/milvus_client/test_milvus_client_insert.py @@ -1,27 +1,13 @@ -import multiprocessing -import numbers -import random -import numpy -import threading import pytest -import pandas as pd -import decimal -from decimal import Decimal, getcontext -from time import sleep -import heapq -from base.client_base import TestcaseBase +from base.client_v2_base import TestMilvusClientV2Base from utils.util_log import test_log as log from common import common_func as cf from common import common_type as ct from common.common_type import CaseLabel, CheckTasks from utils.util_pymilvus import * -from common.constants import * -from pymilvus.orm.types import CONSISTENCY_STRONG, CONSISTENCY_BOUNDED, CONSISTENCY_SESSION, CONSISTENCY_EVENTUALLY -from base.high_level_api_wrapper import HighLevelApiWrapper -client_w = HighLevelApiWrapper() -prefix = "milvus_client_api_insert" +prefix = "client_insert" epsilon = ct.epsilon default_nb = ct.default_nb default_nb_medium = ct.default_nb_medium @@ -46,7 +32,7 @@ default_string_array_field_name = ct.default_string_array_field_name -class TestMilvusClientInsertInvalid(TestcaseBase): +class 
TestMilvusClientInsertInvalid(TestMilvusClientV2Base): """ Test case of search interface """ @pytest.fixture(scope="function", params=[False, True]) @@ -71,17 +57,17 @@ def test_milvus_client_insert_column_data(self): method: create connection, collection, insert and search expected: raise error """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) # 2. insert vectors = [[random.random() for _ in range(default_dim)] for _ in range(default_nb)] data = [[i for i in range(default_nb)], vectors] error = {ct.err_code: 1, ct.err_msg: "Unexpected error, message=<'list' object has no attribute 'items'"} - client_w.insert(client, collection_name, data, - check_task=CheckTasks.err_res, check_items=error) - client_w.drop_collection(client, collection_name) + self.insert(client, collection_name, data, + check_task=CheckTasks.err_res, check_items=error) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_insert_empty_collection_name(self): @@ -90,14 +76,14 @@ def test_milvus_client_insert_empty_collection_name(self): method: create collection with invalid primary field expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = "" rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] error = {ct.err_code: 1, ct.err_msg: f"`collection_name` value {collection_name} is illegal"} - client_w.insert(client, collection_name, rows, - check_task=CheckTasks.err_res, check_items=error) + self.insert(client, collection_name, rows, + 
check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) @pytest.mark.parametrize("collection_name", ["12-s", "12 s", "(mn)", "中文", "%$#"]) @@ -107,14 +93,14 @@ def test_milvus_client_insert_invalid_collection_name(self, collection_name): method: create collection with invalid primary field expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {collection_name}. the first character of a " f"collection name must be an underscore or letter: invalid parameter"} - client_w.insert(client, collection_name, rows, - check_task=CheckTasks.err_res, check_items=error) + self.insert(client, collection_name, rows, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_insert_collection_name_over_max_length(self): @@ -123,14 +109,14 @@ def test_milvus_client_insert_collection_name_over_max_length(self): method: create collection with invalid primary field expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = "a".join("a" for i in range(256)) rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] error = {ct.err_code: 1100, ct.err_msg: f"the length of a collection name must be less than 255 characters"} - client_w.insert(client, collection_name, rows, - check_task=CheckTasks.err_res, check_items=error) + self.insert(client, collection_name, rows, + 
check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_insert_not_exist_collection_name(self): @@ -139,14 +125,14 @@ def test_milvus_client_insert_not_exist_collection_name(self): method: create collection with invalid primary field expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str("insert_not_exist") rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] error = {ct.err_code: 100, ct.err_msg: f"can't find collection[database=default][collection={collection_name}]"} - client_w.insert(client, collection_name, rows, - check_task=CheckTasks.err_res, check_items=error) + self.insert(client, collection_name, rows, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) @pytest.mark.parametrize("data", ["12-s", "12 s", "(mn)", "中文", "%$#", " ", ""]) @@ -156,15 +142,15 @@ def test_milvus_client_insert_data_invalid_type(self, data): method: create collection with invalid primary field expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. 
insert error = {ct.err_code: 999, ct.err_msg: "wrong type of argument 'data',expected 'Dict' or list of 'Dict', got 'str'"} - client_w.insert(client, collection_name, data, - check_task=CheckTasks.err_res, check_items=error) + self.insert(client, collection_name, data, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_insert_data_vector_field_missing(self): @@ -173,10 +159,10 @@ def test_milvus_client_insert_data_vector_field_missing(self): method: create collection with invalid primary field expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, @@ -184,8 +170,8 @@ def test_milvus_client_insert_data_vector_field_missing(self): error = {ct.err_code: 1, ct.err_msg: f"Insert missed an field `vector` to collection " f"without set nullable==true or set default_value"} - client_w.insert(client, collection_name, data=rows, - check_task=CheckTasks.err_res, check_items=error) + self.insert(client, collection_name, data=rows, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_insert_data_id_field_missing(self): @@ -194,18 +180,18 @@ def test_milvus_client_insert_data_id_field_missing(self): method: create collection with invalid primary field expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. 
create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. insert rng = np.random.default_rng(seed=19530) rows = [{default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] error = {ct.err_code: 1, ct.err_msg: f"Insert missed an field `id` to collection without set nullable==true or set default_value"} - client_w.insert(client, collection_name, data=rows, - check_task=CheckTasks.err_res, check_items=error) + self.insert(client, collection_name, data=rows, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_insert_data_extra_field(self): @@ -214,18 +200,18 @@ def test_milvus_client_insert_data_extra_field(self): method: insert extra field than schema when enable_dynamic_field is False expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, enable_dynamic_field=False) + self.create_collection(client, collection_name, default_dim, enable_dynamic_field=False) # 2. 
insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] error = {ct.err_code: 1, ct.err_msg: f"Attempt to insert an unexpected field `float` to collection without enabling dynamic field"} - client_w.insert(client, collection_name, data=rows, - check_task=CheckTasks.err_res, check_items=error) + self.insert(client, collection_name, data=rows, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_insert_data_dim_not_match(self): @@ -234,17 +220,18 @@ def test_milvus_client_insert_data_dim_not_match(self): method: insert extra field than schema when enable_dynamic_field is False expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) # 2. 
insert rng = np.random.default_rng(seed=19530) - rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim+1))[0]), - default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] + rows = [ + {default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim + 1))[0]), + default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] error = {ct.err_code: 65536, ct.err_msg: f"of float data should divide the dim({default_dim})"} - client_w.insert(client, collection_name, data= rows, - check_task=CheckTasks.err_res, check_items=error) + self.insert(client, collection_name, data=rows, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_insert_not_matched_data(self): @@ -253,19 +240,20 @@ def test_milvus_client_insert_not_matched_data(self): method: insert string to int primary field expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) # 2. 
insert rng = np.random.default_rng(seed=19530) - rows = [{default_primary_key_field_name: str(i), default_vector_field_name: list(rng.random((1, default_dim))[0]), - default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] + rows = [ + {default_primary_key_field_name: str(i), default_vector_field_name: list(rng.random((1, default_dim))[0]), + default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] error = {ct.err_code: 1, ct.err_msg: f"The Input data type is inconsistent with defined schema, " f"{{id}} field should be a int64"} - client_w.insert(client, collection_name, data=rows, - check_task=CheckTasks.err_res, check_items=error) + self.insert(client, collection_name, data=rows, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) @pytest.mark.parametrize("partition_name", ["12 s", "(mn)", "中文", "%$#", " "]) @@ -275,10 +263,10 @@ def test_milvus_client_insert_invalid_partition_name(self, partition_name): method: insert extra field than schema when enable_dynamic_field is False expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) # 2. insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), @@ -286,8 +274,8 @@ def test_milvus_client_insert_invalid_partition_name(self, partition_name): error = {ct.err_code: 65535, ct.err_msg: f"Invalid partition name: {partition_name}."} if partition_name == " ": error = {ct.err_code: 1, ct.err_msg: f"Invalid partition name: . 
Partition name should not be empty."} - client_w.insert(client, collection_name, data=rows, partition_name=partition_name, - check_task=CheckTasks.err_res, check_items=error) + self.insert(client, collection_name, data=rows, partition_name=partition_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_insert_not_exist_partition_name(self): @@ -296,18 +284,18 @@ def test_milvus_client_insert_not_exist_partition_name(self): method: insert extra field than schema when enable_dynamic_field is False expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) # 2. insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] partition_name = cf.gen_unique_str("partition_not_exist") error = {ct.err_code: 200, ct.err_msg: f"partition not found[partition={partition_name}]"} - client_w.insert(client, collection_name, data= rows, partition_name=partition_name, - check_task=CheckTasks.err_res, check_items=error) + self.insert(client, collection_name, data=rows, partition_name=partition_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_insert_collection_partition_not_match(self): @@ -316,24 +304,24 @@ def test_milvus_client_insert_collection_partition_not_match(self): method: insert extra field than schema when enable_dynamic_field is False expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) another_collection_name = 
cf.gen_unique_str(prefix + "another") partition_name = cf.gen_unique_str("partition") # 1. create collection - client_w.create_collection(client, collection_name, default_dim) - client_w.create_collection(client, another_collection_name, default_dim) - client_w.create_partition(client, another_collection_name, partition_name) + self.create_collection(client, collection_name, default_dim) + self.create_collection(client, another_collection_name, default_dim) + self.create_partition(client, another_collection_name, partition_name) # 2. insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] error = {ct.err_code: 200, ct.err_msg: f"partition not found[partition={partition_name}]"} - client_w.insert(client, collection_name, data= rows, partition_name=partition_name, - check_task=CheckTasks.err_res, check_items=error) + self.insert(client, collection_name, data=rows, partition_name=partition_name, + check_task=CheckTasks.err_res, check_items=error) -class TestMilvusClientInsertValid(TestcaseBase): +class TestMilvusClientInsertValid(TestMilvusClientV2Base): """ Test case of search interface """ @pytest.fixture(scope="function", params=[False, True]) @@ -357,40 +345,40 @@ def test_milvus_client_insert_default(self): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. 
create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") - collections = client_w.list_collections(client)[0] + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") + collections = self.list_collections(client)[0] assert collection_name in collections - client_w.describe_collection(client, collection_name, - check_task=CheckTasks.check_describe_collection_property, - check_items={"collection_name": collection_name, - "dim": default_dim, - "consistency_level": 0}) + self.describe_collection(client, collection_name, + check_task=CheckTasks.check_describe_collection_property, + check_items={"collection_name": collection_name, + "dim": default_dim, + "consistency_level": 0}) # 2. insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - results = client_w.insert(client, collection_name, rows)[0] + results = self.insert(client, collection_name, rows)[0] assert results['insert_count'] == default_nb # 3. search vectors_to_search = rng.random((1, default_dim)) insert_ids = [i for i in range(default_nb)] - client_w.search(client, collection_name, vectors_to_search, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": insert_ids, - "limit": default_limit}) + self.search(client, collection_name, vectors_to_search, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": insert_ids, + "limit": default_limit}) # 4. 
query - client_w.query(client, collection_name, filter=default_search_exp, - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows, - "with_vec": True, - "primary_field": default_primary_key_field_name}) - client_w.release_collection(client, collection_name) - client_w.drop_collection(client, collection_name) + self.query(client, collection_name, filter=default_search_exp, + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows, + "with_vec": True, + "primary_field": default_primary_key_field_name}) + self.release_collection(client, collection_name) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_insert_different_fields(self): @@ -399,38 +387,38 @@ def test_milvus_client_insert_different_fields(self): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") - collections = client_w.list_collections(client)[0] + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") + collections = self.list_collections(client)[0] assert collection_name in collections - client_w.describe_collection(client, collection_name, - check_task=CheckTasks.check_describe_collection_property, - check_items={"collection_name": collection_name, - "dim": default_dim, - "consistency_level": 0}) + self.describe_collection(client, collection_name, + check_task=CheckTasks.check_describe_collection_property, + check_items={"collection_name": collection_name, + "dim": default_dim, + "consistency_level": 0}) # 2. 
insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - results = client_w.insert(client, collection_name, rows)[0] + results = self.insert(client, collection_name, rows)[0] assert results['insert_count'] == default_nb # 3. insert diff fields rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, "new_diff_str_field": str(i)} for i in range(default_nb)] - results = client_w.insert(client, collection_name, rows)[0] + results = self.insert(client, collection_name, rows)[0] assert results['insert_count'] == default_nb # 3. search vectors_to_search = rng.random((1, default_dim)) insert_ids = [i for i in range(default_nb)] - client_w.search(client, collection_name, vectors_to_search, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": insert_ids, - "limit": default_limit}) - client_w.drop_collection(client, collection_name) + self.search(client, collection_name, vectors_to_search, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": insert_ids, + "limit": default_limit}) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_insert_empty_data(self): @@ -439,24 +427,24 @@ def test_milvus_client_insert_empty_data(self): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. 
create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. insert rows = [] - results = client_w.insert(client, collection_name, rows)[0] + results = self.insert(client, collection_name, rows)[0] assert results['insert_count'] == 0 # 3. search rng = np.random.default_rng(seed=19530) vectors_to_search = rng.random((1, default_dim)) - client_w.search(client, collection_name, vectors_to_search, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": [], - "limit": 0}) - client_w.drop_collection(client, collection_name) + self.search(client, collection_name, vectors_to_search, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": [], + "limit": 0}) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_insert_partition(self): @@ -465,45 +453,45 @@ def test_milvus_client_insert_partition(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) partition_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. 
create partition - client_w.create_partition(client, collection_name, partition_name) - partitions = client_w.list_partitions(client, collection_name)[0] + self.create_partition(client, collection_name, partition_name) + partitions = self.list_partitions(client, collection_name)[0] assert partition_name in partitions - index = client_w.list_indexes(client, collection_name)[0] + index = self.list_indexes(client, collection_name)[0] assert index == ['vector'] - # load_state = client_w.get_load_state(collection_name)[0] + # load_state = self.get_load_state(collection_name)[0] # 3. insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - results = client_w.insert(client, collection_name, rows, partition_name=partition_name)[0] + results = self.insert(client, collection_name, rows, partition_name=partition_name)[0] assert results['insert_count'] == default_nb # 3. 
search vectors_to_search = rng.random((1, default_dim)) insert_ids = [i for i in range(default_nb)] - client_w.search(client, collection_name, vectors_to_search, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": insert_ids, - "limit": default_limit}) - # partition_number = client_w.get_partition_stats(client, collection_name, "_default")[0] + self.search(client, collection_name, vectors_to_search, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": insert_ids, + "limit": default_limit}) + # partition_number = self.get_partition_stats(client, collection_name, "_default")[0] # assert partition_number == default_nb - # partition_number = client_w.get_partition_stats(client, collection_name, partition_name)[0] + # partition_number = self.get_partition_stats(client, collection_name, partition_name)[0] # assert partition_number[0]['value'] == 0 - if client_w.has_partition(client, collection_name, partition_name)[0]: - client_w.release_partitions(client, collection_name, partition_name) - client_w.drop_partition(client, collection_name, partition_name) - if client_w.has_collection(client, collection_name)[0]: - client_w.drop_collection(client, collection_name) + if self.has_partition(client, collection_name, partition_name)[0]: + self.release_partitions(client, collection_name, partition_name) + self.drop_partition(client, collection_name, partition_name) + if self.has_collection(client, collection_name)[0]: + self.drop_collection(client, collection_name) -class TestMilvusClientUpsertInvalid(TestcaseBase): +class TestMilvusClientUpsertInvalid(TestMilvusClientV2Base): """ Test case of search interface """ @pytest.fixture(scope="function", params=[False, True]) @@ -528,17 +516,17 @@ def test_milvus_client_upsert_column_data(self): method: create connection, collection, insert and search expected: raise 
error """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) # 2. insert vectors = [[random.random() for _ in range(default_dim)] for _ in range(default_nb)] data = [[i for i in range(default_nb)], vectors] error = {ct.err_code: 1, ct.err_msg: "Unexpected error, message=<'list' object has no attribute 'items'"} - client_w.upsert(client, collection_name, data, - check_task=CheckTasks.err_res, check_items=error) - client_w.drop_collection(client, collection_name) + self.upsert(client, collection_name, data, + check_task=CheckTasks.err_res, check_items=error) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_upsert_empty_collection_name(self): @@ -547,14 +535,14 @@ def test_milvus_client_upsert_empty_collection_name(self): method: create collection with invalid primary field expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = "" rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] error = {ct.err_code: 1, ct.err_msg: f"`collection_name` value {collection_name} is illegal"} - client_w.upsert(client, collection_name, rows, - check_task=CheckTasks.err_res, check_items=error) + self.upsert(client, collection_name, rows, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) @pytest.mark.parametrize("collection_name", ["12-s", "12 s", "(mn)", "中文", "%$#"]) @@ -564,14 +552,14 @@ def test_milvus_client_upsert_invalid_collection_name(self, collection_name): method: create collection with invalid primary 
field expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {collection_name}. the first character of a " f"collection name must be an underscore or letter: invalid parameter"} - client_w.upsert(client, collection_name, rows, - check_task=CheckTasks.err_res, check_items=error) + self.upsert(client, collection_name, rows, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_upsert_collection_name_over_max_length(self): @@ -580,14 +568,14 @@ def test_milvus_client_upsert_collection_name_over_max_length(self): method: create collection with invalid primary field expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = "a".join("a" for i in range(256)) rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] error = {ct.err_code: 1100, ct.err_msg: f"the length of a collection name must be less than 255 characters"} - client_w.upsert(client, collection_name, rows, - check_task=CheckTasks.err_res, check_items=error) + self.upsert(client, collection_name, rows, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_upsert_not_exist_collection_name(self): @@ -596,14 +584,14 @@ def test_milvus_client_upsert_not_exist_collection_name(self): method: create collection with invalid primary field expected: Raise exception """ - client = 
self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str("insert_not_exist") rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] error = {ct.err_code: 100, ct.err_msg: f"can't find collection[database=default][collection={collection_name}]"} - client_w.upsert(client, collection_name, rows, - check_task=CheckTasks.err_res, check_items=error) + self.upsert(client, collection_name, rows, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) @pytest.mark.parametrize("data", ["12-s", "12 s", "(mn)", "中文", "%$#", " "]) @@ -613,14 +601,14 @@ def test_milvus_client_upsert_data_invalid_type(self, data): method: create collection with invalid primary field expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. insert error = {ct.err_code: 1, ct.err_msg: f"wrong type of argument 'data',expected 'Dict' or list of 'Dict'"} - client_w.upsert(client, collection_name, data, - check_task=CheckTasks.err_res, check_items=error) + self.upsert(client, collection_name, data, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_upsert_data_empty(self): @@ -629,14 +617,14 @@ def test_milvus_client_upsert_data_empty(self): method: create collection with invalid primary field expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. 
create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. insert error = {ct.err_code: 1, ct.err_msg: f"wrong type of argument 'data',expected 'Dict' or list of 'Dict'"} - client_w.upsert(client, collection_name, data="", - check_task=CheckTasks.err_res, check_items=error) + self.upsert(client, collection_name, data="", + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_upsert_data_vector_field_missing(self): @@ -645,18 +633,18 @@ def test_milvus_client_upsert_data_vector_field_missing(self): method: create collection with invalid primary field expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. 
insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(10)] error = {ct.err_code: 1, ct.err_msg: "Insert missed an field `vector` to collection without set nullable==true or set default_value"} - client_w.upsert(client, collection_name, data=rows, - check_task=CheckTasks.err_res, check_items=error) + self.upsert(client, collection_name, data=rows, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_upsert_data_id_field_missing(self): @@ -665,18 +653,18 @@ def test_milvus_client_upsert_data_id_field_missing(self): method: create collection with invalid primary field expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. 
insert rng = np.random.default_rng(seed=19530) rows = [{default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(20)] error = {ct.err_code: 1, ct.err_msg: f"Insert missed an field `id` to collection without set nullable==true or set default_value"} - client_w.upsert(client, collection_name, data=rows, - check_task=CheckTasks.err_res, check_items=error) + self.upsert(client, collection_name, data=rows, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_upsert_data_extra_field(self): @@ -685,19 +673,19 @@ def test_milvus_client_upsert_data_extra_field(self): method: insert extra field than schema when enable_dynamic_field is False expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - dim= 32 - client_w.create_collection(client, collection_name, dim, enable_dynamic_field=False) + dim = 32 + self.create_collection(client, collection_name, dim, enable_dynamic_field=False) # 2. 
insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(10)] error = {ct.err_code: 1, ct.err_msg: f"Attempt to insert an unexpected field `float` to collection without enabling dynamic field"} - client_w.upsert(client, collection_name, data=rows, - check_task=CheckTasks.err_res, check_items=error) + self.upsert(client, collection_name, data=rows, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_upsert_data_dim_not_match(self): @@ -706,17 +694,18 @@ def test_milvus_client_upsert_data_dim_not_match(self): method: insert extra field than schema when enable_dynamic_field is False expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) # 2. 
insert rng = np.random.default_rng(seed=19530) - rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim+1))[0]), - default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] + rows = [ + {default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim + 1))[0]), + default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] error = {ct.err_code: 65536, ct.err_msg: f"of float data should divide the dim({default_dim})"} - client_w.upsert(client, collection_name, data= rows, - check_task=CheckTasks.err_res, check_items=error) + self.upsert(client, collection_name, data=rows, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_upsert_not_matched_data(self): @@ -725,18 +714,19 @@ def test_milvus_client_upsert_not_matched_data(self): method: insert string to int primary field expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) # 2. 
insert rng = np.random.default_rng(seed=19530) - rows = [{default_primary_key_field_name: str(i), default_vector_field_name: list(rng.random((1, default_dim))[0]), - default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] + rows = [ + {default_primary_key_field_name: str(i), default_vector_field_name: list(rng.random((1, default_dim))[0]), + default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] error = {ct.err_code: 1, ct.err_msg: "The Input data type is inconsistent with defined schema, {id} field should be a int64"} - client_w.upsert(client, collection_name, data= rows, - check_task=CheckTasks.err_res, check_items=error) + self.upsert(client, collection_name, data=rows, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) @pytest.mark.parametrize("partition_name", ["12 s", "(mn)", "中文", "%$#", " "]) @@ -746,10 +736,10 @@ def test_milvus_client_upsert_invalid_partition_name(self, partition_name): method: insert extra field than schema when enable_dynamic_field is False expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) # 2. insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), @@ -757,8 +747,8 @@ def test_milvus_client_upsert_invalid_partition_name(self, partition_name): error = {ct.err_code: 65535, ct.err_msg: f"Invalid partition name: {partition_name}"} if partition_name == " ": error = {ct.err_code: 1, ct.err_msg: f"Invalid partition name: . 
Partition name should not be empty."} - client_w.upsert(client, collection_name, data= rows, partition_name=partition_name, - check_task=CheckTasks.err_res, check_items=error) + self.upsert(client, collection_name, data=rows, partition_name=partition_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_upsert_not_exist_partition_name(self): @@ -767,18 +757,18 @@ def test_milvus_client_upsert_not_exist_partition_name(self): method: insert extra field than schema when enable_dynamic_field is False expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) # 2. insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] partition_name = cf.gen_unique_str("partition_not_exist") error = {ct.err_code: 200, ct.err_msg: f"partition not found[partition={partition_name}]"} - client_w.upsert(client, collection_name, data= rows, partition_name=partition_name, - check_task=CheckTasks.err_res, check_items=error) + self.upsert(client, collection_name, data=rows, partition_name=partition_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_upsert_collection_partition_not_match(self): @@ -787,24 +777,24 @@ def test_milvus_client_upsert_collection_partition_not_match(self): method: insert extra field than schema when enable_dynamic_field is False expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) another_collection_name = 
cf.gen_unique_str(prefix + "another") partition_name = cf.gen_unique_str("partition") # 1. create collection - client_w.create_collection(client, collection_name, default_dim) - client_w.create_collection(client, another_collection_name, default_dim) - client_w.create_partition(client, another_collection_name, partition_name) + self.create_collection(client, collection_name, default_dim) + self.create_collection(client, another_collection_name, default_dim) + self.create_partition(client, another_collection_name, partition_name) # 2. insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] error = {ct.err_code: 200, ct.err_msg: f"partition not found[partition={partition_name}]"} - client_w.upsert(client, collection_name, data= rows, partition_name=partition_name, - check_task=CheckTasks.err_res, check_items=error) + self.upsert(client, collection_name, data=rows, partition_name=partition_name, + check_task=CheckTasks.err_res, check_items=error) -class TestMilvusClientUpsertValid(TestcaseBase): +class TestMilvusClientUpsertValid(TestMilvusClientV2Base): """ Test case of search interface """ @pytest.fixture(scope="function", params=[False, True]) @@ -828,40 +818,40 @@ def test_milvus_client_upsert_default(self): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. 
create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") - collections = client_w.list_collections(client)[0] + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") + collections = self.list_collections(client)[0] assert collection_name in collections - client_w.describe_collection(client, collection_name, - check_task=CheckTasks.check_describe_collection_property, - check_items={"collection_name": collection_name, - "dim": default_dim, - "consistency_level": 0}) + self.describe_collection(client, collection_name, + check_task=CheckTasks.check_describe_collection_property, + check_items={"collection_name": collection_name, + "dim": default_dim, + "consistency_level": 0}) # 2. insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - results = client_w.upsert(client, collection_name, rows)[0] + results = self.upsert(client, collection_name, rows)[0] assert results['upsert_count'] == default_nb # 3. search vectors_to_search = rng.random((1, default_dim)) insert_ids = [i for i in range(default_nb)] - client_w.search(client, collection_name, vectors_to_search, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": insert_ids, - "limit": default_limit}) + self.search(client, collection_name, vectors_to_search, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": insert_ids, + "limit": default_limit}) # 4. 
query - client_w.query(client, collection_name, filter=default_search_exp, - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows, - "with_vec": True, - "primary_field": default_primary_key_field_name}) - client_w.release_collection(client, collection_name) - client_w.drop_collection(client, collection_name) + self.query(client, collection_name, filter=default_search_exp, + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows, + "with_vec": True, + "primary_field": default_primary_key_field_name}) + self.release_collection(client, collection_name) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_upsert_empty_data(self): @@ -870,24 +860,24 @@ def test_milvus_client_upsert_empty_data(self): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. insert rows = [] - results = client_w.upsert(client, collection_name, rows)[0] + results = self.upsert(client, collection_name, rows)[0] assert results['upsert_count'] == 0 # 3. 
search rng = np.random.default_rng(seed=19530) vectors_to_search = rng.random((1, default_dim)) - client_w.search(client, collection_name, vectors_to_search, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": [], - "limit": 0}) - client_w.drop_collection(client, collection_name) + self.search(client, collection_name, vectors_to_search, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": [], + "limit": 0}) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_upsert_partition(self): @@ -896,45 +886,45 @@ def test_milvus_client_upsert_partition(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) partition_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. 
create partition - client_w.create_partition(client, collection_name, partition_name) - partitions = client_w.list_partitions(client, collection_name)[0] + self.create_partition(client, collection_name, partition_name) + partitions = self.list_partitions(client, collection_name)[0] assert partition_name in partitions - index = client_w.list_indexes(client, collection_name)[0] + index = self.list_indexes(client, collection_name)[0] assert index == ['vector'] - # load_state = client_w.get_load_state(collection_name)[0] + # load_state = self.get_load_state(collection_name)[0] rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] # 3. upsert to default partition - results = client_w.upsert(client, collection_name, rows, partition_name=partitions[0])[0] + results = self.upsert(client, collection_name, rows, partition_name=partitions[0])[0] assert results['upsert_count'] == default_nb # 4. upsert to non-default partition - results = client_w.upsert(client, collection_name, rows, partition_name=partition_name)[0] + results = self.upsert(client, collection_name, rows, partition_name=partition_name)[0] assert results['upsert_count'] == default_nb # 5. 
search vectors_to_search = rng.random((1, default_dim)) insert_ids = [i for i in range(default_nb)] - client_w.search(client, collection_name, vectors_to_search, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": insert_ids, - "limit": default_limit}) - # partition_number = client_w.get_partition_stats(client, collection_name, "_default")[0] + self.search(client, collection_name, vectors_to_search, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": insert_ids, + "limit": default_limit}) + # partition_number = self.get_partition_stats(client, collection_name, "_default")[0] # assert partition_number == default_nb - # partition_number = client_w.get_partition_stats(client, collection_name, partition_name)[0] + # partition_number = self.get_partition_stats(client, collection_name, partition_name)[0] # assert partition_number[0]['value'] == 0 - if client_w.has_partition(client, collection_name, partition_name)[0]: - client_w.release_partitions(client, collection_name, partition_name) - client_w.drop_partition(client, collection_name, partition_name) - if client_w.has_collection(client, collection_name)[0]: - client_w.drop_collection(client, collection_name) + if self.has_partition(client, collection_name, partition_name)[0]: + self.release_partitions(client, collection_name, partition_name) + self.drop_partition(client, collection_name, partition_name) + if self.has_collection(client, collection_name)[0]: + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_insert_upsert(self): @@ -943,39 +933,39 @@ def test_milvus_client_insert_upsert(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = 
cf.gen_unique_str(prefix) partition_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. create partition - client_w.create_partition(client, collection_name, partition_name) - partitions = client_w.list_partitions(client, collection_name)[0] + self.create_partition(client, collection_name, partition_name) + partitions = self.list_partitions(client, collection_name)[0] assert partition_name in partitions - index = client_w.list_indexes(client, collection_name)[0] + index = self.list_indexes(client, collection_name)[0] assert index == ['vector'] - # load_state = client_w.get_load_state(collection_name)[0] + # load_state = self.get_load_state(collection_name)[0] # 3. insert and upsert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - results = client_w.insert(client, collection_name, rows, partition_name=partition_name)[0] + results = self.insert(client, collection_name, rows, partition_name=partition_name)[0] assert results['insert_count'] == default_nb rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, "new_diff_str_field": str(i)} for i in range(default_nb)] - results = client_w.upsert(client, collection_name, rows, partition_name=partition_name)[0] + results = self.upsert(client, collection_name, rows, partition_name=partition_name)[0] assert results['upsert_count'] == default_nb # 3. 
search vectors_to_search = rng.random((1, default_dim)) insert_ids = [i for i in range(default_nb)] - client_w.search(client, collection_name, vectors_to_search, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": insert_ids, - "limit": default_limit}) - if client_w.has_partition(client, collection_name, partition_name)[0]: - client_w.release_partitions(client, collection_name, partition_name) - client_w.drop_partition(client, collection_name, partition_name) - if client_w.has_collection(client, collection_name)[0]: - client_w.drop_collection(client, collection_name) + self.search(client, collection_name, vectors_to_search, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": insert_ids, + "limit": default_limit}) + if self.has_partition(client, collection_name, partition_name)[0]: + self.release_partitions(client, collection_name, partition_name) + self.drop_partition(client, collection_name, partition_name) + if self.has_collection(client, collection_name)[0]: + self.drop_collection(client, collection_name) diff --git a/tests/python_client/milvus_client/test_milvus_client_partition.py b/tests/python_client/milvus_client/test_milvus_client_partition.py index c58bf6e3a878e..bf1f99ff1107b 100644 --- a/tests/python_client/milvus_client/test_milvus_client_partition.py +++ b/tests/python_client/milvus_client/test_milvus_client_partition.py @@ -1,26 +1,11 @@ -import multiprocessing -import numbers -import random -import numpy -import threading import pytest -import pandas as pd -import decimal -from decimal import Decimal, getcontext -from time import sleep -import heapq -from pymilvus import DataType - -from base.client_base import TestcaseBase + +from base.client_v2_base import TestMilvusClientV2Base from utils.util_log import test_log as log from common import common_func as cf from common import 
common_type as ct from common.common_type import CaseLabel, CheckTasks from utils.util_pymilvus import * -from common.constants import * -from pymilvus.orm.types import CONSISTENCY_STRONG, CONSISTENCY_BOUNDED, CONSISTENCY_SESSION, CONSISTENCY_EVENTUALLY -from base.high_level_api_wrapper import HighLevelApiWrapper -client_w = HighLevelApiWrapper() prefix = "milvus_client_api_partition" partition_prefix = "milvus_client_api_partition" @@ -48,7 +33,7 @@ default_string_array_field_name = ct.default_string_array_field_name -class TestMilvusClientPartitionInvalid(TestcaseBase): +class TestMilvusClientPartitionInvalid(TestMilvusClientV2Base): """ Test case of search interface """ @pytest.fixture(scope="function", params=[False, True]) @@ -73,13 +58,13 @@ def test_milvus_client_partition_invalid_collection_name(self, collection_name): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() partition_name = cf.gen_unique_str(partition_prefix) # 2. create partition error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {collection_name}. 
the first character of a " f"collection name must be an underscore or letter: invalid parameter"} - client_w.create_partition(client, collection_name, partition_name, - check_task=CheckTasks.err_res, check_items=error) + self.create_partition(client, collection_name, partition_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_partition_collection_name_over_max_length(self): @@ -88,14 +73,15 @@ def test_milvus_client_partition_collection_name_over_max_length(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = "a".join("a" for i in range(256)) partition_name = cf.gen_unique_str(partition_prefix) # 2. create partition - error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {collection_name}. the length of a collection name " - f"must be less than 255 characters: invalid parameter"} - client_w.create_partition(client, collection_name, partition_name, - check_task=CheckTasks.err_res, check_items=error) + error = {ct.err_code: 1100, + ct.err_msg: f"Invalid collection name: {collection_name}. the length of a collection name " + f"must be less than 255 characters: invalid parameter"} + self.create_partition(client, collection_name, partition_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_partition_not_exist_collection_name(self): @@ -104,14 +90,14 @@ def test_milvus_client_partition_not_exist_collection_name(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str("partition_not_exist") partition_name = cf.gen_unique_str(partition_prefix) # 2. 
create partition error = {ct.err_code: 100, ct.err_msg: f"collection not found[database=default]" f"[collection={collection_name}]"} - client_w.create_partition(client, collection_name, partition_name, - check_task=CheckTasks.err_res, check_items=error) + self.create_partition(client, collection_name, partition_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) @pytest.mark.parametrize("partition_name", ["12 s", "(mn)", "中文", "%$#"]) @@ -121,13 +107,13 @@ def test_milvus_client_partition_invalid_partition_name(self, partition_name): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 2. create partition - client_w.create_collection(client, collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) error = {ct.err_code: 65535, ct.err_msg: f"Invalid partition name: {partition_name}"} - client_w.create_partition(client, collection_name, partition_name, - check_task=CheckTasks.err_res, check_items=error) + self.create_partition(client, collection_name, partition_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_partition_name_lists(self): @@ -136,14 +122,14 @@ def test_milvus_client_partition_name_lists(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) partition_names = [cf.gen_unique_str(partition_prefix), cf.gen_unique_str(partition_prefix)] # 2. 
create partition - client_w.create_collection(client, collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) error = {ct.err_code: 999, ct.err_msg: f"`partition_name` value {partition_names} is illegal"} - client_w.create_partition(client, collection_name, partition_names, - check_task=CheckTasks.err_res, check_items=error) + self.create_partition(client, collection_name, partition_names, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) @pytest.mark.skip(reason="Take much running time") @@ -153,25 +139,25 @@ def test_milvus_client_create_over_max_partition_num(self): method: create 4097 partitions expected: raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) partition_nums = 4095 # 1. create collection - client_w.create_collection(client, collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) for i in range(partition_nums): partition_name = cf.gen_unique_str(partition_prefix) # 2. 
create partition - client_w.create_partition(client, collection_name, partition_name) - results = client_w.list_partitions(client, collection_name)[0] + self.create_partition(client, collection_name, partition_name) + results = self.list_partitions(client, collection_name)[0] assert len(results) == partition_nums + 1 partition_name = cf.gen_unique_str(partition_prefix) error = {ct.err_code: 65535, ct.err_msg: f"partition number (4096) exceeds max configuration (4096), " f"collection: {collection_name}"} - client_w.create_partition(client, collection_name, partition_name, - check_task=CheckTasks.err_res, check_items=error) + self.create_partition(client, collection_name, partition_name, + check_task=CheckTasks.err_res, check_items=error) -class TestMilvusClientPartitionValid(TestcaseBase): +class TestMilvusClientPartitionValid(TestMilvusClientV2Base): """ Test case of search interface """ @pytest.fixture(scope="function", params=[False, True]) @@ -200,51 +186,51 @@ def test_milvus_client_partition_default(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) partition_name = cf.gen_unique_str(partition_prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. 
create partition - client_w.create_partition(client, collection_name, partition_name) - partitions = client_w.list_partitions(client, collection_name)[0] + self.create_partition(client, collection_name, partition_name) + partitions = self.list_partitions(client, collection_name)[0] assert partition_name in partitions - index = client_w.list_indexes(client, collection_name)[0] + index = self.list_indexes(client, collection_name)[0] assert index == ['vector'] - # load_state = client_w.get_load_state(collection_name)[0] + # load_state = self.get_load_state(collection_name)[0] # 3. insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - client_w.insert(client, collection_name, rows) + self.insert(client, collection_name, rows) # 3. search vectors_to_search = rng.random((1, default_dim)) insert_ids = [i for i in range(default_nb)] - client_w.search(client, collection_name, vectors_to_search, - partition_names=partitions, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": insert_ids, - "limit": default_limit}) + self.search(client, collection_name, vectors_to_search, + partition_names=partitions, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": insert_ids, + "limit": default_limit}) # 4. 
query - res = client_w.query(client, collection_name, filter=default_search_exp, - output_fields=["vector"], partition_names=partitions, - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows, - "with_vec": True, - "primary_field": default_primary_key_field_name})[0] + res = self.query(client, collection_name, filter=default_search_exp, + output_fields=["vector"], partition_names=partitions, + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows, + "with_vec": True, + "primary_field": default_primary_key_field_name})[0] assert set(res[0].keys()) == {"ids", "vector"} - partition_number = client_w.get_partition_stats(client, collection_name, "_default")[0] + partition_number = self.get_partition_stats(client, collection_name, "_default")[0] assert partition_number == default_nb - partition_number = client_w.get_partition_stats(client, collection_name, partition_name)[0] + partition_number = self.get_partition_stats(client, collection_name, partition_name)[0] assert partition_number[0]['value'] == 0 - if client_w.has_partition(client, collection_name, partition_name)[0]: - client_w.release_partitions(client, collection_name, partition_name) - client_w.drop_partition(client, collection_name, partition_name) - if client_w.has_collection(client, collection_name)[0]: - client_w.drop_collection(client, collection_name) + if self.has_partition(client, collection_name, partition_name)[0]: + self.release_partitions(client, collection_name, partition_name) + self.drop_partition(client, collection_name, partition_name) + if self.has_collection(client, collection_name)[0]: + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_create_partition_name_existed(self): @@ -253,19 +239,19 @@ def test_milvus_client_create_partition_name_existed(self): method: create collection expected: create partition successfully with only one partition created """ - client = 
self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) partition_name = cf.gen_unique_str(partition_prefix) # 2. create partition - client_w.create_collection(client, collection_name, default_dim) - client_w.create_partition(client, collection_name, "_default") - results = client_w.list_partitions(client, collection_name)[0] + self.create_collection(client, collection_name, default_dim) + self.create_partition(client, collection_name, "_default") + results = self.list_partitions(client, collection_name)[0] assert len(results) == 1 - client_w.create_partition(client, collection_name, partition_name) - results = client_w.list_partitions(client, collection_name)[0] + self.create_partition(client, collection_name, partition_name) + results = self.list_partitions(client, collection_name)[0] assert len(results) == 2 - client_w.create_partition(client, collection_name, partition_name) - results = client_w.list_partitions(client, collection_name)[0] + self.create_partition(client, collection_name, partition_name) + results = self.list_partitions(client, collection_name)[0] assert len(results) == 2 @pytest.mark.tags(CaseLabel.L2) @@ -275,12 +261,12 @@ def test_milvus_client_drop_partition_not_exist_partition(self): method: create collection expected: drop successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) partition_name = cf.gen_unique_str("partition_not_exist") # 2. 
create partition - client_w.create_collection(client, collection_name, default_dim) - client_w.drop_partition(client, collection_name, partition_name) + self.create_collection(client, collection_name, default_dim) + self.drop_partition(client, collection_name, partition_name) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_drop_partition_collection_partition_not_match(self): @@ -289,15 +275,15 @@ def test_milvus_client_drop_partition_collection_partition_not_match(self): method: drop partition in another collection expected: drop successfully without any operations """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) another_collection_name = cf.gen_unique_str("another") partition_name = cf.gen_unique_str(partition_prefix) # 2. create partition - client_w.create_collection(client, collection_name, default_dim) - client_w.create_partition(client, collection_name, partition_name) - client_w.create_collection(client, another_collection_name, default_dim) - client_w.drop_partition(client, another_collection_name, partition_name) + self.create_collection(client, collection_name, default_dim) + self.create_partition(client, collection_name, partition_name) + self.create_collection(client, another_collection_name, default_dim) + self.drop_partition(client, another_collection_name, partition_name) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_has_partition_collection_partition_not_match(self): @@ -306,19 +292,19 @@ def test_milvus_client_has_partition_collection_partition_not_match(self): method: drop partition in another collection expected: drop successfully without any operations """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) another_collection_name = cf.gen_unique_str("another") partition_name = cf.gen_unique_str(partition_prefix) # 2. 
create partition - client_w.create_collection(client, collection_name, default_dim) - client_w.create_partition(client, collection_name, partition_name) - client_w.create_collection(client, another_collection_name, default_dim) - result = client_w.has_partition(client, another_collection_name, partition_name)[0] + self.create_collection(client, collection_name, default_dim) + self.create_partition(client, collection_name, partition_name) + self.create_collection(client, another_collection_name, default_dim) + result = self.has_partition(client, another_collection_name, partition_name)[0] assert result is False -class TestMilvusClientDropPartitionInvalid(TestcaseBase): +class TestMilvusClientDropPartitionInvalid(TestMilvusClientV2Base): """ Test case of search interface """ @pytest.fixture(scope="function", params=[False, True]) @@ -343,13 +329,13 @@ def test_milvus_client_drop_partition_invalid_collection_name(self, collection_n method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() partition_name = cf.gen_unique_str(partition_prefix) # 2. create partition error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {collection_name}. 
the first character of a " f"collection name must be an underscore or letter: invalid parameter"} - client_w.drop_partition(client, collection_name, partition_name, - check_task=CheckTasks.err_res, check_items=error) + self.drop_partition(client, collection_name, partition_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_drop_partition_collection_name_over_max_length(self): @@ -358,14 +344,15 @@ def test_milvus_client_drop_partition_collection_name_over_max_length(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = "a".join("a" for i in range(256)) partition_name = cf.gen_unique_str(partition_prefix) # 2. create partition - error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {collection_name}. the length of a collection name " - f"must be less than 255 characters: invalid parameter"} - client_w.drop_partition(client, collection_name, partition_name, - check_task=CheckTasks.err_res, check_items=error) + error = {ct.err_code: 1100, + ct.err_msg: f"Invalid collection name: {collection_name}. the length of a collection name " + f"must be less than 255 characters: invalid parameter"} + self.drop_partition(client, collection_name, partition_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_drop_partition_not_exist_collection_name(self): @@ -374,14 +361,14 @@ def test_milvus_client_drop_partition_not_exist_collection_name(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str("partition_not_exist") partition_name = cf.gen_unique_str(partition_prefix) # 2. 
create partition error = {ct.err_code: 100, ct.err_msg: f"collection not found[database=default]" f"[collection={collection_name}]"} - client_w.drop_partition(client, collection_name, partition_name, - check_task=CheckTasks.err_res, check_items=error) + self.drop_partition(client, collection_name, partition_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) @pytest.mark.parametrize("partition_name", ["12 s", "(mn)", "中文", "%$#"]) @@ -391,13 +378,13 @@ def test_milvus_client_drop_partition_invalid_partition_name(self, partition_nam method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 2. create partition - client_w.create_collection(client, collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) error = {ct.err_code: 65535, ct.err_msg: f"Invalid partition name: {partition_name}."} - client_w.drop_partition(client, collection_name, partition_name, - check_task=CheckTasks.err_res, check_items=error) + self.drop_partition(client, collection_name, partition_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_drop_partition_name_lists(self): @@ -406,17 +393,17 @@ def test_milvus_client_drop_partition_name_lists(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) partition_names = [cf.gen_unique_str(partition_prefix), cf.gen_unique_str(partition_prefix)] # 2. 
create partition - client_w.create_collection(client, collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) error = {ct.err_code: 1, ct.err_msg: f"`partition_name` value {partition_names} is illegal"} - client_w.drop_partition(client, collection_name, partition_names, - check_task=CheckTasks.err_res, check_items=error) + self.drop_partition(client, collection_name, partition_names, + check_task=CheckTasks.err_res, check_items=error) -class TestMilvusClientReleasePartitionInvalid(TestcaseBase): +class TestMilvusClientReleasePartitionInvalid(TestMilvusClientV2Base): """ Test case of search interface """ @pytest.fixture(scope="function", params=[False, True]) @@ -441,13 +428,13 @@ def test_milvus_client_release_partition_invalid_collection_name(self, collectio method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() partition_name = cf.gen_unique_str(partition_prefix) # 2. create partition error = {ct.err_code: 999, ct.err_msg: f"Invalid collection name: {collection_name}. 
the first character of a " - f"collection name must be an underscore or letter: invalid parameter"} - client_w.release_partitions(client, collection_name, partition_name, - check_task=CheckTasks.err_res, check_items=error) + f"collection name must be an underscore or letter: invalid parameter"} + self.release_partitions(client, collection_name, partition_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_release_partition_collection_name_over_max_length(self): @@ -456,14 +443,15 @@ def test_milvus_client_release_partition_collection_name_over_max_length(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = "a".join("a" for i in range(256)) partition_name = cf.gen_unique_str(partition_prefix) # 2. create partition - error = {ct.err_code: 999, ct.err_msg: f"Invalid collection name: {collection_name}. the length of a collection name " - f"must be less than 255 characters: invalid parameter"} - client_w.release_partitions(client, collection_name, partition_name, - check_task=CheckTasks.err_res, check_items=error) + error = {ct.err_code: 999, + ct.err_msg: f"Invalid collection name: {collection_name}. 
the length of a collection name " + f"must be less than 255 characters: invalid parameter"} + self.release_partitions(client, collection_name, partition_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_release_partition_not_exist_collection_name(self): @@ -472,14 +460,14 @@ def test_milvus_client_release_partition_not_exist_collection_name(self): method: release partition with not exist collection name expected: raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str("partition_not_exist") partition_name = cf.gen_unique_str(partition_prefix) # 2. create partition error = {ct.err_code: 999, ct.err_msg: f"collection not found[database=default]" f"[collection={collection_name}]"} - client_w.release_partitions(client, collection_name, partition_name, - check_task=CheckTasks.err_res, check_items=error) + self.release_partitions(client, collection_name, partition_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) @pytest.mark.xfail(reason="pymilvus issue 1896") @@ -490,14 +478,14 @@ def test_milvus_client_release_partition_invalid_partition_name(self, partition_ method: release partition with invalid partition name value expected: raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 2. create partition - client_w.create_collection(client, collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) error = {ct.err_code: 65535, ct.err_msg: f"Invalid partition name: {partition_name}. 
The first character of a " f"partition name must be an underscore or letter.]"} - client_w.release_partitions(client, collection_name, partition_name, - check_task=CheckTasks.err_res, check_items=error) + self.release_partitions(client, collection_name, partition_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) @pytest.mark.xfail(reason="pymilvus issue 1896") @@ -507,15 +495,15 @@ def test_milvus_client_release_partition_invalid_partition_name_list(self): method: release partition with invalid partition name value expected: raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 2. create partition - client_w.create_collection(client, collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) partition_name = ["12-s"] error = {ct.err_code: 65535, ct.err_msg: f"Invalid partition name: {partition_name}. The first character of a " f"partition name must be an underscore or letter.]"} - client_w.release_partitions(client, collection_name, partition_name, - check_task=CheckTasks.err_res, check_items=error) + self.release_partitions(client, collection_name, partition_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_release_partition_name_lists_empty(self): @@ -524,14 +512,14 @@ def test_milvus_client_release_partition_name_lists_empty(self): method: release partition with invalid partition name type expected: raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) partition_names = [] # 2. 
create partition - client_w.create_collection(client, collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) error = {ct.err_code: 999, ct.err_msg: f"invalid parameter[expected=any partition][actual=empty partition list"} - client_w.release_partitions(client, collection_name, partition_names, - check_task=CheckTasks.err_res, check_items=error) + self.release_partitions(client, collection_name, partition_names, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_release_partition_name_lists_not_all_exists(self): @@ -540,15 +528,15 @@ def test_milvus_client_release_partition_name_lists_not_all_exists(self): method: release partition with invalid partition name type expected: raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) not_exist_partition = cf.gen_unique_str("partition_not_exist") partition_names = ["_default", not_exist_partition] # 2. create partition - client_w.create_collection(client, collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) error = {ct.err_code: 999, ct.err_msg: f"partition not found[partition={not_exist_partition}]"} - client_w.release_partitions(client, collection_name, partition_names, - check_task=CheckTasks.err_res, check_items=error) + self.release_partitions(client, collection_name, partition_names, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_release_not_exist_partition_name(self): @@ -557,21 +545,21 @@ def test_milvus_client_release_not_exist_partition_name(self): method: release partition with invalid partition name type expected: raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) partition_name = cf.gen_unique_str("partition_not_exist") # 2. 
create partition error = {ct.err_code: 200, ct.err_msg: f"partition not found[partition={partition_name}]"} - client_w.create_collection(client, collection_name, default_dim) - client_w.release_partitions(client, collection_name, partition_name, - check_task=CheckTasks.err_res, check_items=error) + self.create_collection(client, collection_name, default_dim) + self.release_partitions(client, collection_name, partition_name, + check_task=CheckTasks.err_res, check_items=error) partition_name = "" error = {ct.err_code: 200, ct.err_msg: f"partition not found[partition={partition_name}]"} - client_w.release_partitions(client, collection_name, partition_name, - check_task=CheckTasks.err_res, check_items=error) + self.release_partitions(client, collection_name, partition_name, + check_task=CheckTasks.err_res, check_items=error) -class TestMilvusClientReleasePartitionValid(TestcaseBase): +class TestMilvusClientReleasePartitionValid(TestMilvusClientV2Base): """ Test case of search interface """ @pytest.fixture(scope="function", params=[False, True]) @@ -595,17 +583,17 @@ def test_milvus_client_partition_release_multiple_partitions(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) partition_name = cf.gen_unique_str(partition_prefix) partition_names = ["_default", partition_name] # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. 
create partition - client_w.create_partition(client, collection_name, partition_name) - client_w.release_partitions(client, collection_name, partition_names) - client_w.release_partitions(client, collection_name, partition_names) - client_w.drop_collection(client, collection_name) + self.create_partition(client, collection_name, partition_name) + self.release_partitions(client, collection_name, partition_names) + self.release_partitions(client, collection_name, partition_names) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_partition_release_unloaded_partition(self): @@ -614,16 +602,16 @@ def test_milvus_client_partition_release_unloaded_partition(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) partition_name = cf.gen_unique_str(partition_prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. 
create partition - client_w.create_partition(client, collection_name, partition_name) - client_w.release_partitions(client, collection_name, partition_name) - client_w.release_partitions(client, collection_name, partition_name) - client_w.drop_collection(client, collection_name) + self.create_partition(client, collection_name, partition_name) + self.release_partitions(client, collection_name, partition_name) + self.release_partitions(client, collection_name, partition_name) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_partition_release_unloaded_collection(self): @@ -632,16 +620,16 @@ def test_milvus_client_partition_release_unloaded_collection(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) partition_name = cf.gen_unique_str(partition_prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. 
create partition - client_w.create_partition(client, collection_name, partition_name) - client_w.release_collection(client, collection_name) - client_w.release_partitions(client, collection_name, partition_name) - client_w.drop_collection(client, collection_name) + self.create_partition(client, collection_name, partition_name) + self.release_collection(client, collection_name) + self.release_partitions(client, collection_name, partition_name) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_partition_release_loaded_partition(self): @@ -650,16 +638,16 @@ def test_milvus_client_partition_release_loaded_partition(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) partition_name = cf.gen_unique_str(partition_prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. 
create partition - client_w.create_partition(client, collection_name, partition_name) - client_w.load_partitions(client, collection_name, partition_name) - client_w.release_partitions(client, collection_name, partition_name) - client_w.drop_collection(client, collection_name) + self.create_partition(client, collection_name, partition_name) + self.load_partitions(client, collection_name, partition_name) + self.release_partitions(client, collection_name, partition_name) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_partition_release_loaded_collection(self): @@ -668,19 +656,19 @@ def test_milvus_client_partition_release_loaded_collection(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) partition_name = cf.gen_unique_str(partition_prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. 
create partition - client_w.create_partition(client, collection_name, partition_name) - client_w.load_collection(client, collection_name) - client_w.release_partitions(client, collection_name, partition_name) - client_w.drop_collection(client, collection_name) + self.create_partition(client, collection_name, partition_name) + self.load_collection(client, collection_name) + self.release_partitions(client, collection_name, partition_name) + self.drop_collection(client, collection_name) -class TestMilvusClientListPartitionInvalid(TestcaseBase): +class TestMilvusClientListPartitionInvalid(TestMilvusClientV2Base): """ Test case of search interface """ @pytest.fixture(scope="function", params=[False, True]) @@ -705,12 +693,12 @@ def test_milvus_client_list_partitions_invalid_collection_name(self, collection_ method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() # 2. create partition error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {collection_name}. the first character of a " f"collection name must be an underscore or letter: invalid parameter"} - client_w.list_partitions(client, collection_name, - check_task=CheckTasks.err_res, check_items=error) + self.list_partitions(client, collection_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_list_partitions_collection_name_over_max_length(self): @@ -719,13 +707,14 @@ def test_milvus_client_list_partitions_collection_name_over_max_length(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = "a".join("a" for i in range(256)) # 2. create partition - error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {collection_name}. 
the length of a collection name " - f"must be less than 255 characters: invalid parameter"} - client_w.list_partitions(client, collection_name, - check_task=CheckTasks.err_res, check_items=error) + error = {ct.err_code: 1100, + ct.err_msg: f"Invalid collection name: {collection_name}. the length of a collection name " + f"must be less than 255 characters: invalid parameter"} + self.list_partitions(client, collection_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_list_partitions_not_exist_collection_name(self): @@ -734,16 +723,16 @@ def test_milvus_client_list_partitions_not_exist_collection_name(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str("partition_not_exist") # 2. create partition error = {ct.err_code: 100, ct.err_msg: f"collection not found[database=default]" f"[collection={collection_name}]"} - client_w.list_partitions(client, collection_name, - check_task=CheckTasks.err_res, check_items=error) + self.list_partitions(client, collection_name, + check_task=CheckTasks.err_res, check_items=error) -class TestMilvusClientHasPartitionInvalid(TestcaseBase): +class TestMilvusClientHasPartitionInvalid(TestMilvusClientV2Base): """ Test case of search interface """ @pytest.fixture(scope="function", params=[False, True]) @@ -768,13 +757,13 @@ def test_milvus_client_has_partition_invalid_collection_name(self, collection_na method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() partition_name = cf.gen_unique_str(partition_prefix) # 2. create partition error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {collection_name}. 
the first character of a " f"collection name must be an underscore or letter: invalid parameter"} - client_w.has_partition(client, collection_name, partition_name, - check_task=CheckTasks.err_res, check_items=error) + self.has_partition(client, collection_name, partition_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_has_partition_collection_name_over_max_length(self): @@ -783,14 +772,15 @@ def test_milvus_client_has_partition_collection_name_over_max_length(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = "a".join("a" for i in range(256)) partition_name = cf.gen_unique_str(partition_prefix) # 2. create partition - error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {collection_name}. the length of a collection name " - f"must be less than 255 characters: invalid parameter"} - client_w.has_partition(client, collection_name, partition_name, - check_task=CheckTasks.err_res, check_items=error) + error = {ct.err_code: 1100, + ct.err_msg: f"Invalid collection name: {collection_name}. the length of a collection name " + f"must be less than 255 characters: invalid parameter"} + self.has_partition(client, collection_name, partition_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_has_partition_not_exist_collection_name(self): @@ -799,14 +789,14 @@ def test_milvus_client_has_partition_not_exist_collection_name(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str("partition_not_exist") partition_name = cf.gen_unique_str(partition_prefix) # 2. 
create partition error = {ct.err_code: 100, ct.err_msg: f"collection not found[database=default]" f"[collection={collection_name}]"} - client_w.has_partition(client, collection_name, partition_name, - check_task=CheckTasks.err_res, check_items=error) + self.has_partition(client, collection_name, partition_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) @pytest.mark.parametrize("partition_name", ["12 s", "(mn)", "中文", "%$#"]) @@ -816,13 +806,13 @@ def test_milvus_client_has_partition_invalid_partition_name(self, partition_name method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 2. create partition - client_w.create_collection(client, collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) error = {ct.err_code: 65535, ct.err_msg: f"Invalid partition name: {partition_name}"} - client_w.has_partition(client, collection_name, partition_name, - check_task=CheckTasks.err_res, check_items=error) + self.has_partition(client, collection_name, partition_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_has_partition_name_over_max_length(self): @@ -831,15 +821,15 @@ def test_milvus_client_has_partition_name_over_max_length(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) partition_name = "a".join("a" for i in range(256)) # 2. create partition - client_w.create_collection(client, collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) error = {ct.err_code: 65535, ct.err_msg: f"Invalid partition name: {partition_name}. 
" f"The length of a partition name must be less than 255 characters"} - client_w.has_partition(client, collection_name, partition_name, - check_task=CheckTasks.err_res, check_items=error) + self.has_partition(client, collection_name, partition_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_has_partition_name_lists(self): @@ -848,14 +838,14 @@ def test_milvus_client_has_partition_name_lists(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) partition_names = [cf.gen_unique_str(partition_prefix), cf.gen_unique_str(partition_prefix)] # 2. create partition - client_w.create_collection(client, collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) error = {ct.err_code: 1, ct.err_msg: f"`partition_name` value {partition_names} is illegal"} - client_w.has_partition(client, collection_name, partition_names, - check_task=CheckTasks.err_res, check_items=error) + self.has_partition(client, collection_name, partition_names, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_has_partition_not_exist_partition_name(self): @@ -864,17 +854,17 @@ def test_milvus_client_has_partition_not_exist_partition_name(self): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) partition_name = cf.gen_unique_str("partition_not_exist") # 1. create collection - client_w.create_collection(client, collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) # 2. 
create partition - result = client_w.has_partition(client, collection_name, partition_name)[0] + result = self.has_partition(client, collection_name, partition_name)[0] assert result == False -class TestMilvusClientLoadPartitionInvalid(TestcaseBase): +class TestMilvusClientLoadPartitionInvalid(TestMilvusClientV2Base): """ Test case of search interface """ """ @@ -891,12 +881,12 @@ def test_milvus_client_load_partitions_invalid_collection_name(self, name): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() partition_name = cf.gen_unique_str(prefix) error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name: {name}. collection name can only " f"contain numbers, letters and underscores: invalid parameter"} - client_w.load_partitions(client, name, partition_name, - check_task=CheckTasks.err_res, check_items=error) + self.load_partitions(client, name, partition_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_load_partitions_not_existed(self): @@ -905,13 +895,13 @@ def test_milvus_client_load_partitions_not_existed(self): method: create collection expected: drop successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str("nonexisted") partition_name = cf.gen_unique_str(prefix) error = {ct.err_code: 1100, ct.err_msg: f"collection not found[database=default]" f"[collection={collection_name}]"} - client_w.load_partitions(client, collection_name, partition_name, - check_task=CheckTasks.err_res, check_items=error) + self.load_partitions(client, collection_name, partition_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_load_partitions_collection_name_over_max_length(self): @@ -920,14 +910,14 @@ def 
test_milvus_client_load_partitions_collection_name_over_max_length(self): method: create collection expected: drop successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = "a".join("a" for i in range(256)) partition_name = cf.gen_unique_str(prefix) error = {ct.err_code: 1100, ct.err_msg: f"invalid dimension: {collection_name}. " f"the length of a collection name must be less than 255 characters: " f"invalid parameter"} - client_w.load_partitions(client, collection_name, partition_name, - check_task=CheckTasks.err_res, check_items=error) + self.load_partitions(client, collection_name, partition_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) @pytest.mark.parametrize("name", ["12 s", "(mn)", "中文", "%$#"]) @@ -937,15 +927,15 @@ def test_milvus_client_load_partitions_invalid_partition_name(self, name): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. load partition error = {ct.err_code: 1100, ct.err_msg: f"Invalid partition name: {name}. 
collection name can only " f"contain numbers, letters and underscores: invalid parameter"} - client_w.load_partitions(client, collection_name, name, - check_task=CheckTasks.err_res, check_items=error) + self.load_partitions(client, collection_name, name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_load_partitions_partition_not_existed(self): @@ -954,16 +944,16 @@ def test_milvus_client_load_partitions_partition_not_existed(self): method: create collection expected: drop successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) partition_name = cf.gen_unique_str("nonexisted") # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. load partition error = {ct.err_code: 1100, ct.err_msg: f"partition not found[database=default]" f"[collection={collection_name}]"} - client_w.load_partitions(client, collection_name, partition_name, - check_task=CheckTasks.err_res, check_items=error) + self.load_partitions(client, collection_name, partition_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_load_partitions_partition_name_over_max_length(self): @@ -972,17 +962,17 @@ def test_milvus_client_load_partitions_partition_name_over_max_length(self): method: create collection expected: drop successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) partition_name = "a".join("a" for i in range(256)) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. 
load partition error = {ct.err_code: 1100, ct.err_msg: f"invalid dimension: {collection_name}. " f"the length of a collection name must be less than 255 characters: " f"invalid parameter"} - client_w.load_partitions(client, collection_name, partition_name, - check_task=CheckTasks.err_res, check_items=error) + self.load_partitions(client, collection_name, partition_name, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_load_partitions_without_index(self): @@ -991,21 +981,21 @@ def test_milvus_client_load_partitions_without_index(self): method: create collection expected: drop successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) partition_name = cf.gen_unique_str(partition_prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. drop index - client_w.release_collection(client, collection_name) - client_w.drop_index(client, collection_name, "vector") + self.release_collection(client, collection_name) + self.drop_index(client, collection_name, "vector") # 2. 
load partition error = {ct.err_code: 700, ct.err_msg: f"index not found[collection={collection_name}]"} - client_w.load_partitions(client, collection_name, partition_name, - check_task=CheckTasks.err_res, check_items=error) + self.load_partitions(client, collection_name, partition_name, + check_task=CheckTasks.err_res, check_items=error) -class TestMilvusClientLoadPartitionInvalid(TestcaseBase): +class TestMilvusClientLoadPartitionValid(TestMilvusClientV2Base): """ Test case of search interface """ """ @@ -1021,16 +1011,16 @@ def test_milvus_client_load_multiple_partition(self): method: create collection expected: drop successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) partition_name = cf.gen_unique_str(partition_prefix) partition_names = ["_default", partition_name] # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") - client_w.create_partition(client, collection_name, partition_name) - client_w.release_collection(client, collection_name) + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_partition(client, collection_name, partition_name) + self.release_collection(client, collection_name) # 2. load partition - client_w.load_partitions(client, collection_name, partition_names) + self.load_partitions(client, collection_name, partition_names) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_load_unloaded_partition(self): @@ -1039,16 +1029,16 @@ def test_milvus_client_load_unloaded_partition(self): method: create collection expected: drop successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) partition_name = cf.gen_unique_str(partition_prefix) # 1. 
create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") - client_w.create_partition(client, collection_name, partition_name) - client_w.release_collection(client, collection_name) + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_partition(client, collection_name, partition_name) + self.release_collection(client, collection_name) # 2. load partition - client_w.load_partitions(client, collection_name, partition_name) - client_w.load_partitions(client, collection_name, "_default") + self.load_partitions(client, collection_name, partition_name) + self.load_partitions(client, collection_name, "_default") @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_load_unloaded_partition(self): @@ -1057,17 +1047,15 @@ def test_milvus_client_load_unloaded_partition(self): method: create collection expected: drop successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) partition_name = cf.gen_unique_str(partition_prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") - client_w.create_partition(client, collection_name, partition_name) - client_w.release_collection(client, collection_name) + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_partition(client, collection_name, partition_name) + self.release_collection(client, collection_name) # 2. 
load partition - client_w.load_partitions(client, collection_name, partition_name) - client_w.load_partitions(client, collection_name, partition_name) - client_w.load_collection(client, collection_name) - client_w.load_partitions(client, collection_name, partition_name) - - + self.load_partitions(client, collection_name, partition_name) + self.load_partitions(client, collection_name, partition_name) + self.load_collection(client, collection_name) + self.load_partitions(client, collection_name, partition_name) diff --git a/tests/python_client/milvus_client/test_milvus_client_query.py b/tests/python_client/milvus_client/test_milvus_client_query.py index 457495aaf6949..1811656201b60 100644 --- a/tests/python_client/milvus_client/test_milvus_client_query.py +++ b/tests/python_client/milvus_client/test_milvus_client_query.py @@ -1,25 +1,11 @@ -import multiprocessing -import numbers -import random -import numpy -import threading import pytest -import pandas as pd -import decimal -from decimal import Decimal, getcontext -from time import sleep -import heapq -from base.client_base import TestcaseBase +from base.client_v2_base import TestMilvusClientV2Base from utils.util_log import test_log as log from common import common_func as cf from common import common_type as ct from common.common_type import CaseLabel, CheckTasks from utils.util_pymilvus import * -from common.constants import * -from pymilvus.orm.types import CONSISTENCY_STRONG, CONSISTENCY_BOUNDED, CONSISTENCY_SESSION, CONSISTENCY_EVENTUALLY -from base.high_level_api_wrapper import HighLevelApiWrapper -client_w = HighLevelApiWrapper() prefix = "milvus_client_api_query" epsilon = ct.epsilon @@ -46,7 +32,7 @@ default_string_array_field_name = ct.default_string_array_field_name -class TestMilvusClientQueryInvalid(TestcaseBase): +class TestMilvusClientQueryInvalid(TestMilvusClientV2Base): """ Test case of search interface """ @pytest.fixture(scope="function", params=[False, True]) @@ -70,29 +56,29 @@ def 
test_milvus_client_query_not_all_required_params(self): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") - collections = client_w.list_collections(client)[0] + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") + collections = self.list_collections(client)[0] assert collection_name in collections - client_w.describe_collection(client, collection_name, - check_task=CheckTasks.check_describe_collection_property, - check_items={"collection_name": collection_name, - "dim": default_dim, - "consistency_level": 0}) + self.describe_collection(client, collection_name, + check_task=CheckTasks.check_describe_collection_property, + check_items={"collection_name": collection_name, + "dim": default_dim, + "consistency_level": 0}) # 2. insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - client_w.insert(client, collection_name, rows) + self.insert(client, collection_name, rows) # 3. 
query using ids error = {ct.err_code: 65535, ct.err_msg: f"empty expression should be used with limit"} - client_w.query(client, collection_name, - check_task=CheckTasks.err_res, check_items=error) + self.query(client, collection_name, + check_task=CheckTasks.err_res, check_items=error) -class TestMilvusClientQueryValid(TestcaseBase): +class TestMilvusClientQueryValid(TestMilvusClientV2Base): """ Test case of search interface """ @pytest.fixture(scope="function", params=[False, True]) @@ -116,28 +102,28 @@ def test_milvus_client_query_default(self): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - client_w.insert(client, collection_name, rows) + self.insert(client, collection_name, rows) # 3. query using ids - client_w.query(client, collection_name, ids=[i for i in range(default_nb)], - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows, - "with_vec": True, - "primary_field": default_primary_key_field_name}) + self.query(client, collection_name, ids=[i for i in range(default_nb)], + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows, + "with_vec": True, + "primary_field": default_primary_key_field_name}) # 4. 
query using filter - client_w.query(client, collection_name, filter=default_search_exp, - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows, - "with_vec": True, - "primary_field": default_primary_key_field_name}) - client_w.drop_collection(client, collection_name) + self.query(client, collection_name, filter=default_search_exp, + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows, + "with_vec": True, + "primary_field": default_primary_key_field_name}) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_query_output_fields(self): @@ -146,32 +132,32 @@ def test_milvus_client_query_output_fields(self): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - client_w.insert(client, collection_name, rows) + self.insert(client, collection_name, rows) # 3. query using ids - client_w.query(client, collection_name, ids=[i for i in range(default_nb)], - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows, - "with_vec": True, - "primary_field": default_primary_key_field_name}) + self.query(client, collection_name, ids=[i for i in range(default_nb)], + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows, + "with_vec": True, + "primary_field": default_primary_key_field_name}) # 4. 
query using filter - res = client_w.query(client, collection_name, filter=default_search_exp, - output_fields=[default_primary_key_field_name, default_float_field_name, - default_string_field_name, default_vector_field_name], - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows, - "with_vec": True, - "primary_field": default_primary_key_field_name})[0] + res = self.query(client, collection_name, filter=default_search_exp, + output_fields=[default_primary_key_field_name, default_float_field_name, + default_string_field_name, default_vector_field_name], + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows, + "with_vec": True, + "primary_field": default_primary_key_field_name})[0] assert set(res[0].keys()) == {default_primary_key_field_name, default_vector_field_name, default_float_field_name, default_string_field_name} - client_w.drop_collection(client, collection_name) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_query_output_fields_all(self): @@ -180,31 +166,31 @@ def test_milvus_client_query_output_fields_all(self): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - client_w.insert(client, collection_name, rows) + self.insert(client, collection_name, rows) # 3. 
query using ids - client_w.query(client, collection_name, ids=[i for i in range(default_nb)], - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows, - "with_vec": True, - "primary_field": default_primary_key_field_name}) + self.query(client, collection_name, ids=[i for i in range(default_nb)], + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows, + "with_vec": True, + "primary_field": default_primary_key_field_name}) # 4. query using filter - res = client_w.query(client, collection_name, filter=default_search_exp, - output_fields=["*"], - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows, - "with_vec": True, - "primary_field": default_primary_key_field_name})[0] + res = self.query(client, collection_name, filter=default_search_exp, + output_fields=["*"], + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows, + "with_vec": True, + "primary_field": default_primary_key_field_name})[0] assert set(res[0].keys()) == {default_primary_key_field_name, default_vector_field_name, default_float_field_name, default_string_field_name} - client_w.drop_collection(client, collection_name) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_query_limit(self): @@ -213,34 +199,34 @@ def test_milvus_client_query_limit(self): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. 
insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - client_w.insert(client, collection_name, rows) + self.insert(client, collection_name, rows) # 3. query using ids limit = 5 - client_w.query(client, collection_name, ids=[i for i in range(default_nb)], - limit=limit, - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows[:limit], - "with_vec": True, - "primary_field": default_primary_key_field_name[:limit]}) + self.query(client, collection_name, ids=[i for i in range(default_nb)], + limit=limit, + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows[:limit], + "with_vec": True, + "primary_field": default_primary_key_field_name[:limit]}) # 4. query using filter - client_w.query(client, collection_name, filter=default_search_exp, - limit=limit, - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows[:limit], - "with_vec": True, - "primary_field": default_primary_key_field_name[:limit]})[0] - client_w.drop_collection(client, collection_name) + self.query(client, collection_name, filter=default_search_exp, + limit=limit, + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows[:limit], + "with_vec": True, + "primary_field": default_primary_key_field_name[:limit]}) + self.drop_collection(client, collection_name) -class TestMilvusClientGetInvalid(TestcaseBase): +class TestMilvusClientGetInvalid(TestMilvusClientV2Base): """ Test case of search interface """ """ @@ -251,29 +237,30 @@ class TestMilvusClientGetInvalid(TestcaseBase): @pytest.mark.tags(CaseLabel.L2) @pytest.mark.parametrize("name", - ["12-s", "12 s", "(mn)", "中文", "%$#", "".join("a" for i in range(ct.max_name_length + 1))]) + ["12-s", "12 s", "(mn)", "中文", "%$#", + "".join("a" for i in range(ct.max_name_length + 1))]) def 
test_milvus_client_get_invalid_collection_name(self, name): """ target: test get interface invalid cases method: invalid collection name expected: search/query successfully without deleted data """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. insert default_nb = 1000 rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - client_w.insert(client, collection_name, rows)[0] + self.insert(client, collection_name, rows) pks = [i for i in range(default_nb)] # 3. get first primary key error = {ct.err_code: 1100, ct.err_msg: f"Invalid collection name"} - client_w.get(client, name, ids=pks[0:1], - check_task=CheckTasks.err_res, check_items=error) - client_w.drop_collection(client, collection_name) + self.get(client, name, ids=pks[0:1], + check_task=CheckTasks.err_res, check_items=error) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_get_not_exist_collection_name(self): @@ -282,50 +269,50 @@ def test_milvus_client_get_not_exist_collection_name(self): method: invalid collection name expected: search/query successfully without deleted data """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. 
insert default_nb = 1000 rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - client_w.insert(client, collection_name, rows)[0] + self.insert(client, collection_name, rows) pks = [i for i in range(default_nb)] # 3. get first primary key name = "invalid" error = {ct.err_code: 100, ct.err_msg: f"can't find collection[database=default][collection={name}]"} - client_w.get(client, name, ids=pks[0:1], - check_task=CheckTasks.err_res, check_items=error) - client_w.drop_collection(client, collection_name) + self.get(client, name, ids=pks[0:1], + check_task=CheckTasks.err_res, check_items=error) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L2) - @pytest.mark.parametrize("invalid_ids",["中文", "%$#"]) + @pytest.mark.parametrize("invalid_ids", ["中文", "%$#"]) def test_milvus_client_get_invalid_ids(self, invalid_ids): """ target: test get interface invalid cases method: invalid collection name expected: search/query successfully without deleted data """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. insert default_nb = 1000 rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - client_w.insert(client, collection_name, rows)[0] + self.insert(client, collection_name, rows) # 3. 
get first primary key error = {ct.err_code: 1100, ct.err_msg: f"cannot parse expression"} - client_w.get(client, collection_name, ids=invalid_ids, - check_task=CheckTasks.err_res, check_items=error) - client_w.drop_collection(client, collection_name) + self.get(client, collection_name, ids=invalid_ids, + check_task=CheckTasks.err_res, check_items=error) + self.drop_collection(client, collection_name) -class TestMilvusClientGetValid(TestcaseBase): +class TestMilvusClientGetValid(TestMilvusClientV2Base): """ Test case of search interface """ @pytest.fixture(scope="function", params=[False, True]) @@ -341,6 +328,7 @@ def metric_type(self, request): # The following are valid base cases ****************************************************************** """ + @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_get_normal(self): """ @@ -348,23 +336,23 @@ def test_milvus_client_get_normal(self): method: create connection, collection, insert delete, and search expected: search/query successfully without deleted data """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. insert default_nb = 1000 rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - client_w.insert(client, collection_name, rows)[0] + self.insert(client, collection_name, rows) pks = [i for i in range(default_nb)] # 3. 
get first primary key - first_pk_data = client_w.get(client, collection_name, ids=pks[0:1])[0] + first_pk_data = self.get(client, collection_name, ids=pks[0:1])[0] assert len(first_pk_data) == len(pks[0:1]) - first_pk_data_1 = client_w.get(client, collection_name, ids=0)[0] + first_pk_data_1 = self.get(client, collection_name, ids=0)[0] assert first_pk_data == first_pk_data_1 - client_w.drop_collection(client, collection_name) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_get_output_fields(self): @@ -373,27 +361,27 @@ def test_milvus_client_get_output_fields(self): method: create connection, collection, insert delete, and search expected: search/query successfully without deleted data """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. insert default_nb = 1000 rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - client_w.insert(client, collection_name, rows)[0] + self.insert(client, collection_name, rows) pks = [i for i in range(default_nb)] # 3. 
get first primary key output_fields_array = [default_primary_key_field_name, default_vector_field_name, default_float_field_name, default_string_field_name] - first_pk_data = client_w.get(client, collection_name, ids=pks[0:1], output_fields=output_fields_array)[0] + first_pk_data = self.get(client, collection_name, ids=pks[0:1], output_fields=output_fields_array)[0] assert len(first_pk_data) == len(pks[0:1]) assert len(first_pk_data[0]) == len(output_fields_array) - first_pk_data_1 = client_w.get(client, collection_name, ids=0, output_fields=output_fields_array)[0] + first_pk_data_1 = self.get(client, collection_name, ids=0, output_fields=output_fields_array)[0] assert first_pk_data == first_pk_data_1 assert len(first_pk_data_1[0]) == len(output_fields_array) - client_w.drop_collection(client, collection_name) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) @pytest.mark.skip(reason="pymilvus issue 2056") @@ -403,24 +391,24 @@ def test_milvus_client_get_normal_string(self): method: create connection, collection, insert delete, and search expected: search/query successfully without deleted data """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, id_type="string", max_length=ct.default_length) + self.create_collection(client, collection_name, default_dim, id_type="string", max_length=ct.default_length) # 2. insert rng = np.random.default_rng(seed=19530) rows = [ {default_primary_key_field_name: str(i), default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - client_w.insert(client, collection_name, rows)[0] + self.insert(client, collection_name, rows) pks = [str(i) for i in range(default_nb)] # 3. 
get first primary key - first_pk_data = client_w.get(client, collection_name, ids=pks[0:1])[0] + first_pk_data = self.get(client, collection_name, ids=pks[0:1])[0] assert len(first_pk_data) == len(pks[0:1]) - first_pk_data_1 = client_w.get(client, collection_name, ids="0")[0] + first_pk_data_1 = self.get(client, collection_name, ids="0")[0] assert first_pk_data == first_pk_data_1 - client_w.drop_collection(client, collection_name) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L2) @pytest.mark.skip(reason="pymilvus issue 2056") @@ -430,24 +418,24 @@ def test_milvus_client_get_normal_string_output_fields(self): method: create connection, collection, insert delete, and search expected: search/query successfully without deleted data """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, id_type="string", max_length=ct.default_length) + self.create_collection(client, collection_name, default_dim, id_type="string", max_length=ct.default_length) # 2. insert rng = np.random.default_rng(seed=19530) rows = [ {default_primary_key_field_name: str(i), default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - client_w.insert(client, collection_name, rows)[0] + self.insert(client, collection_name, rows) pks = [str(i) for i in range(default_nb)] # 3. 
get first primary key output_fields_array = [default_primary_key_field_name, default_vector_field_name, default_float_field_name, default_string_field_name] - first_pk_data = client_w.get(client, collection_name, ids=pks[0:1], output_fields=output_fields_array)[0] + first_pk_data = self.get(client, collection_name, ids=pks[0:1], output_fields=output_fields_array)[0] assert len(first_pk_data) == len(pks[0:1]) assert len(first_pk_data[0]) == len(output_fields_array) - first_pk_data_1 = client_w.get(client, collection_name, ids="0", output_fields=output_fields_array)[0] + first_pk_data_1 = self.get(client, collection_name, ids="0", output_fields=output_fields_array)[0] assert first_pk_data == first_pk_data_1 assert len(first_pk_data_1[0]) == len(output_fields_array) - client_w.drop_collection(client, collection_name) \ No newline at end of file + self.drop_collection(client, collection_name) diff --git a/tests/python_client/milvus_client/test_milvus_client_rbac.py b/tests/python_client/milvus_client/test_milvus_client_rbac.py index 8e3214a9a43f2..c9f2604327071 100644 --- a/tests/python_client/milvus_client/test_milvus_client_rbac.py +++ b/tests/python_client/milvus_client/test_milvus_client_rbac.py @@ -1,18 +1,10 @@ -import multiprocessing -import numbers -import random import time -import numpy import pytest -import pandas as pd from common import common_func as cf from common import common_type as ct from utils.util_log import test_log as log -from base.client_base import TestcaseBase +from base.client_v2_base import TestMilvusClientV2Base from common.common_type import CaseLabel, CheckTasks -from base.high_level_api_wrapper import HighLevelApiWrapper -client_w = HighLevelApiWrapper() - prefix = "client_rbac" user_pre = "user" @@ -36,7 +28,7 @@ @pytest.mark.tags(CaseLabel.RBAC) -class TestMilvusClientRbacBase(TestcaseBase): +class TestMilvusClientRbacBase(TestMilvusClientV2Base): """ Test case of rbac interface """ def teardown_method(self, method): @@ -45,27 +37,27 
@@ def teardown_method(self, method): """ log.info("[utility_teardown_method] Start teardown utility test cases ...") uri = f"http://{cf.param_info.param_host}:{cf.param_info.param_port}" - client, _ = self.high_level_api_wrap.init_milvus_client(uri=uri, token=root_token) + client, _ = self.init_milvus_client(uri=uri, token=root_token) # drop users - users, _ = self.high_level_api_wrap.list_users() + users, _ = self.list_users(client) for user in users: if user != ct.default_user: - self.high_level_api_wrap.drop_user(user) - users, _ = self.high_level_api_wrap.list_users() + self.drop_user(client, user) + users, _ = self.list_users(client) assert len(users) == 1 # drop roles - roles, _ = self.high_level_api_wrap.list_roles() + roles, _ = self.list_roles(client) for role in roles: if role not in ['admin', 'public']: - privileges, _ = self.high_level_api_wrap.describe_role(role) - if privileges: - for privilege in privileges: - self.high_level_api_wrap.revoke_privilege(role, privilege["object_type"], - privilege["privilege"], privilege["object_name"]) - self.high_level_api_wrap.drop_role(role) - roles, _ = self.high_level_api_wrap.list_roles() + role_info, _ = self.describe_role(client, role) + if role_info: + for privilege in role_info.get("privileges", []): + self.revoke_privilege(client, role, privilege["object_type"], + privilege["privilege"], privilege["object_name"]) + self.drop_role(client, role) + roles, _ = self.list_roles(client) assert len(roles) == 2 super().teardown_method(method) @@ -77,9 +69,9 @@ def test_milvus_client_connect_using_token(self, host, port): expected: init successfully """ uri = f"http://{host}:{port}" - client, _ = self.high_level_api_wrap.init_milvus_client(uri=uri, token=root_token) + client, _ = self.init_milvus_client(uri=uri, token=root_token) # check success link - res = client_w.list_collections(client)[0] + res = self.list_collections(client)[0] assert res == [] def test_milvus_client_connect_using_user_password(self, host, 
port): @@ -89,10 +81,10 @@ def test_milvus_client_connect_using_user_password(self, host, port): expected: init successfully """ uri = f"http://{host}:{port}" - client, _ = self.high_level_api_wrap.init_milvus_client(uri=uri, user=ct.default_user, - password=ct.default_password) + client, _ = self.init_milvus_client(uri=uri, user=ct.default_user, + password=ct.default_password) # check success link - res = client_w.list_collections(client)[0] + res = self.list_collections(client)[0] assert res == [] def test_milvus_client_create_user(self, host, port): @@ -102,13 +94,13 @@ def test_milvus_client_create_user(self, host, port): expected: succeed """ uri = f"http://{host}:{port}" - self.high_level_api_wrap.init_milvus_client(uri=uri, user=ct.default_user, password=ct.default_password) + client, _ = self.init_milvus_client(uri=uri, user=ct.default_user, password=ct.default_password) user_name = cf.gen_unique_str(user_pre) password = cf.gen_str_by_length() - self.high_level_api_wrap.create_user(user_name=user_name, password=password) + self.create_user(client, user_name=user_name, password=password) # check - client, _ = self.high_level_api_wrap.init_milvus_client(uri=uri, user=user_name, password=password) - res = client_w.list_collections(client)[0] + client, _ = self.init_milvus_client(uri=uri, user=user_name, password=password) + res = self.list_collections(client)[0] assert res == [] def test_milvus_client_drop_user(self, host, port): @@ -118,15 +110,15 @@ def test_milvus_client_drop_user(self, host, port): expected: succeed """ uri = f"http://{host}:{port}" - self.high_level_api_wrap.init_milvus_client(uri=uri, user=ct.default_user, password=ct.default_password) + client, _ = self.init_milvus_client(uri=uri, user=ct.default_user, password=ct.default_password) user_name = cf.gen_unique_str(user_pre) password = cf.gen_str_by_length() - self.high_level_api_wrap.create_user(user_name=user_name, password=password) + self.create_user(client, user_name=user_name, 
password=password) # drop user that exists - self.high_level_api_wrap.drop_user(user_name=user_name) + self.drop_user(client, user_name=user_name) # drop user that not exists not_exist_user_name = cf.gen_unique_str(user_pre) - self.high_level_api_wrap.drop_user(user_name=not_exist_user_name) + self.drop_user(client, user_name=not_exist_user_name) def test_milvus_client_update_password(self, host, port): """ @@ -135,18 +127,18 @@ def test_milvus_client_update_password(self, host, port): expected: succeed """ uri = f"http://{host}:{port}" - self.high_level_api_wrap.init_milvus_client(uri=uri, user=ct.default_user, password=ct.default_password) + client, _ = self.init_milvus_client(uri=uri, user=ct.default_user, password=ct.default_password) user_name = cf.gen_unique_str(user_pre) password = cf.gen_str_by_length() - self.high_level_api_wrap.create_user(user_name=user_name, password=password) + self.create_user(client, user_name=user_name, password=password) new_password = cf.gen_str_by_length() - self.high_level_api_wrap.update_password(user_name=user_name, old_password=password, new_password=new_password) + self.update_password(client, user_name=user_name, old_password=password, new_password=new_password) # check - client, _ = self.high_level_api_wrap.init_milvus_client(uri=uri, user=user_name, password=new_password) - res = client_w.list_collections(client)[0] + client, _ = self.init_milvus_client(uri=uri, user=user_name, password=new_password) + res = self.list_collections(client)[0] assert len(res) == 0 - self.high_level_api_wrap.init_milvus_client(uri=uri, user=user_name, password=password, - check_task=CheckTasks.check_permission_deny) + self.init_milvus_client(uri=uri, user=user_name, password=password, + check_task=CheckTasks.check_auth_failure) def test_milvus_client_list_users(self, host, port): """ @@ -155,13 +147,13 @@ def test_milvus_client_list_users(self, host, port): expected: succeed """ uri = f"http://{host}:{port}" - client, _ = 
self.high_level_api_wrap.init_milvus_client(uri=uri, token=root_token) + client, _ = self.init_milvus_client(uri=uri, token=root_token) user_name1 = cf.gen_unique_str(user_pre) user_name2 = cf.gen_unique_str(user_pre) password = cf.gen_str_by_length() - self.high_level_api_wrap.create_user(user_name=user_name1, password=password) - self.high_level_api_wrap.create_user(user_name=user_name2, password=password) - res = self.high_level_api_wrap.list_users()[0] + self.create_user(client, user_name=user_name1, password=password) + self.create_user(client, user_name=user_name2, password=password) + res = self.list_users(client)[0] assert {ct.default_user, user_name1, user_name2}.issubset(set(res)) is True def test_milvus_client_describe_user(self, host, port): @@ -171,19 +163,19 @@ def test_milvus_client_describe_user(self, host, port): expected: succeed """ uri = f"http://{host}:{port}" - client, _ = self.high_level_api_wrap.init_milvus_client(uri=uri, token=root_token) + client, _ = self.init_milvus_client(uri=uri, token=root_token) user_name = cf.gen_unique_str(user_pre) password = cf.gen_str_by_length() - self.high_level_api_wrap.create_user(user_name=user_name, password=password) + self.create_user(client, user_name=user_name, password=password) # describe one self - res, _ = self.high_level_api_wrap.describe_user(user_name=ct.default_user) + res, _ = self.describe_user(client, user_name=ct.default_user) assert res["user_name"] == ct.default_user # describe other users - res, _ = self.high_level_api_wrap.describe_user(user_name=user_name) + res, _ = self.describe_user(client, user_name=user_name) assert res["user_name"] == user_name # describe user that not exists user_not_exist = cf.gen_unique_str(user_pre) - res, _ = self.high_level_api_wrap.describe_user(user_name=user_not_exist) + res, _ = self.describe_user(client, user_name=user_not_exist) assert res == {} def test_milvus_client_create_role(self, host, port): @@ -193,9 +185,9 @@ def 
test_milvus_client_create_role(self, host, port): expected: succeed """ uri = f"http://{host}:{port}" - client, _ = self.high_level_api_wrap.init_milvus_client(uri=uri, token=root_token) + client, _ = self.init_milvus_client(uri=uri, token=root_token) role_name = cf.gen_unique_str(role_pre) - self.high_level_api_wrap.create_role(role_name=role_name) + self.create_role(client, role_name=role_name) def test_milvus_client_drop_role(self, host, port): """ @@ -204,10 +196,10 @@ def test_milvus_client_drop_role(self, host, port): expected: succeed """ uri = f"http://{host}:{port}" - client, _ = self.high_level_api_wrap.init_milvus_client(uri=uri, token=root_token) + client, _ = self.init_milvus_client(uri=uri, token=root_token) role_name = cf.gen_unique_str(role_pre) - self.high_level_api_wrap.create_role(role_name=role_name) - self.high_level_api_wrap.drop_role(role_name=role_name) + self.create_role(client, role_name=role_name) + self.drop_role(client, role_name=role_name) def test_milvus_client_describe_role(self, host, port): """ @@ -216,11 +208,11 @@ def test_milvus_client_describe_role(self, host, port): expected: succeed """ uri = f"http://{host}:{port}" - client, _ = self.high_level_api_wrap.init_milvus_client(uri=uri, token=root_token) + client, _ = self.init_milvus_client(uri=uri, token=root_token) role_name = cf.gen_unique_str(role_pre) - self.high_level_api_wrap.create_role(role_name=role_name) + self.create_role(client, role_name=role_name) # describe a role that exists - self.high_level_api_wrap.describe_role(role_name=role_name) + self.describe_role(client, role_name=role_name) def test_milvus_client_list_roles(self, host, port): """ @@ -229,10 +221,10 @@ def test_milvus_client_list_roles(self, host, port): expected: succeed """ uri = f"http://{host}:{port}" - client, _ = self.high_level_api_wrap.init_milvus_client(uri=uri, token=root_token) + client, _ = self.init_milvus_client(uri=uri, token=root_token) role_name = cf.gen_unique_str(role_pre) - 
self.high_level_api_wrap.create_role(role_name=role_name) - res, _ = self.high_level_api_wrap.list_roles() + self.create_role(client, role_name=role_name) + res, _ = self.list_roles(client) assert role_name in res def test_milvus_client_grant_role(self, host, port): @@ -242,13 +234,13 @@ def test_milvus_client_grant_role(self, host, port): expected: succeed """ uri = f"http://{host}:{port}" - client, _ = self.high_level_api_wrap.init_milvus_client(uri=uri, token=root_token) + client, _ = self.init_milvus_client(uri=uri, token=root_token) user_name = cf.gen_unique_str(user_pre) role_name = cf.gen_unique_str(role_pre) password = cf.gen_str_by_length() - self.high_level_api_wrap.create_user(user_name=user_name, password=password) - self.high_level_api_wrap.create_role(role_name=role_name) - self.high_level_api_wrap.grant_role(user_name=user_name, role_name=role_name) + self.create_user(client, user_name=user_name, password=password) + self.create_role(client, role_name=role_name) + self.grant_role(client, user_name=user_name, role_name=role_name) def test_milvus_client_revoke_role(self, host, port): """ @@ -257,18 +249,19 @@ def test_milvus_client_revoke_role(self, host, port): expected: succeed """ uri = f"http://{host}:{port}" - client, _ = self.high_level_api_wrap.init_milvus_client(uri=uri, token=root_token) + client, _ = self.init_milvus_client(uri=uri, token=root_token) user_name = cf.gen_unique_str(user_pre) role_name = cf.gen_unique_str(role_pre) password = cf.gen_str_by_length() - self.high_level_api_wrap.create_user(user_name=user_name, password=password) - self.high_level_api_wrap.create_role(role_name=role_name) + self.create_user(client, user_name=user_name, password=password) + self.create_role(client, role_name=role_name) # revoke a user that does not exist - self.high_level_api_wrap.revoke_role(user_name=user_name, role_name=role_name) + self.revoke_role(client, user_name=user_name, role_name=role_name) # revoke a user that exists - 
self.high_level_api_wrap.grant_role(user_name=user_name, role_name=role_name) - self.high_level_api_wrap.revoke_role(user_name=user_name, role_name=role_name) + self.grant_role(client, user_name=user_name, role_name=role_name) + self.revoke_role(client, user_name=user_name, role_name=role_name) + @pytest.mark.skip("TODO: need update for new privilege") def test_milvus_client_grant_privilege(self, host, port): """ target: test milvus client api grant_privilege @@ -277,29 +270,30 @@ def test_milvus_client_grant_privilege(self, host, port): """ # prepare a collection uri = f"http://{host}:{port}" - client_root, _ = self.high_level_api_wrap.init_milvus_client(uri=uri, token=root_token) + client_root, _ = self.init_milvus_client(uri=uri, token=root_token) coll_name = cf.gen_unique_str() - client_w.create_collection(client_root, coll_name, default_dim, consistency_level="Strong") + self.create_collection(client_root, coll_name, default_dim, consistency_level="Strong") # create a new role and a new user ( no privilege) user_name = cf.gen_unique_str(user_pre) role_name = cf.gen_unique_str(role_pre) password = cf.gen_str_by_length() - self.high_level_api_wrap.create_user(user_name=user_name, password=password) - self.high_level_api_wrap.create_role(role_name=role_name) - self.high_level_api_wrap.grant_role(user_name=user_name, role_name=role_name) + self.create_user(client_root, user_name=user_name, password=password) + self.create_role(client_root, role_name=role_name) + self.grant_role(client_root, user_name=user_name, role_name=role_name) - # check the role has no privilege of drop collection - client, _ = self.high_level_api_wrap.init_milvus_client(uri=uri, user=user_name, password=password) - self.high_level_api_wrap.drop_collection(client, coll_name, check_task=CheckTasks.check_permission_deny) + # check the client role has no privilege of drop collection + client, _ = self.init_milvus_client(uri=uri, user=user_name, password=password) + self.drop_collection(client, 
coll_name, check_task=CheckTasks.check_permission_deny) - # grant the role with the privilege of drop collection - self.high_level_api_wrap.init_milvus_client(uri=uri, token=root_token) - self.high_level_api_wrap.grant_privilege(role_name, "Global", "*", "DropCollection") + # client_root grants the role with the privilege of drop collection to the client role + # self.init_milvus_client(uri=uri, token=root_token) + self.grant_privilege(client_root, role_name, "Global", "*", "DropCollection") - # check the role has privilege of drop collection - self.high_level_api_wrap.drop_collection(client, coll_name) + # check the client role has privilege of drop collection + self.drop_collection(client, coll_name) + @pytest.mark.skip("TODO: need update for new privilege") def test_milvus_client_revoke_privilege(self, host, port): """ target: test milvus client api revoke_privilege @@ -308,35 +302,36 @@ def test_milvus_client_revoke_privilege(self, host, port): """ # prepare a collection uri = f"http://{host}:{port}" - client_root, _ = self.high_level_api_wrap.init_milvus_client(uri=uri, token=root_token) + client_root, _ = self.init_milvus_client(uri=uri, token=root_token) coll_name = cf.gen_unique_str() # create a new role and a new user ( no privilege) user_name = cf.gen_unique_str(user_pre) role_name = cf.gen_unique_str(role_pre) password = cf.gen_str_by_length() - self.high_level_api_wrap.create_user(user_name=user_name, password=password) - self.high_level_api_wrap.create_role(role_name=role_name) - self.high_level_api_wrap.grant_role(user_name=user_name, role_name=role_name) - self.high_level_api_wrap.grant_privilege(role_name, "Global", "*", "CreateCollection") + self.create_user(client_root, user_name=user_name, password=password) + self.create_role(client_root, role_name=role_name) + self.grant_role(client_root, user_name=user_name, role_name=role_name) + self.grant_privilege(client_root, role_name, "Global", "*", "CreateCollection") time.sleep(60) # check the role 
has privilege of create collection - client, _ = self.high_level_api_wrap.init_milvus_client(uri=uri, user=user_name, password=password) - client_w.create_collection(client, coll_name, default_dim, consistency_level="Strong") + client, _ = self.init_milvus_client(uri=uri, user=user_name, password=password) + self.create_collection(client, coll_name, default_dim, consistency_level="Strong") # revoke the role with the privilege of create collection - self.high_level_api_wrap.init_milvus_client(uri=uri, token=root_token) - self.high_level_api_wrap.revoke_privilege(role_name, "Global", "*", "CreateCollection") + # self.init_milvus_client(uri=uri, token=root_token) + self.revoke_privilege(client_root, role_name, "Global", "*", "CreateCollection") # check the role has no privilege of create collection - self.high_level_api_wrap.create_collection(client, coll_name, default_dim, consistency_level="Strong", - check_task=CheckTasks.check_permission_deny) + self.create_collection(client, coll_name, default_dim, consistency_level="Strong", + check_task=CheckTasks.check_permission_deny) @pytest.mark.tags(CaseLabel.RBAC) -class TestMilvusClientRbacInvalid(TestcaseBase): +class TestMilvusClientRbacInvalid(TestMilvusClientV2Base): """ Test case of rbac interface """ + def test_milvus_client_init_token_invalid(self, host, port): """ target: test milvus client api token invalid @@ -345,8 +340,8 @@ def test_milvus_client_init_token_invalid(self, host, port): """ uri = f"http://{host}:{port}" wrong_token = root_token + "kk" - client, _ = self.high_level_api_wrap.init_milvus_client(uri=uri, token=wrong_token, - check_task=CheckTasks.check_auth_failure) + self.init_milvus_client(uri=uri, token=wrong_token, + check_task=CheckTasks.check_auth_failure) def test_milvus_client_init_username_invalid(self, host, port): """ @@ -356,9 +351,9 @@ def test_milvus_client_init_username_invalid(self, host, port): """ uri = f"http://{host}:{port}" invalid_user_name = ct.default_user + "nn" - client, _ 
= self.high_level_api_wrap.init_milvus_client(uri=uri, user=invalid_user_name, - password=ct.default_password, - check_task=CheckTasks.check_auth_failure) + self.init_milvus_client(uri=uri, user=invalid_user_name, + password=ct.default_password, + check_task=CheckTasks.check_auth_failure) def test_milvus_client_init_password_invalid(self, host, port): """ @@ -368,9 +363,9 @@ def test_milvus_client_init_password_invalid(self, host, port): """ uri = f"http://{host}:{port}" wrong_password = ct.default_password + "kk" - client, _ = self.high_level_api_wrap.init_milvus_client(uri=uri, user=ct.default_user, - password=wrong_password, - check_task=CheckTasks.check_auth_failure) + self.init_milvus_client(uri=uri, user=ct.default_user, + password=wrong_password, + check_task=CheckTasks.check_auth_failure) @pytest.mark.parametrize("invalid_name", ["", "0", "n@me", "h h"]) def test_milvus_client_create_user_value_invalid(self, host, port, invalid_name): @@ -380,11 +375,13 @@ def test_milvus_client_create_user_value_invalid(self, host, port, invalid_name) expected: raise exception """ uri = f"http://{host}:{port}" - self.high_level_api_wrap.init_milvus_client(uri=uri, token=root_token) - self.high_level_api_wrap.create_user(invalid_name, ct.default_password, - check_task=CheckTasks.err_res, - check_items={ct.err_code: 1100, - ct.err_msg: "invalid user name"}) + client, _ = self.init_milvus_client(uri=uri, token=root_token) + error_msg = "invalid user name" + if invalid_name == "": + error_msg = "username must be not empty" + self.create_user(client, invalid_name, ct.default_password, + check_task=CheckTasks.err_res, + check_items={ct.err_code: 1100, ct.err_msg: error_msg}) @pytest.mark.parametrize("invalid_name", [1, [], None, {}]) def test_milvus_client_create_user_type_invalid(self, host, port, invalid_name): @@ -394,11 +391,11 @@ def test_milvus_client_create_user_type_invalid(self, host, port, invalid_name): expected: raise exception """ uri = f"http://{host}:{port}" - 
self.high_level_api_wrap.init_milvus_client(uri=uri, token=root_token) - self.high_level_api_wrap.create_user(invalid_name, ct.default_password, - check_task=CheckTasks.err_res, - check_items={ct.err_code: 1, - ct.err_msg: "invalid user name"}) + client, _ = self.init_milvus_client(uri=uri, token=root_token) + error_msg = f"`user` value {invalid_name} is illegal" + self.create_user(client, invalid_name, ct.default_password, + check_task=CheckTasks.err_res, + check_items={ct.err_code: 1, ct.err_msg: error_msg}) def test_milvus_client_create_user_exist(self, host, port): """ @@ -407,11 +404,11 @@ def test_milvus_client_create_user_exist(self, host, port): expected: raise exception """ uri = f"http://{host}:{port}" - self.high_level_api_wrap.init_milvus_client(uri=uri, token=root_token) - self.high_level_api_wrap.create_user("root", ct.default_password, - check_task=CheckTasks.err_res, - check_items={ct.err_code: 65535, - ct.err_msg: "user already exists: root"}) + client, _ = self.init_milvus_client(uri=uri, token=root_token) + self.create_user(client, "root", ct.default_password, + check_task=CheckTasks.err_res, + check_items={ct.err_code: 65535, + ct.err_msg: "user already exists: root"}) @pytest.mark.parametrize("invalid_password", ["", "0", "p@ss", "h h", "1+1=2"]) def test_milvus_client_create_user_password_invalid_value(self, host, port, invalid_password): @@ -422,11 +419,11 @@ def test_milvus_client_create_user_password_invalid_value(self, host, port, inva """ uri = f"http://{host}:{port}" user_name = cf.gen_unique_str(user_pre) - self.high_level_api_wrap.init_milvus_client(uri=uri, token=root_token) - self.high_level_api_wrap.create_user(user_name, invalid_password, - check_task=CheckTasks.err_res, - check_items={ct.err_code: 1100, - ct.err_msg: "invalid password"}) + client, _ = self.init_milvus_client(uri=uri, token=root_token) + self.create_user(client, user_name, invalid_password, + check_task=CheckTasks.err_res, + check_items={ct.err_code: 1100, + 
ct.err_msg: "invalid password"}) @pytest.mark.parametrize("invalid_password", [1, [], None, {}]) def test_milvus_client_create_user_password_invalid_type(self, host, port, invalid_password): @@ -437,11 +434,12 @@ def test_milvus_client_create_user_password_invalid_type(self, host, port, inval """ uri = f"http://{host}:{port}" user_name = cf.gen_unique_str(user_pre) - self.high_level_api_wrap.init_milvus_client(uri=uri, token=root_token) - self.high_level_api_wrap.create_user(user_name, invalid_password, - check_task=CheckTasks.err_res, - check_items={ct.err_code: 1, - ct.err_msg: "invalid password"}) + client, _ = self.init_milvus_client(uri=uri, token=root_token) + error_msg = f"`password` value {invalid_password} is illegal" + self.create_user(client, user_name, invalid_password, + check_task=CheckTasks.err_res, + check_items={ct.err_code: 1, + ct.err_msg: error_msg}) def test_milvus_client_update_password_user_not_exist(self, host, port): """ @@ -450,15 +448,15 @@ def test_milvus_client_update_password_user_not_exist(self, host, port): expected: raise exception """ uri = f"http://{host}:{port}" - self.high_level_api_wrap.init_milvus_client(uri=uri, user=ct.default_user, password=ct.default_password) + client, _ = self.init_milvus_client(uri=uri, user=ct.default_user, password=ct.default_password) user_name = cf.gen_unique_str(user_pre) password = cf.gen_str_by_length() new_password = cf.gen_str_by_length() - self.high_level_api_wrap.update_password(user_name=user_name, old_password=password, new_password=new_password, - check_task=CheckTasks.err_res, - check_items={ct.err_code: 1400, - ct.err_msg: "old password not correct for %s: " - "not authenticated" % user_name}) + self.update_password(client, user_name=user_name, old_password=password, new_password=new_password, + check_task=CheckTasks.err_res, + check_items={ct.err_code: 1400, + ct.err_msg: "old password not correct for %s: " + "not authenticated" % user_name}) def 
test_milvus_client_update_password_password_wrong(self, host, port): """ @@ -467,17 +465,17 @@ def test_milvus_client_update_password_password_wrong(self, host, port): expected: succeed """ uri = f"http://{host}:{port}" - self.high_level_api_wrap.init_milvus_client(uri=uri, user=ct.default_user, password=ct.default_password) + client, _ = self.init_milvus_client(uri=uri, user=ct.default_user, password=ct.default_password) user_name = cf.gen_unique_str(user_pre) password = cf.gen_str_by_length() - self.high_level_api_wrap.create_user(user_name=user_name, password=password) + self.create_user(client, user_name=user_name, password=password) new_password = cf.gen_str_by_length() wrong_password = password + 'kk' - self.high_level_api_wrap.update_password(user_name=user_name, old_password=wrong_password, - new_password=new_password, check_task=CheckTasks.err_res, - check_items={ct.err_code: 1400, - ct.err_msg: "old password not correct for %s: " - "not authenticated" % user_name}) + self.update_password(client, user_name=user_name, old_password=wrong_password, + new_password=new_password, check_task=CheckTasks.err_res, + check_items={ct.err_code: 1400, + ct.err_msg: "old password not correct for %s: " + "not authenticated" % user_name}) def test_milvus_client_update_password_new_password_same(self, host, port): """ @@ -486,11 +484,11 @@ def test_milvus_client_update_password_new_password_same(self, host, port): expected: succeed """ uri = f"http://{host}:{port}" - self.high_level_api_wrap.init_milvus_client(uri=uri, user=ct.default_user, password=ct.default_password) + client, _ = self.init_milvus_client(uri=uri, user=ct.default_user, password=ct.default_password) user_name = cf.gen_unique_str(user_pre) password = cf.gen_str_by_length() - self.high_level_api_wrap.create_user(user_name=user_name, password=password) - self.high_level_api_wrap.update_password(user_name=user_name, old_password=password, new_password=password) + self.create_user(client, user_name=user_name, 
password=password) + self.update_password(client, user_name=user_name, old_password=password, new_password=password) @pytest.mark.parametrize("invalid_password", ["", "0", "p@ss", "h h", "1+1=2"]) def test_milvus_client_update_password_new_password_invalid(self, host, port, invalid_password): @@ -500,14 +498,14 @@ def test_milvus_client_update_password_new_password_invalid(self, host, port, in expected: succeed """ uri = f"http://{host}:{port}" - self.high_level_api_wrap.init_milvus_client(uri=uri, user=ct.default_user, password=ct.default_password) + client, _ = self.init_milvus_client(uri=uri, user=ct.default_user, password=ct.default_password) user_name = cf.gen_unique_str(user_pre) password = cf.gen_str_by_length() - self.high_level_api_wrap.create_user(user_name=user_name, password=password) - self.high_level_api_wrap.update_password(user_name=user_name, old_password=password, - new_password=invalid_password, check_task=CheckTasks.err_res, - check_items={ct.err_code: 1100, - ct.err_msg: "invalid password"}) + self.create_user(client, user_name=user_name, password=password) + self.update_password(client, user_name=user_name, old_password=password, + new_password=invalid_password, check_task=CheckTasks.err_res, + check_items={ct.err_code: 1100, + ct.err_msg: "invalid password"}) def test_milvus_client_create_role_invalid(self, host, port): """ @@ -516,18 +514,14 @@ def test_milvus_client_create_role_invalid(self, host, port): expected: raise exception """ uri = f"http://{host}:{port}" - client, _ = self.high_level_api_wrap.init_milvus_client(uri=uri, token=root_token) + client, _ = self.init_milvus_client(uri=uri, token=root_token) role_name = cf.gen_unique_str(role_pre) - self.high_level_api_wrap.create_role(role_name=role_name) - # create existed role - error_msg = f"role [name:{role_pre}] already exists" - self.high_level_api_wrap.create_role(role_name=role_name, check_task=CheckTasks.err_res, - check_items={ct.err_code: 65535, ct.err_msg: error_msg}) - # 
create role public or admin - self.high_level_api_wrap.create_role(role_name="public", check_task=CheckTasks.err_res, - check_items={ct.err_code: 65535, ct.err_msg: error_msg}) - self.high_level_api_wrap.create_role(role_name="admin", check_task=CheckTasks.err_res, - check_items={ct.err_code: 65535, ct.err_msg: error_msg}) + self.create_role(client, role_name=role_name) + # create existed role and system reserved role + for name in ["admin", "public", role_name]: + error_msg = f'role [name:"{name}"] already exists' + self.create_role(client, role_name=name, check_task=CheckTasks.err_res, + check_items={ct.err_code: 65535, ct.err_msg: error_msg}) def test_milvus_client_drop_role_invalid(self, host, port): """ @@ -536,12 +530,12 @@ def test_milvus_client_drop_role_invalid(self, host, port): expected: raise exception """ uri = f"http://{host}:{port}" - client, _ = self.high_level_api_wrap.init_milvus_client(uri=uri, token=root_token) + client, _ = self.init_milvus_client(uri=uri, token=root_token) role_name = cf.gen_unique_str(role_pre) - self.high_level_api_wrap.drop_role(role_name=role_name, check_task=CheckTasks.err_res, - check_items={ct.err_code: 65535, - ct.err_msg: "not found the role, maybe the role isn't " - "existed or internal system error"}) + self.drop_role(client, role_name=role_name, check_task=CheckTasks.err_res, + check_items={ct.err_code: 65535, + ct.err_msg: "not found the role, maybe the role isn't " + "existed or internal system error"}) def test_milvus_client_describe_role_invalid(self, host, port): """ @@ -550,12 +544,12 @@ def test_milvus_client_describe_role_invalid(self, host, port): expected: raise exception """ uri = f"http://{host}:{port}" - client, _ = self.high_level_api_wrap.init_milvus_client(uri=uri, token=root_token) + client, _ = self.init_milvus_client(uri=uri, token=root_token) # describe a role that does not exist role_not_exist = cf.gen_unique_str(role_pre) error_msg = "not found the role, maybe the role isn't existed or 
internal system error" - self.high_level_api_wrap.describe_role(role_name=role_not_exist, check_task=CheckTasks.err_res, - check_items={ct.err_code: 65535, ct.err_msg: error_msg}) + self.describe_role(client, role_name=role_not_exist, check_task=CheckTasks.err_res, + check_items={ct.err_code: 65535, ct.err_msg: error_msg}) def test_milvus_client_grant_role_user_not_exist(self, host, port): """ @@ -564,15 +558,15 @@ def test_milvus_client_grant_role_user_not_exist(self, host, port): expected: succeed """ uri = f"http://{host}:{port}" - client, _ = self.high_level_api_wrap.init_milvus_client(uri=uri, token=root_token) + client, _ = self.init_milvus_client(uri=uri, token=root_token) user_name = cf.gen_unique_str(user_pre) role_name = cf.gen_unique_str(role_pre) - self.high_level_api_wrap.create_role(role_name=role_name) - self.high_level_api_wrap.grant_role(user_name=user_name, role_name=role_name, - check_task=CheckTasks.err_res, - check_items={ct.err_code: 65536, - ct.err_msg: "not found the user, maybe the user " - "isn't existed or internal system error"}) + self.create_role(client, role_name=role_name) + self.grant_role(client, user_name=user_name, role_name=role_name, + check_task=CheckTasks.err_res, + check_items={ct.err_code: 65536, + ct.err_msg: "not found the user, maybe the user " + "isn't existed or internal system error"}) def test_milvus_client_grant_role_role_not_exist(self, host, port): """ @@ -581,20 +575,20 @@ def test_milvus_client_grant_role_role_not_exist(self, host, port): expected: succeed """ uri = f"http://{host}:{port}" - client, _ = self.high_level_api_wrap.init_milvus_client(uri=uri, token=root_token) + client, _ = self.init_milvus_client(uri=uri, token=root_token) user_name = cf.gen_unique_str(user_pre) role_name = cf.gen_unique_str(role_pre) password = cf.gen_str_by_length() - self.high_level_api_wrap.create_user(user_name=user_name, password=password) - self.high_level_api_wrap.grant_role(user_name=user_name, role_name=role_name, - 
check_task=CheckTasks.err_res, - check_items={ct.err_code: 65536, - ct.err_msg: "not found the role, maybe the role " - "isn't existed or internal system error"}) + self.create_user(client, user_name=user_name, password=password) + self.grant_role(client, user_name=user_name, role_name=role_name, + check_task=CheckTasks.err_res, + check_items={ct.err_code: 65536, + ct.err_msg: "not found the role, maybe the role " + "isn't existed or internal system error"}) @pytest.mark.tags(CaseLabel.RBAC) -class TestMilvusClientRbacAdvance(TestcaseBase): +class TestMilvusClientRbacAdvance(TestMilvusClientV2Base): """ Test case of rbac interface """ def teardown_method(self, method): @@ -603,27 +597,27 @@ def teardown_method(self, method): """ log.info("[utility_teardown_method] Start teardown utility test cases ...") uri = f"http://{cf.param_info.param_host}:{cf.param_info.param_port}" - client, _ = self.high_level_api_wrap.init_milvus_client(uri=uri, token=root_token) + client, _ = self.init_milvus_client(uri=uri, token=root_token) # drop users - users, _ = self.high_level_api_wrap.list_users() + users, _ = self.list_users(client) for user in users: if user != ct.default_user: - self.high_level_api_wrap.drop_user(user) - users, _ = self.high_level_api_wrap.list_users() + self.drop_user(client, user) + users, _ = self.list_users(client) assert len(users) == 1 # drop roles - roles, _ = self.high_level_api_wrap.list_roles() + roles, _ = self.list_roles(client) for role in roles: if role not in ['admin', 'public']: - privileges, _ = self.high_level_api_wrap.describe_role(role) + privileges, _ = self.describe_role(client, role) if privileges: for privilege in privileges: - self.high_level_api_wrap.revoke_privilege(role, privilege["object_type"], - privilege["privilege"], privilege["object_name"]) - self.high_level_api_wrap.drop_role(role) - roles, _ = self.high_level_api_wrap.list_roles() + self.revoke_privilege(client, role, privilege["object_type"], + privilege["privilege"], 
privilege["object_name"]) + self.drop_role(client, role) + roles, _ = self.list_roles(client) assert len(roles) == 2 super().teardown_method(method) diff --git a/tests/python_client/milvus_client/test_milvus_client_search.py b/tests/python_client/milvus_client/test_milvus_client_search.py index f57429278485a..0cb5f6c6e1daf 100644 --- a/tests/python_client/milvus_client/test_milvus_client_search.py +++ b/tests/python_client/milvus_client/test_milvus_client_search.py @@ -1,28 +1,15 @@ -import multiprocessing -import numbers -import random -import numpy -import threading import pytest -import pandas as pd -import decimal -from decimal import Decimal, getcontext -from time import sleep -import heapq -from base.client_base import TestcaseBase +from base.client_v2_base import TestMilvusClientV2Base from utils.util_log import test_log as log from common import common_func as cf from common import common_type as ct from common.common_type import CaseLabel, CheckTasks from utils.util_pymilvus import * from common.constants import * -from pymilvus.orm.types import CONSISTENCY_STRONG, CONSISTENCY_BOUNDED, CONSISTENCY_SESSION, CONSISTENCY_EVENTUALLY from pymilvus import DataType -from base.high_level_api_wrapper import HighLevelApiWrapper -client_w = HighLevelApiWrapper() -prefix = "milvus_client_api_search" +prefix = "client_search" epsilon = ct.epsilon default_nb = ct.default_nb default_nb_medium = ct.default_nb_medium @@ -47,7 +34,7 @@ default_string_array_field_name = ct.default_string_array_field_name -class TestMilvusClientSearchInvalid(TestcaseBase): +class TestMilvusClientSearchInvalid(TestMilvusClientV2Base): """ Test case of search interface """ @pytest.fixture(scope="function", params=[False, True]) @@ -72,12 +59,12 @@ def test_milvus_client_collection_invalid_primary_field(self): method: create collection with invalid primary field expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = 
cf.gen_unique_str(prefix) # 1. create collection error = {ct.err_code: 1, ct.err_msg: f"Param id_type must be int or string"} - client_w.create_collection(client, collection_name, default_dim, id_type="invalid", - check_task=CheckTasks.err_res, check_items=error) + self.create_collection(client, collection_name, default_dim, id_type="invalid", + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_collection_string_auto_id(self): @@ -86,13 +73,13 @@ def test_milvus_client_collection_string_auto_id(self): method: create collection with auto id on string primary key expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection error = {ct.err_code: 65535, ct.err_msg: f"type param(max_length) should be specified for varChar " f"field of collection {collection_name}"} - client_w.create_collection(client, collection_name, default_dim, id_type="string", auto_id=True, - check_task=CheckTasks.err_res, check_items=error) + self.create_collection(client, collection_name, default_dim, id_type="string", auto_id=True, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_create_same_collection_different_params(self): @@ -102,18 +89,18 @@ def test_milvus_client_create_same_collection_different_params(self): expected: 1. Successfully to create collection with same params 2. Report errors for creating collection with same name and different params """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) # 2. 
create collection with same params - client_w.create_collection(client, collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) # 3. create collection with same name and different params error = {ct.err_code: 1, ct.err_msg: f"create duplicate collection with different parameters, " f"collection: {collection_name}"} - client_w.create_collection(client, collection_name, default_dim+1, - check_task=CheckTasks.err_res, check_items=error) - client_w.drop_collection(client, collection_name) + self.create_collection(client, collection_name, default_dim + 1, + check_task=CheckTasks.err_res, check_items=error) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_collection_invalid_metric_type(self): @@ -122,14 +109,14 @@ def test_milvus_client_collection_invalid_metric_type(self): method: create collection with auto id on string primary key expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. 
create collection error = {ct.err_code: 1100, ct.err_msg: "float vector index does not support metric type: invalid: " "invalid parameter[expected=valid index params][actual=invalid index params]"} - client_w.create_collection(client, collection_name, default_dim, metric_type="invalid", - check_task=CheckTasks.err_res, check_items=error) + self.create_collection(client, collection_name, default_dim, metric_type="invalid", + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) @pytest.mark.skip("https://github.com/milvus-io/milvus/issues/29880") @@ -139,23 +126,23 @@ def test_milvus_client_search_not_consistent_metric_type(self, metric_type): method: create connection, collection, insert and search with not consistent metric type expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim) + self.create_collection(client, collection_name, default_dim) # 2. 
search rng = np.random.default_rng(seed=19530) vectors_to_search = rng.random((1, 8)) search_params = {"metric_type": metric_type} error = {ct.err_code: 1100, ct.err_msg: f"metric type not match: invalid parameter[expected=IP][actual={metric_type}]"} - client_w.search(client, collection_name, vectors_to_search, limit=default_limit, - search_params=search_params, - check_task=CheckTasks.err_res, check_items=error) - client_w.drop_collection(client, collection_name) + self.search(client, collection_name, vectors_to_search, limit=default_limit, + search_params=search_params, + check_task=CheckTasks.err_res, check_items=error) + self.drop_collection(client, collection_name) -class TestMilvusClientSearchValid(TestcaseBase): +class TestMilvusClientSearchValid(TestMilvusClientV2Base): """ Test case of search interface """ @pytest.fixture(scope="function", params=[False, True]) @@ -179,42 +166,42 @@ def test_milvus_client_search_query_default(self): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) - client_w.using_database(client, "default") + self.using_database(client, "default") # 1. 
create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Bounded") - collections = client_w.list_collections(client)[0] + self.create_collection(client, collection_name, default_dim, consistency_level="Bounded") + collections = self.list_collections(client)[0] assert collection_name in collections - client_w.describe_collection(client, collection_name, - check_task=CheckTasks.check_describe_collection_property, - check_items={"collection_name": collection_name, - "dim": default_dim, - "consistency_level": 0}) + self.describe_collection(client, collection_name, + check_task=CheckTasks.check_describe_collection_property, + check_items={"collection_name": collection_name, + "dim": default_dim, + "consistency_level": 0}) # 2. insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - client_w.insert(client, collection_name, rows) - client_w.flush(client, collection_name) - # assert client_w.num_entities(client, collection_name)[0] == default_nb + self.insert(client, collection_name, rows) + self.flush(client, collection_name) + # assert self.num_entities(client, collection_name)[0] == default_nb # 3. search vectors_to_search = rng.random((1, default_dim)) insert_ids = [i for i in range(default_nb)] - client_w.search(client, collection_name, vectors_to_search, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": insert_ids, - "limit": default_limit}) + self.search(client, collection_name, vectors_to_search, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": insert_ids, + "limit": default_limit}) # 4. 
query - client_w.query(client, collection_name, filter=default_search_exp, - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows, - "with_vec": True, - "primary_field": default_primary_key_field_name}) - client_w.release_collection(client, collection_name) - client_w.drop_collection(client, collection_name) + self.query(client, collection_name, filter=default_search_exp, + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows, + "with_vec": True, + "primary_field": default_primary_key_field_name}) + self.release_collection(client, collection_name) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) @pytest.mark.skip(reason="issue #36484") @@ -225,27 +212,29 @@ def test_milvus_client_search_query_self_creation_default(self, nullable): method: create collection expected: create collection with default schema, index, and load successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) dim = 128 # 1. 
create collection - schema = client_w.create_schema(client, enable_dynamic_field=False)[0] - schema.add_field(default_primary_key_field_name, DataType.VARCHAR, max_length=64, is_primary=True, auto_id = False) + schema = self.create_schema(client, enable_dynamic_field=False)[0] + schema.add_field(default_primary_key_field_name, DataType.VARCHAR, max_length=64, is_primary=True, + auto_id=False) schema.add_field(default_vector_field_name, DataType.FLOAT_VECTOR, dim=dim) schema.add_field(default_string_field_name, DataType.VARCHAR, max_length=64, is_partition_key=True) schema.add_field("nullable_field", DataType.INT64, nullable=True, default_value=10) schema.add_field("array_field", DataType.ARRAY, element_type=DataType.INT64, max_capacity=12, max_length=64, nullable=True) - index_params = client_w.prepare_index_params(client)[0] + index_params = self.prepare_index_params(client)[0] index_params.add_index(default_vector_field_name, metric_type="COSINE") - client_w.create_collection(client, collection_name, dimension=dim, schema=schema, index_params=index_params) + self.create_collection(client, collection_name, dimension=dim, schema=schema, index_params=index_params) # 2. 
insert rng = np.random.default_rng(seed=19530) - rows = [{default_primary_key_field_name: str(i), default_vector_field_name: list(rng.random((1, default_dim))[0]), - default_string_field_name: str(i), "nullable_field": None, "array_field": None} for i in range(default_nb)] - client_w.insert(client, collection_name, rows) - if client_w.has_collection(client, collection_name)[0]: - client_w.drop_collection(client, collection_name) + rows = [ + {default_primary_key_field_name: str(i), default_vector_field_name: list(rng.random((1, default_dim))[0]), + default_string_field_name: str(i), "nullable_field": None, "array_field": None} for i in range(default_nb)] + self.insert(client, collection_name, rows) + if self.has_collection(client, collection_name)[0]: + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_rename_search_query_default(self): @@ -254,44 +243,44 @@ def test_milvus_client_rename_search_query_default(self): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. 
create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Bounded") - collections = client_w.list_collections(client)[0] + self.create_collection(client, collection_name, default_dim, consistency_level="Bounded") + collections = self.list_collections(client)[0] assert collection_name in collections - client_w.describe_collection(client, collection_name, - check_task=CheckTasks.check_describe_collection_property, - check_items={"collection_name": collection_name, - "dim": default_dim, - "consistency_level": 0}) + self.describe_collection(client, collection_name, + check_task=CheckTasks.check_describe_collection_property, + check_items={"collection_name": collection_name, + "dim": default_dim, + "consistency_level": 0}) old_name = collection_name new_name = collection_name + "new" - client_w.rename_collection(client, old_name, new_name) + self.rename_collection(client, old_name, new_name) # 2. insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - client_w.insert(client, new_name, rows) - client_w.flush(client, new_name) - # assert client_w.num_entities(client, collection_name)[0] == default_nb + self.insert(client, new_name, rows) + self.flush(client, new_name) + # assert self.num_entities(client, collection_name)[0] == default_nb # 3. 
search vectors_to_search = rng.random((1, default_dim)) insert_ids = [i for i in range(default_nb)] - client_w.search(client, new_name, vectors_to_search, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": insert_ids, - "limit": default_limit}) + self.search(client, new_name, vectors_to_search, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": insert_ids, + "limit": default_limit}) # 4. query - client_w.query(client, new_name, filter=default_search_exp, - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows, - "with_vec": True, - "primary_field": default_primary_key_field_name}) - client_w.release_collection(client, new_name) - client_w.drop_collection(client, new_name) + self.query(client, new_name, filter=default_search_exp, + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows, + "with_vec": True, + "primary_field": default_primary_key_field_name}) + self.release_collection(client, new_name) + self.drop_collection(client, new_name) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_array_insert_search(self): @@ -300,11 +289,11 @@ def test_milvus_client_array_insert_search(self): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") - collections = client_w.list_collections(client)[0] + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") + collections = self.list_collections(client)[0] assert collection_name in collections # 2. 
insert rng = np.random.default_rng(seed=19530) @@ -312,19 +301,19 @@ def test_milvus_client_array_insert_search(self): default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, - default_int32_array_field_name: [i, i+1, i+2], + default_int32_array_field_name: [i, i + 1, i + 2], default_string_array_field_name: [str(i), str(i + 1), str(i + 2)] } for i in range(default_nb)] - client_w.insert(client, collection_name, rows) + self.insert(client, collection_name, rows) # 3. search vectors_to_search = rng.random((1, default_dim)) insert_ids = [i for i in range(default_nb)] - client_w.search(client, collection_name, vectors_to_search, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": insert_ids, - "limit": default_limit}) + self.search(client, collection_name, vectors_to_search, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": insert_ids, + "limit": default_limit}) @pytest.mark.tags(CaseLabel.L2) @pytest.mark.skip(reason="issue 25110") @@ -334,36 +323,36 @@ def test_milvus_client_search_query_string(self): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. 
create collection - client_w.create_collection(client, collection_name, default_dim, id_type="string", max_length=ct.default_length) - client_w.describe_collection(client, collection_name, - check_task=CheckTasks.check_describe_collection_property, - check_items={"collection_name": collection_name, - "dim": default_dim, - "auto_id": auto_id}) + self.create_collection(client, collection_name, default_dim, id_type="string", max_length=ct.default_length) + self.describe_collection(client, collection_name, + check_task=CheckTasks.check_describe_collection_property, + check_items={"collection_name": collection_name, + "dim": default_dim}) # 2. insert rng = np.random.default_rng(seed=19530) - rows = [{default_primary_key_field_name: str(i), default_vector_field_name: list(rng.random((1, default_dim))[0]), - default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - client_w.insert(client, collection_name, rows) - client_w.flush(client, collection_name) - assert client_w.num_entities(client, collection_name)[0] == default_nb + rows = [ + {default_primary_key_field_name: str(i), default_vector_field_name: list(rng.random((1, default_dim))[0]), + default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] + self.insert(client, collection_name, rows) + self.flush(client, collection_name) + assert self.num_entities(client, collection_name)[0] == default_nb # 3. search vectors_to_search = rng.random((1, default_dim)) - client_w.search(client, collection_name, vectors_to_search, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "limit": default_limit}) + self.search(client, collection_name, vectors_to_search, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "limit": default_limit}) # 4. 
query - client_w.query(client, collection_name, filter=default_search_exp, - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows, - "with_vec": True, - "primary_field": default_primary_key_field_name}) - client_w.drop_collection(client, collection_name) + self.query(client, collection_name, filter=default_search_exp, + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows, + "with_vec": True, + "primary_field": default_primary_key_field_name}) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L2) def test_milvus_client_search_different_metric_types_not_specifying_in_search_params(self, metric_type, auto_id): @@ -372,11 +361,11 @@ def test_milvus_client_search_different_metric_types_not_specifying_in_search_pa method: create connection, collection, insert and search expected: search successfully with limit(topK) """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, metric_type=metric_type, auto_id=auto_id, - consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, metric_type=metric_type, auto_id=auto_id, + consistency_level="Strong") # 2. insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), @@ -384,17 +373,17 @@ def test_milvus_client_search_different_metric_types_not_specifying_in_search_pa if auto_id: for row in rows: row.pop(default_primary_key_field_name) - client_w.insert(client, collection_name, rows) + self.insert(client, collection_name, rows) # 3. 
search vectors_to_search = rng.random((1, default_dim)) # search_params = {"metric_type": metric_type} - client_w.search(client, collection_name, vectors_to_search, limit=default_limit, - output_fields=[default_primary_key_field_name], - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "limit": default_limit}) - client_w.drop_collection(client, collection_name) + self.search(client, collection_name, vectors_to_search, limit=default_limit, + output_fields=[default_primary_key_field_name], + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "limit": default_limit}) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L2) @pytest.mark.skip("pymilvus issue #1866") @@ -404,11 +393,11 @@ def test_milvus_client_search_different_metric_types_specifying_in_search_params method: create connection, collection, insert and search expected: search successfully with limit(topK) """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, metric_type=metric_type, auto_id=auto_id, - consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, metric_type=metric_type, auto_id=auto_id, + consistency_level="Strong") # 2. insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), @@ -416,18 +405,18 @@ def test_milvus_client_search_different_metric_types_specifying_in_search_params if auto_id: for row in rows: row.pop(default_primary_key_field_name) - client_w.insert(client, collection_name, rows) + self.insert(client, collection_name, rows) # 3. 
search vectors_to_search = rng.random((1, default_dim)) search_params = {"metric_type": metric_type} - client_w.search(client, collection_name, vectors_to_search, limit=default_limit, - search_params=search_params, - output_fields=[default_primary_key_field_name], - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "limit": default_limit}) - client_w.drop_collection(client, collection_name) + self.search(client, collection_name, vectors_to_search, limit=default_limit, + search_params=search_params, + output_fields=[default_primary_key_field_name], + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "limit": default_limit}) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_delete_with_ids(self): @@ -436,19 +425,19 @@ def test_milvus_client_delete_with_ids(self): method: create connection, collection, insert delete, and search expected: search/query successfully without deleted data """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. insert default_nb = 1000 rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - pks = client_w.insert(client, collection_name, rows)[0] + pks = self.insert(client, collection_name, rows)[0] # 3. 
delete delete_num = 3 - client_w.delete(client, collection_name, ids=[i for i in range(delete_num)]) + self.delete(client, collection_name, ids=[i for i in range(delete_num)]) # 4. search vectors_to_search = rng.random((1, default_dim)) insert_ids = [i for i in range(default_nb)] @@ -456,19 +445,19 @@ def test_milvus_client_delete_with_ids(self): if insert_id in insert_ids: insert_ids.remove(insert_id) limit = default_nb - delete_num - client_w.search(client, collection_name, vectors_to_search, limit=default_nb, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": insert_ids, - "limit": limit}) + self.search(client, collection_name, vectors_to_search, limit=default_nb, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": insert_ids, + "limit": limit}) # 5. query - client_w.query(client, collection_name, filter=default_search_exp, - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows[delete_num:], - "with_vec": True, - "primary_field": default_primary_key_field_name}) - client_w.drop_collection(client, collection_name) + self.query(client, collection_name, filter=default_search_exp, + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows[delete_num:], + "with_vec": True, + "primary_field": default_primary_key_field_name}) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) def test_milvus_client_delete_with_filters(self): @@ -477,19 +466,19 @@ def test_milvus_client_delete_with_filters(self): method: create connection, collection, insert delete, and search expected: search/query successfully without deleted data """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. 
create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. insert default_nb = 1000 rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - pks = client_w.insert(client, collection_name, rows)[0] + pks = self.insert(client, collection_name, rows)[0] # 3. delete delete_num = 3 - client_w.delete(client, collection_name, filter=f"id < {delete_num}") + self.delete(client, collection_name, filter=f"id < {delete_num}") # 4. search vectors_to_search = rng.random((1, default_dim)) insert_ids = [i for i in range(default_nb)] @@ -497,16 +486,69 @@ def test_milvus_client_delete_with_filters(self): if insert_id in insert_ids: insert_ids.remove(insert_id) limit = default_nb - delete_num - client_w.search(client, collection_name, vectors_to_search, limit=default_nb, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": insert_ids, - "limit": limit}) + self.search(client, collection_name, vectors_to_search, limit=default_nb, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": insert_ids, + "limit": limit}) # 5. 
query - client_w.query(client, collection_name, filter=default_search_exp, - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows[delete_num:], - "with_vec": True, - "primary_field": default_primary_key_field_name}) - client_w.drop_collection(client, collection_name) \ No newline at end of file + self.query(client, collection_name, filter=default_search_exp, + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows[delete_num:], + "with_vec": True, + "primary_field": default_primary_key_field_name}) + self.drop_collection(client, collection_name) + + @pytest.mark.tags(CaseLabel.L1) + def test_client_search_with_iterative_filter(self): + """ + target: test search with iterative filter + method: create connection, collection, insert, search with iterative filter + expected: search successfully + """ + client = self._client() + collection_name = cf.gen_unique_str(prefix) + # 1. create collection + schema = self.create_schema(client, enable_dynamic_field=False)[0] + dim = 32 + pk_field_name = 'id' + vector_field_name = 'embeddings' + str_field_name = 'title' + json_field_name = 'json_field' + max_length = 16 + schema.add_field(pk_field_name, DataType.INT64, is_primary=True, auto_id=False) + schema.add_field(vector_field_name, DataType.FLOAT_VECTOR, dim=dim) + schema.add_field(str_field_name, DataType.VARCHAR, max_length=max_length) + schema.add_field(json_field_name, DataType.JSON) + + index_params = self.prepare_index_params(client)[0] + index_params.add_index(field_name=vector_field_name, metric_type="COSINE", + index_type="IVF_FLAT", params={"nlist": 128}) + index_params.add_index(field_name=str_field_name) + self.create_collection(client, collection_name, schema=schema, index_params=index_params) + + rng = np.random.default_rng(seed=19530) + rows = [{ + pk_field_name: i, + vector_field_name: list(rng.random((1, dim))[0]), + str_field_name: cf.gen_str_by_length(max_length), + json_field_name: {"number": i} + } for i in 
range(default_nb)] + self.insert(client, collection_name, rows) + self.flush(client, collection_name) + self.load_collection(client, collection_name) + + # 3. search + search_vector = list(rng.random((1, dim))[0]) + search_params = {'hints': "iterative_filter", + 'params': cf.get_search_params_params('IVF_FLAT')} + self.search(client, collection_name, data=[search_vector], filter='id >= 10', + search_params=search_params, limit=default_limit) + not_supported_hints = "not_supported_hints" + error = {ct.err_code: 0, + ct.err_msg: f"Create Plan by expr failed: => hints: {not_supported_hints} not supported"} + search_params = {'hints': not_supported_hints, + 'params': cf.get_search_params_params('IVF_FLAT')} + self.search(client, collection_name, data=[search_vector], filter='id >= 10', + search_params=search_params, check_task=CheckTasks.err_res, check_items=error) diff --git a/tests/python_client/requirements.txt b/tests/python_client/requirements.txt index e29aea109cf63..fc8cf21154cfd 100644 --- a/tests/python_client/requirements.txt +++ b/tests/python_client/requirements.txt @@ -28,8 +28,8 @@ pytest-parallel pytest-random-order # pymilvus -pymilvus==2.5.2rc3 -pymilvus[bulk_writer]==2.5.2rc3 +pymilvus==2.6.0rc44 +pymilvus[bulk_writer]==2.6.0rc44 # for customize config test diff --git a/tests/python_client/testcases/async_milvus_client/test_e2e_async.py b/tests/python_client/testcases/async_milvus_client/test_e2e_async.py index 827282e5436aa..e5fb3eed1a5e2 100644 --- a/tests/python_client/testcases/async_milvus_client/test_e2e_async.py +++ b/tests/python_client/testcases/async_milvus_client/test_e2e_async.py @@ -5,23 +5,23 @@ from pymilvus.client.types import LoadState, DataType from pymilvus import AnnSearchRequest, RRFRanker -from base.client_base import TestcaseBase +from base.client_v2_base import TestMilvusClientV2Base from common import common_func as cf from common import common_type as ct from common.common_type import CaseLabel, CheckTasks from 
utils.util_log import test_log as log pytestmark = pytest.mark.asyncio - prefix = "async" async_default_nb = 5000 default_pk_name = "id" default_vector_name = "vector" -class TestAsyncMilvusClient(TestcaseBase): +class TestAsyncMilvusClient(TestMilvusClientV2Base): def teardown_method(self, method): + self.init_async_milvus_client() loop = asyncio.get_event_loop() loop.run_until_complete(self.async_milvus_client_wrap.close()) super().teardown_method(method) @@ -29,13 +29,13 @@ def teardown_method(self, method): @pytest.mark.tags(CaseLabel.L0) async def test_async_client_default(self): # init client - milvus_client = self._connect(enable_milvus_client_api=True) + milvus_client = self._client() self.init_async_milvus_client() # create collection c_name = cf.gen_unique_str(prefix) await self.async_milvus_client_wrap.create_collection(c_name, dimension=ct.default_dim) - collections, _ = self.high_level_api_wrap.list_collections(milvus_client) + collections, _ = self.list_collections(milvus_client) assert c_name in collections # insert entities @@ -116,17 +116,17 @@ async def test_async_client_default(self): @pytest.mark.tags(CaseLabel.L0) async def test_async_client_partition(self): # init client - milvus_client = self._connect(enable_milvus_client_api=True) + milvus_client = self._client() self.init_async_milvus_client() # create collection & partition c_name = cf.gen_unique_str(prefix) p_name = cf.gen_unique_str("par") await self.async_milvus_client_wrap.create_collection(c_name, dimension=ct.default_dim) - collections, _ = self.high_level_api_wrap.list_collections(milvus_client) + collections, _ = self.list_collections(milvus_client) assert c_name in collections - self.high_level_api_wrap.create_partition(milvus_client, c_name, p_name) - partitions, _ = self.high_level_api_wrap.list_partitions(milvus_client, c_name) + self.create_partition(milvus_client, c_name, p_name) + partitions, _ = self.list_partitions(milvus_client, c_name) assert p_name in partitions # 
insert entities @@ -216,7 +216,7 @@ async def test_async_client_partition(self): @pytest.mark.tags(CaseLabel.L0) async def test_async_client_with_schema(self, schema): # init client - milvus_client = self._connect(enable_milvus_client_api=True) + milvus_client = self._client() self.init_async_milvus_client() # create collection @@ -228,7 +228,7 @@ async def test_async_client_with_schema(self, schema): schema.add_field(ct.default_float_vec_field_name, DataType.FLOAT_VECTOR, dim=ct.default_dim) schema.add_field(default_vector_name, DataType.FLOAT_VECTOR, dim=ct.default_dim) await self.async_milvus_client_wrap.create_collection(c_name, schema=schema) - collections, _ = self.high_level_api_wrap.list_collections(milvus_client) + collections, _ = self.list_collections(milvus_client) assert c_name in collections # insert entities @@ -251,12 +251,12 @@ async def test_async_client_with_schema(self, schema): assert r[0]['insert_count'] == step # flush - self.high_level_api_wrap.flush(milvus_client, c_name) - stats, _ = self.high_level_api_wrap.get_collection_stats(milvus_client, c_name) + self.flush(milvus_client, c_name) + stats, _ = self.get_collection_stats(milvus_client, c_name) assert stats["row_count"] == async_default_nb # create index -> load - index_params, _ = self.high_level_api_wrap.prepare_index_params(milvus_client, + index_params, _ = self.prepare_index_params(milvus_client, field_name=ct.default_float_vec_field_name, index_type="HNSW", metric_type="COSINE", M=30, efConstruction=200) @@ -265,10 +265,10 @@ async def test_async_client_with_schema(self, schema): await self.async_milvus_client_wrap.create_index(c_name, index_params) await self.async_milvus_client_wrap.load_collection(c_name) - _index, _ = self.high_level_api_wrap.describe_index(milvus_client, c_name, default_vector_name) + _index, _ = self.describe_index(milvus_client, c_name, default_vector_name) assert _index["indexed_rows"] == async_default_nb assert _index["state"] == "Finished" - _load, _ = 
self.high_level_api_wrap.get_load_state(milvus_client, c_name) + _load, _ = self.get_load_state(milvus_client, c_name) assert _load["state"] == LoadState.Loaded # dql tasks @@ -320,13 +320,13 @@ async def test_async_client_with_schema(self, schema): @pytest.mark.tags(CaseLabel.L0) async def test_async_client_dml(self): # init client - milvus_client = self._connect(enable_milvus_client_api=True) + milvus_client = self._client() self.init_async_milvus_client() # create collection c_name = cf.gen_unique_str(prefix) await self.async_milvus_client_wrap.create_collection(c_name, dimension=ct.default_dim) - collections, _ = self.high_level_api_wrap.list_collections(milvus_client) + collections, _ = self.list_collections(milvus_client) assert c_name in collections # insert entities @@ -377,10 +377,10 @@ async def test_async_client_dml(self): @pytest.mark.tags(CaseLabel.L2) async def test_async_client_with_db(self): # init client - milvus_client = self._connect(enable_milvus_client_api=True) + milvus_client = self._client() db_name = cf.gen_unique_str("db") - self.high_level_api_wrap.create_database(milvus_client, db_name) - self.high_level_api_wrap.close(milvus_client) + self.create_database(milvus_client, db_name) + self.close(milvus_client) uri = cf.param_info.param_uri or f"http://{cf.param_info.param_host}:{cf.param_info.param_port}" milvus_client, _ = self.connection_wrap.MilvusClient(uri=uri, db_name=db_name) self.async_milvus_client_wrap.init_async_client(uri, db_name=db_name) @@ -388,7 +388,7 @@ async def test_async_client_with_db(self): # create collection c_name = cf.gen_unique_str(prefix) await self.async_milvus_client_wrap.create_collection(c_name, dimension=ct.default_dim) - collections, _ = self.high_level_api_wrap.list_collections(milvus_client) + collections, _ = self.list_collections(milvus_client) assert c_name in collections # insert entities @@ -458,7 +458,7 @@ async def test_async_client_close(self): @pytest.mark.skip("connect with zilliz cloud") async 
def test_async_client_with_token(self): # init client - milvus_client = self._connect(enable_milvus_client_api=True) + milvus_client = self._client() uri = cf.param_info.param_uri or f"http://{cf.param_info.param_host}:{cf.param_info.param_port}" token = cf.param_info.param_token milvus_client, _ = self.connection_wrap.MilvusClient(uri=uri, token=token) @@ -467,7 +467,7 @@ async def test_async_client_with_token(self): # create collection c_name = cf.gen_unique_str(prefix) await self.async_milvus_client_wrap.create_collection(c_name, dimension=ct.default_dim) - collections, _ = self.high_level_api_wrap.list_collections(milvus_client) + collections, _ = self.list_collections(milvus_client) assert c_name in collections # insert entities diff --git a/tests/python_client/testcases/test_high_level_api.py b/tests/python_client/testcases/test_high_level_api.py index 14f5ba390ea40..0daa224df8012 100644 --- a/tests/python_client/testcases/test_high_level_api.py +++ b/tests/python_client/testcases/test_high_level_api.py @@ -1,25 +1,12 @@ -import multiprocessing -import numbers -import random -import numpy -import threading import pytest -import pandas as pd -import decimal -from decimal import Decimal, getcontext -from time import sleep -import heapq -from base.client_base import TestcaseBase +from base.client_v2_base import TestMilvusClientV2Base from utils.util_log import test_log as log from common import common_func as cf from common import common_type as ct from common.common_type import CaseLabel, CheckTasks from utils.util_pymilvus import * from common.constants import * -from pymilvus.orm.types import CONSISTENCY_STRONG, CONSISTENCY_BOUNDED, CONSISTENCY_SESSION, CONSISTENCY_EVENTUALLY -from base.high_level_api_wrapper import HighLevelApiWrapper -client_w = HighLevelApiWrapper() prefix = "high_level_api" epsilon = ct.epsilon @@ -46,7 +33,7 @@ default_string_array_field_name = ct.default_string_array_field_name -class TestHighLevelApi(TestcaseBase): +class 
TestHighLevelApi(TestMilvusClientV2Base): """ Test case of search interface """ @pytest.fixture(scope="function", params=[False, True]) @@ -71,12 +58,12 @@ def test_high_level_collection_invalid_primary_field(self): method: create collection with invalid primary field expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection error = {ct.err_code: 1, ct.err_msg: f"Param id_type must be int or string"} - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong", - id_type="invalid", check_task=CheckTasks.err_res, check_items=error) + self.create_collection(client, collection_name, default_dim, consistency_level="Strong", + id_type="invalid", check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) def test_high_level_collection_string_auto_id(self): @@ -85,13 +72,13 @@ def test_high_level_collection_string_auto_id(self): method: create collection with auto id on string primary key expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection error = {ct.err_code: 65535, ct.err_msg: f"type param(max_length) should be specified for varChar " f"field of collection {collection_name}"} - client_w.create_collection(client, collection_name, default_dim, id_type="string", auto_id=True, - check_task=CheckTasks.err_res, check_items=error) + self.create_collection(client, collection_name, default_dim, id_type="string", auto_id=True, + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L1) def test_high_level_create_same_collection_different_params(self): @@ -101,18 +88,18 @@ def test_high_level_create_same_collection_different_params(self): expected: 1. Successfully to create collection with same params 2. 
Report errors for creating collection with same name and different params """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. create collection with same params - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 3. create collection with same name and different params error = {ct.err_code: 1, ct.err_msg: f"create duplicate collection with different parameters, " f"collection: {collection_name}"} - client_w.create_collection(client, collection_name, default_dim+1, consistency_level="Strong", - check_task=CheckTasks.err_res, check_items=error) - client_w.drop_collection(client, collection_name) + self.create_collection(client, collection_name, default_dim + 1, consistency_level="Strong", + check_task=CheckTasks.err_res, check_items=error) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L2) def test_high_level_collection_invalid_metric_type(self): @@ -121,13 +108,13 @@ def test_high_level_collection_invalid_metric_type(self): method: create collection with auto id on string primary key expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. 
create collection error = {ct.err_code: 65535, ct.err_msg: "float vector index does not support metric type: invalid: invalid parameter"} - client_w.create_collection(client, collection_name, default_dim, metric_type="invalid", - check_task=CheckTasks.err_res, check_items=error) + self.create_collection(client, collection_name, default_dim, metric_type="invalid", + check_task=CheckTasks.err_res, check_items=error) @pytest.mark.tags(CaseLabel.L2) @pytest.mark.skip("https://github.com/milvus-io/milvus/issues/29880") @@ -137,20 +124,20 @@ def test_high_level_search_not_consistent_metric_type(self, metric_type): method: create connection, collection, insert and search with not consistent metric type expected: Raise exception """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. 
search rng = np.random.default_rng(seed=19530) vectors_to_search = rng.random((1, 8)) search_params = {"metric_type": metric_type} error = {ct.err_code: 1100, ct.err_msg: f"metric type not match: invalid parameter[expected=IP][actual={metric_type}]"} - client_w.search(client, collection_name, vectors_to_search, limit=default_limit, - search_params=search_params, - check_task=CheckTasks.err_res, check_items=error) - client_w.drop_collection(client, collection_name) + self.search(client, collection_name, vectors_to_search, limit=default_limit, + search_params=search_params, + check_task=CheckTasks.err_res, check_items=error) + self.drop_collection(client, collection_name) """ ****************************************************************** @@ -165,39 +152,39 @@ def test_high_level_search_query_default(self): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") - collections = client_w.list_collections(client)[0] + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") + collections = self.list_collections(client)[0] assert collection_name in collections - client_w.describe_collection(client, collection_name, - check_task=CheckTasks.check_describe_collection_property, - check_items={"collection_name": collection_name, - "dim": default_dim, "consistency_level": 0}) + self.describe_collection(client, collection_name, + check_task=CheckTasks.check_describe_collection_property, + check_items={"collection_name": collection_name, + "dim": default_dim, "consistency_level": 0}) # 2. 
insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - client_w.insert(client, collection_name, rows) + self.insert(client, collection_name, rows) # 3. search vectors_to_search = rng.random((1, default_dim)) insert_ids = [i for i in range(default_nb)] - client_w.search(client, collection_name, vectors_to_search, + self.search(client, collection_name, vectors_to_search, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": insert_ids, - "limit": default_limit}) + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": insert_ids, + "limit": default_limit}) # 4. query - client_w.query(client, collection_name, filter=default_search_exp, - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows, - "with_vec": True, - "primary_field": default_primary_key_field_name}) - client_w.drop_collection(client, collection_name) + self.query(client, collection_name, filter=default_search_exp, + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows, + "with_vec": True, + "primary_field": default_primary_key_field_name}) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) def test_high_level_array_insert_search(self): @@ -206,11 +193,11 @@ def test_high_level_array_insert_search(self): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. 
create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") - collections = client_w.list_collections(client)[0] + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") + collections = self.list_collections(client)[0] assert collection_name in collections # 2. insert rng = np.random.default_rng(seed=19530) @@ -218,20 +205,20 @@ def test_high_level_array_insert_search(self): default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, - default_int32_array_field_name: [i, i+1, i+2], + default_int32_array_field_name: [i, i + 1, i + 2], default_string_array_field_name: [str(i), str(i + 1), str(i + 2)] } for i in range(default_nb)] - client_w.insert(client, collection_name, rows) + self.insert(client, collection_name, rows) # 3. search vectors_to_search = rng.random((1, default_dim)) insert_ids = [i for i in range(default_nb)] - client_w.search(client, collection_name, vectors_to_search, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": insert_ids, - "limit": default_limit}) + self.search(client, collection_name, vectors_to_search, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": insert_ids, + "limit": default_limit}) @pytest.mark.tags(CaseLabel.L2) @pytest.mark.skip(reason="issue 25110") @@ -241,36 +228,36 @@ def test_high_level_search_query_string(self): method: create connection, collection, insert and search expected: search/query successfully """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. 
create collection - client_w.create_collection(client, collection_name, default_dim, id_type="string", - max_length=ct.default_length, consistency_level="Strong") - client_w.describe_collection(client, collection_name, - check_task=CheckTasks.check_describe_collection_property, - check_items={"collection_name": collection_name, - "dim": default_dim, - "auto_id": auto_id}) + self.create_collection(client, collection_name, default_dim, id_type="string", + max_length=ct.default_length, consistency_level="Strong") + self.describe_collection(client, collection_name, + check_task=CheckTasks.check_describe_collection_property, + check_items={"collection_name": collection_name, + "dim": default_dim}) # 2. insert rng = np.random.default_rng(seed=19530) - rows = [{default_primary_key_field_name: str(i), default_vector_field_name: list(rng.random((1, default_dim))[0]), - default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - client_w.insert(client, collection_name, rows) + rows = [ + {default_primary_key_field_name: str(i), default_vector_field_name: list(rng.random((1, default_dim))[0]), + default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] + self.insert(client, collection_name, rows) # 3. search vectors_to_search = rng.random((1, default_dim)) - client_w.search(client, collection_name, vectors_to_search, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "limit": default_limit}) + self.search(client, collection_name, vectors_to_search, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "limit": default_limit}) # 4. 
query - client_w.query(client, collection_name, filter=default_search_exp, - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows, - "with_vec": True, - "primary_field": default_primary_key_field_name}) - client_w.drop_collection(client, collection_name) + self.query(client, collection_name, filter=default_search_exp, + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows, + "with_vec": True, + "primary_field": default_primary_key_field_name}) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L2) def test_high_level_search_different_metric_types(self, metric_type, auto_id): @@ -279,11 +266,11 @@ def test_high_level_search_different_metric_types(self, metric_type, auto_id): method: create connection, collection, insert and search expected: search successfully with limit(topK) """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, metric_type=metric_type, - auto_id=auto_id, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, metric_type=metric_type, + auto_id=auto_id, consistency_level="Strong") # 2. insert rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), @@ -291,19 +278,19 @@ def test_high_level_search_different_metric_types(self, metric_type, auto_id): if auto_id: for row in rows: row.pop(default_primary_key_field_name) - client_w.insert(client, collection_name, rows) + self.insert(client, collection_name, rows) # 3. 
search vectors_to_search = rng.random((1, default_dim)) search_params = {"metric_type": metric_type} - client_w.search(client, collection_name, vectors_to_search, limit=default_limit, - search_params=search_params, - output_fields=[default_primary_key_field_name], - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "limit": default_limit}) - client_w.drop_collection(client, collection_name) + self.search(client, collection_name, vectors_to_search, limit=default_limit, + search_params=search_params, + output_fields=[default_primary_key_field_name], + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "limit": default_limit}) + self.drop_collection(client, collection_name) @pytest.mark.tags(CaseLabel.L1) def test_high_level_delete(self): @@ -312,22 +299,22 @@ def test_high_level_delete(self): method: create connection, collection, insert delete, and search expected: search/query successfully without deleted data """ - client = self._connect(enable_milvus_client_api=True) + client = self._client() collection_name = cf.gen_unique_str(prefix) # 1. create collection - client_w.create_collection(client, collection_name, default_dim, consistency_level="Strong") + self.create_collection(client, collection_name, default_dim, consistency_level="Strong") # 2. insert default_nb = 1000 rng = np.random.default_rng(seed=19530) rows = [{default_primary_key_field_name: i, default_vector_field_name: list(rng.random((1, default_dim))[0]), default_float_field_name: i * 1.0, default_string_field_name: str(i)} for i in range(default_nb)] - client_w.insert(client, collection_name, rows)[0] + self.insert(client, collection_name, rows) pks = [i for i in range(default_nb)] # 3. get first primary key - first_pk_data = client_w.get(client, collection_name, ids=pks[0:1]) + first_pk_data = self.get(client, collection_name, ids=pks[0:1]) # 4. 
delete delete_num = 3 - client_w.delete(client, collection_name, ids=pks[0:delete_num]) + self.delete(client, collection_name, ids=pks[0:delete_num]) # 5. search vectors_to_search = rng.random((1, default_dim)) insert_ids = [i for i in range(default_nb)] @@ -335,16 +322,16 @@ def test_high_level_delete(self): if insert_id in insert_ids: insert_ids.remove(insert_id) limit = default_nb - delete_num - client_w.search(client, collection_name, vectors_to_search, limit=default_nb, - check_task=CheckTasks.check_search_results, - check_items={"enable_milvus_client_api": True, - "nq": len(vectors_to_search), - "ids": insert_ids, - "limit": limit}) + self.search(client, collection_name, vectors_to_search, limit=default_nb, + check_task=CheckTasks.check_search_results, + check_items={"enable_milvus_client_api": True, + "nq": len(vectors_to_search), + "ids": insert_ids, + "limit": limit}) # 6. query - client_w.query(client, collection_name, filter=default_search_exp, - check_task=CheckTasks.check_query_results, - check_items={exp_res: rows[delete_num:], - "with_vec": True, - "primary_field": default_primary_key_field_name}) - client_w.drop_collection(client, collection_name) + self.query(client, collection_name, filter=default_search_exp, + check_task=CheckTasks.check_query_results, + check_items={exp_res: rows[delete_num:], + "with_vec": True, + "primary_field": default_primary_key_field_name}) + self.drop_collection(client, collection_name)