From fb0e689617148cc66761a961bdfec47066686d1f Mon Sep 17 00:00:00 2001
From: nico <109071306+NicoYuan1986@users.noreply.github.com>
Date: Wed, 18 Dec 2024 10:10:57 +0800
Subject: [PATCH] test: update sdk version and add cases (#38478)

Signed-off-by: nico
---
 tests/python_client/common/code_mapping.py   |  2 +-
 tests/python_client/common/common_func.py    | 10 ++---
 tests/python_client/requirements.txt         |  4 +-
 tests/python_client/testcases/test_index.py  |  4 +-
 tests/python_client/testcases/test_issues.py | 40 +++++++++++++++++++-
 tests/python_client/testcases/test_search.py |  2 +-
 6 files changed, 50 insertions(+), 12 deletions(-)

diff --git a/tests/python_client/common/code_mapping.py b/tests/python_client/common/code_mapping.py
index 355cb3baeeda3..337fffa07669d 100644
--- a/tests/python_client/common/code_mapping.py
+++ b/tests/python_client/common/code_mapping.py
@@ -40,7 +40,7 @@ class IndexErrorMessage(ExceptionsMessage):
     CheckBitmapIndex = "bitmap index are only supported on bool, int, string"
     CheckBitmapOnPK = "create bitmap index on primary key not supported"
     CheckBitmapCardinality = "failed to check bitmap cardinality limit, should be larger than 0 and smaller than 1000"
-    NotConfigable = "{0} is not configable index param"
+    NotConfigable = "{0} is not a configable index proptery"
     InvalidOffsetCache = "invalid offset cache index params"
     OneIndexPerField = "at most one distinct index is allowed per field"
     AlterOnLoadedCollection = "can't alter index on loaded collection, please release the collection first"
diff --git a/tests/python_client/common/common_func.py b/tests/python_client/common/common_func.py
index 90b4ca860a4e7..c808872467a0a 100644
--- a/tests/python_client/common/common_func.py
+++ b/tests/python_client/common/common_func.py
@@ -982,13 +982,13 @@ def gen_collection_schema_all_datatype(description=ct.default_desc, primary_fiel
         if ct.append_vector_type[i%3] != ct.sparse_vector:
             if default_value_fields.get(ct.append_vector_type[i%3]) is None:
                 vector_field = gen_float_vec_field(name=f"multiple_vector_{ct.append_vector_type[i%3]}",
-                                                   dim=multiple_dim_array[i],
-                                                   vector_data_type=ct.append_vector_type[i%3])
+                                                    dim=multiple_dim_array[i],
+                                                    vector_data_type=ct.append_vector_type[i%3])
             else:
                 vector_field = gen_float_vec_field(name=f"multiple_vector_{ct.append_vector_type[i%3]}",
-                                                   dim=multiple_dim_array[i],
-                                                   vector_data_type=ct.append_vector_type[i%3],
-                                                   default_value=default_value_fields.get(ct.append_vector_type[i%3]))
+                                                    dim=multiple_dim_array[i],
+                                                    vector_data_type=ct.append_vector_type[i%3],
+                                                    default_value=default_value_fields.get(ct.append_vector_type[i%3]))
             fields.append(vector_field)
         else:
             # The field of a sparse vector cannot be dimensioned
diff --git a/tests/python_client/requirements.txt b/tests/python_client/requirements.txt
index fcb8b22926e17..59d592c2ac710 100644
--- a/tests/python_client/requirements.txt
+++ b/tests/python_client/requirements.txt
@@ -27,8 +27,8 @@ pytest-parallel
 pytest-random-order
 
 # pymilvus
-pymilvus==2.5.1rc9
-pymilvus[bulk_writer]==2.5.1rc9
+pymilvus==2.5.1rc14
+pymilvus[bulk_writer]==2.5.1rc14
 
 
 # for customize config test
diff --git a/tests/python_client/testcases/test_index.py b/tests/python_client/testcases/test_index.py
index 0a66545330cad..66afe37830e8f 100644
--- a/tests/python_client/testcases/test_index.py
+++ b/tests/python_client/testcases/test_index.py
@@ -1428,7 +1428,7 @@ def test_alter_index_invalid(self):
         collection_w.alter_index(ct.default_index_name, {"error_param_key": 123},
                                  check_task=CheckTasks.err_res,
                                 check_items={ct.err_code: 1100,
-                                             ct.err_msg: f"error_param_key is not configable index param"})
+                                             ct.err_msg: "error_param_key is not a configable index proptery:"})
         collection_w.alter_index(ct.default_index_name, ["error_param_type"],
                                  check_task=CheckTasks.err_res,
                                  check_items={ct.err_code: 1,
@@ -1436,7 +1436,7 @@ def test_alter_index_invalid(self):
         collection_w.alter_index(ct.default_index_name, None,
                                  check_task=CheckTasks.err_res,
                                  check_items={ct.err_code: 1,
-                                              ct.err_msg: f"extra_params should not be None"})
+                                              ct.err_msg: "properties should not be None"})
         collection_w.alter_index(ct.default_index_name, 1000,
                                  check_task=CheckTasks.err_res,
                                  check_items={ct.err_code: 1,
diff --git a/tests/python_client/testcases/test_issues.py b/tests/python_client/testcases/test_issues.py
index 4b79d253a6405..651e50d5dc6c7 100644
--- a/tests/python_client/testcases/test_issues.py
+++ b/tests/python_client/testcases/test_issues.py
@@ -9,6 +9,7 @@
 
 
 class TestIssues(TestcaseBase):
+
     @pytest.mark.tags(CaseLabel.L0)
     @pytest.mark.parametrize("par_key_field", [ct.default_int64_field_name])
     @pytest.mark.parametrize("use_upsert", [True, False])
@@ -75,4 +76,41 @@ def test_issue_30607(self, par_key_field, use_upsert):
                 log.info(f"dirty data found: pk {pk} with parkey {parkey_value}")
                 dirty_count += 1
         assert dirty_count == 0
-        log.info(f"check randomly {seeds}/{num_entities}, dirty count={dirty_count}")
\ No newline at end of file
+        log.info(f"check randomly {seeds}/{num_entities}, dirty count={dirty_count}")
+
+    @pytest.mark.tags(CaseLabel.L2)
+    def test_issue_32294(self):
+        """
+        Method:
+        1. create a collection with an auto_id pk field, a string field and a float vector field
+        2. insert entities whose string values contain JSON-formatted text
+        3. create an index, load, and search with a like expression on the string field, then verify the results
+        """
+        self._connect()
+        pk_field = cf.gen_int64_field(name='pk', is_primary=True)
+        string_field = cf.gen_string_field(name="metadata")
+        vector_field = cf.gen_float_vec_field()
+        schema = cf.gen_collection_schema(fields=[pk_field, string_field, vector_field], auto_id=True)
+        collection_w = self.init_collection_wrap(schema=schema)
+
+        # insert
+        nb = 500
+        string_values = [str(i) for i in range(0, nb)]
+        float_vec_values = gen_vectors(nb, ct.default_dim)
+        string_values[0] = ('{\n'
+                            '"Header 1": "Foo1?", \n'
+                            '"document_category": "acme", \n'
+                            '"type": "passage"\n'
+                            '}')
+        string_values[1] = '{"Header 1": "Foo1?", "document_category": "acme", "type": "passage"}'
+        data = [string_values, float_vec_values]
+        collection_w.insert(data)
+        collection_w.create_index(field_name=ct.default_float_vec_field_name, index_params=ct.default_index)
+        collection_w.load()
+
+        expr = "metadata like '%passage%'"
+        collection_w.search(float_vec_values[-2:], ct.default_float_vec_field_name, {},
+                            ct.default_limit, expr, output_fields=["metadata"],
+                            check_task=CheckTasks.check_search_results,
+                            check_items={"nq": 2,
+                                         "limit": 2})
diff --git a/tests/python_client/testcases/test_search.py b/tests/python_client/testcases/test_search.py
index 93584a2738688..aa75660a5a363 100644
--- a/tests/python_client/testcases/test_search.py
+++ b/tests/python_client/testcases/test_search.py
@@ -10628,7 +10628,7 @@ def test_search_group_by_unsupported_index(self, index):
 
         # search with groupby
         err_code = 999
-        err_msg = "doesn't support search_group_by"
+        err_msg = f"current index:{index} doesn't support"
         collection_w.search(data=search_vectors, anns_field=ct.default_float_vec_field_name,
                             param=search_params, limit=limit, group_by_field=ct.default_int8_field_name,
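
Below is a minimal standalone sketch of the alter_index failure path that the updated assertions in test_alter_index_invalid target. It assumes a running Milvus instance at localhost:19530, pymilvus >= 2.5, and an existing collection "demo" that already carries an index named "vec_idx"; these names and connection details are illustrative, not taken from the patch.

from pymilvus import connections, Collection, MilvusException

# connect to an assumed local Milvus instance
connections.connect(host="localhost", port="19530")
coll = Collection("demo")  # illustrative collection with an existing index "vec_idx"
try:
    # "error_param_key" is not an alterable index property, so the server should reject it
    coll.alter_index("vec_idx", {"error_param_key": 123})
except MilvusException as e:
    # the patched test expects the new server wording (typos included) in the error message
    assert "is not a configable index proptery" in str(e)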