test: update sdk version and add cases (milvus-io#38478)
Signed-off-by: nico <[email protected]>
NicoYuan1986 authored Dec 18, 2024
1 parent ffd3c5d commit fb0e689
Showing 6 changed files with 50 additions and 12 deletions.
2 changes: 1 addition & 1 deletion tests/python_client/common/code_mapping.py
@@ -40,7 +40,7 @@ class IndexErrorMessage(ExceptionsMessage):
CheckBitmapIndex = "bitmap index are only supported on bool, int, string"
CheckBitmapOnPK = "create bitmap index on primary key not supported"
CheckBitmapCardinality = "failed to check bitmap cardinality limit, should be larger than 0 and smaller than 1000"
NotConfigable = "{0} is not configable index param"
NotConfigable = "{0} is not a configable index proptery"
InvalidOffsetCache = "invalid offset cache index params"
OneIndexPerField = "at most one distinct index is allowed per field"
AlterOnLoadedCollection = "can't alter index on loaded collection, please release the collection first"
10 changes: 5 additions & 5 deletions tests/python_client/common/common_func.py
@@ -982,13 +982,13 @@ def gen_collection_schema_all_datatype(description=ct.default_desc, primary_fiel
if ct.append_vector_type[i%3] != ct.sparse_vector:
if default_value_fields.get(ct.append_vector_type[i%3]) is None:
vector_field = gen_float_vec_field(name=f"multiple_vector_{ct.append_vector_type[i%3]}",
dim=multiple_dim_array[i],
vector_data_type=ct.append_vector_type[i%3])
dim=multiple_dim_array[i],
vector_data_type=ct.append_vector_type[i%3])
else:
vector_field = gen_float_vec_field(name=f"multiple_vector_{ct.append_vector_type[i%3]}",
dim=multiple_dim_array[i],
vector_data_type=ct.append_vector_type[i%3],
default_value=default_value_fields.get(ct.append_vector_type[i%3]))
dim=multiple_dim_array[i],
vector_data_type=ct.append_vector_type[i%3],
default_value=default_value_fields.get(ct.append_vector_type[i%3]))
fields.append(vector_field)
else:
# The field of a sparse vector cannot be dimensioned
4 changes: 2 additions & 2 deletions tests/python_client/requirements.txt
@@ -27,8 +27,8 @@ pytest-parallel
pytest-random-order

# pymilvus
pymilvus==2.5.1rc9
pymilvus[bulk_writer]==2.5.1rc9
pymilvus==2.5.1rc14
pymilvus[bulk_writer]==2.5.1rc14


# for customize config test
4 changes: 2 additions & 2 deletions tests/python_client/testcases/test_index.py
@@ -1428,15 +1428,15 @@ def test_alter_index_invalid(self):
collection_w.alter_index(ct.default_index_name, {"error_param_key": 123},
check_task=CheckTasks.err_res,
check_items={ct.err_code: 1100,
ct.err_msg: f"error_param_key is not configable index param"})
ct.err_msg: "error_param_key is not a configable index proptery:"})
collection_w.alter_index(ct.default_index_name, ["error_param_type"],
check_task=CheckTasks.err_res,
check_items={ct.err_code: 1,
ct.err_msg: f"Unexpected error"})
collection_w.alter_index(ct.default_index_name, None,
check_task=CheckTasks.err_res,
check_items={ct.err_code: 1,
ct.err_msg: f"extra_params should not be None"})
ct.err_msg: "properties should not be None"})
collection_w.alter_index(ct.default_index_name, 1000,
check_task=CheckTasks.err_res,
check_items={ct.err_code: 1,
40 changes: 39 additions & 1 deletion tests/python_client/testcases/test_issues.py
@@ -9,6 +9,7 @@


class TestIssues(TestcaseBase):

@pytest.mark.tags(CaseLabel.L0)
@pytest.mark.parametrize("par_key_field", [ct.default_int64_field_name])
@pytest.mark.parametrize("use_upsert", [True, False])
@@ -75,4 +76,41 @@ def test_issue_30607(self, par_key_field, use_upsert):
log.info(f"dirty data found: pk {pk} with parkey {parkey_value}")
dirty_count += 1
assert dirty_count == 0
log.info(f"check randomly {seeds}/{num_entities}, dirty count={dirty_count}")
log.info(f"check randomly {seeds}/{num_entities}, dirty count={dirty_count}")

@pytest.mark.tags(CaseLabel.L2)
def test_issue_32294(self):
"""
Method:
1. create a collection with an auto-id int64 primary key, a string field "metadata" and a float vector field
2. insert 500 entities, overwriting the first two string values with JSON-formatted text
3. search with a `like` expression on the string field and verify the matching entities are returned
"""
self._connect()
pk_field = cf.gen_int64_field(name='pk', is_primary=True)
string_field = cf.gen_string_field(name="metadata")
vector_field = cf.gen_float_vec_field()
schema = cf.gen_collection_schema(fields=[pk_field, string_field, vector_field], auto_id=True)
collection_w = self.init_collection_wrap(schema=schema)

# insert
nb = 500
string_values = [str(i) for i in range(0, nb)]
float_vec_values = gen_vectors(nb, ct.default_dim)
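# overwrite the first two string values with JSON-formatted text (one multi-line, one single-line)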
string_values[0] = ('{\n'
'"Header 1": "Foo1?", \n'
'"document_category": "acme", \n'
'"type": "passage"\n'
'}')
string_values[1] = '{"Header 1": "Foo1?", "document_category": "acme", "type": "passage"}'
data = [string_values, float_vec_values]
collection_w.insert(data)
collection_w.create_index(field_name=ct.default_float_vec_field_name, index_params=ct.default_index)
collection_w.load()

expr = "metadata like '%passage%'"
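# both JSON-formatted entries contain "passage", so the filtered search is expected to return those 2 entities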
collection_w.search(float_vec_values[-2:], ct.default_float_vec_field_name, {},
ct.default_limit, expr, output_fields=["metadata"],
check_task=CheckTasks.check_search_results,
check_items={"nq": 2,
"limit": 2})
2 changes: 1 addition & 1 deletion tests/python_client/testcases/test_search.py
@@ -10628,7 +10628,7 @@ def test_search_group_by_unsupported_index(self, index):

# search with groupby
err_code = 999
err_msg = "doesn't support search_group_by"
err_msg = f"current index:{index} doesn't support"
collection_w.search(data=search_vectors, anns_field=ct.default_float_vec_field_name,
param=search_params, limit=limit,
group_by_field=ct.default_int8_field_name,