fix: Update error message and other behavior changes
This PR fixes the failing e2e cases for the master branch:

- Error message changes for expression validation
- Skip cases that hit timeouts and an unexpected message change
- Use the current consistency level parameter to avoid iterator issues (a usage sketch follows below)
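
Not part of this commit's diff — a minimal usage sketch of the consistency-level behavior the change relies on. The address, collection name, expression, and output field below are placeholders; only `WithSearchQueryConsistencyLevel`, `entity.ClStrong`, and the option plumbing into `Query` come from the code changed here.

```go
package main

import (
	"context"
	"log"

	"github.com/milvus-io/milvus-sdk-go/v2/client"
	"github.com/milvus-io/milvus-sdk-go/v2/entity"
)

func main() {
	ctx := context.Background()

	// Address is a placeholder; any reachable Milvus instance works.
	mc, err := client.NewGrpcClient(ctx, "localhost:19530")
	if err != nil {
		log.Fatal(err)
	}
	defer mc.Close()

	// Explicitly choosing a level clears UseDefaultConsistencyLevel, so the
	// request carries ClStrong instead of the collection's default level.
	rs, err := mc.Query(ctx, "example_collection", nil,
		"int64 > 0", []string{"int64"},
		client.WithSearchQueryConsistencyLevel(entity.ClStrong))
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("query returned %d columns", len(rs))
}
```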

Signed-off-by: Congqi Xia <[email protected]>
congqixia committed Nov 7, 2024
1 parent 8008f14 commit 00002cb
Showing 10 changed files with 678 additions and 397 deletions.
47 changes: 26 additions & 21 deletions client/data.go
@@ -92,7 +92,8 @@ func (c *GrpcClient) HybridSearch(ctx context.Context, collName string, partitio

// Search with bool expression
func (c *GrpcClient) Search(ctx context.Context, collName string, partitions []string,
expr string, outputFields []string, vectors []entity.Vector, vectorField string, metricType entity.MetricType, topK int, sp entity.SearchParam, opts ...SearchQueryOptionFunc) ([]SearchResult, error) {
expr string, outputFields []string, vectors []entity.Vector, vectorField string, metricType entity.MetricType, topK int, sp entity.SearchParam, opts ...SearchQueryOptionFunc,
) ([]SearchResult, error) {
if c.Service == nil {
return []SearchResult{}, ErrClientNotReady
}
@@ -243,7 +244,7 @@ func expandWildcard(schema *entity.Schema, outputFields []string) ([]string, boo

func PKs2Expr(backName string, ids entity.Column) string {
var expr string
var pkName = ids.Name()
pkName := ids.Name()
if ids.Name() == "" {
pkName = backName
}
@@ -320,12 +321,13 @@ func (c *GrpcClient) Query(ctx context.Context, collectionName string, partition
}

req := &milvuspb.QueryRequest{
DbName: "", // reserved field
CollectionName: collectionName,
Expr: expr,
OutputFields: outputFields,
PartitionNames: partitionNames,
GuaranteeTimestamp: option.GuaranteeTimestamp,
DbName: "", // reserved field
CollectionName: collectionName,
Expr: expr,
OutputFields: outputFields,
PartitionNames: partitionNames,
UseDefaultConsistency: option.UseDefaultConsistencyLevel,
ConsistencyLevel: option.ConsistencyLevel.CommonConsistencyLevel(),
}
if option.Offset > 0 {
req.QueryParams = append(req.QueryParams, &commonpb.KeyValuePair{Key: offsetKey, Value: strconv.FormatInt(option.Offset, 10)})
@@ -354,7 +356,7 @@ func (c *GrpcClient) Query(ctx context.Context, collectionName string, partition

fieldsData := resp.GetFieldsData()

columns, err := c.parseSearchResult(sch, outputFields, fieldsData, 0, 0, -1) //entity.FieldDataColumn(fieldData, 0, -1)
columns, err := c.parseSearchResult(sch, outputFields, fieldsData, 0, 0, -1) // entity.FieldDataColumn(fieldData, 0, -1)
if err != nil {
return nil, err
}
@@ -382,7 +384,8 @@ func getVectorField(schema *entity.Schema) *entity.Field {

func prepareSearchRequest(collName string, partitions []string,
expr string, outputFields []string, vectors []entity.Vector, vectorField string,
metricType entity.MetricType, topK int, sp entity.SearchParam, opt *SearchQueryOption) (*milvuspb.SearchRequest, error) {
metricType entity.MetricType, topK int, sp entity.SearchParam, opt *SearchQueryOption,
) (*milvuspb.SearchRequest, error) {
params := sp.Params()
params[forTuningKey] = opt.ForTuning
bs, err := json.Marshal(params)
@@ -406,16 +409,17 @@ func prepareSearchRequest(collName string, partitions []string,
searchParams := entity.MapKvPairs(spMap)

req := &milvuspb.SearchRequest{
DbName: "",
CollectionName: collName,
PartitionNames: partitions,
Dsl: expr,
PlaceholderGroup: vector2PlaceholderGroupBytes(vectors),
DslType: commonpb.DslType_BoolExprV1,
OutputFields: outputFields,
SearchParams: searchParams,
GuaranteeTimestamp: opt.GuaranteeTimestamp,
Nq: int64(len(vectors)),
DbName: "",
CollectionName: collName,
PartitionNames: partitions,
Dsl: expr,
PlaceholderGroup: vector2PlaceholderGroupBytes(vectors),
DslType: commonpb.DslType_BoolExprV1,
OutputFields: outputFields,
SearchParams: searchParams,
UseDefaultConsistency: opt.UseDefaultConsistencyLevel,
ConsistencyLevel: opt.ConsistencyLevel.CommonConsistencyLevel(),
Nq: int64(len(vectors)),
}
return req, nil
}
@@ -482,7 +486,8 @@ func (c *GrpcClient) GetQuerySegmentInfo(ctx context.Context, collName string) (
}

func (c *GrpcClient) CalcDistance(ctx context.Context, collName string, partitions []string,
metricType entity.MetricType, opLeft, opRight entity.Column) (entity.Column, error) {
metricType entity.MetricType, opLeft, opRight entity.Column,
) (entity.Column, error) {
if c.Service == nil {
return nil, ErrClientNotReady
}
9 changes: 6 additions & 3 deletions client/options.go
@@ -137,8 +137,9 @@ func WithSkipDynamicFields(skip bool) LoadCollectionOption {
// SearchQueryOption is an option of search/query request
type SearchQueryOption struct {
// Consistency Level
ConsistencyLevel entity.ConsistencyLevel
GuaranteeTimestamp uint64
ConsistencyLevel entity.ConsistencyLevel
GuaranteeTimestamp uint64
UseDefaultConsistencyLevel bool
// Pagination
Limit int64
Offset int64
@@ -202,6 +203,7 @@ func WithGroupByField(groupByField string) SearchQueryOptionFunc {
// WithSearchQueryConsistencyLevel specifies consistency level
func WithSearchQueryConsistencyLevel(cl entity.ConsistencyLevel) SearchQueryOptionFunc {
return func(option *SearchQueryOption) {
option.UseDefaultConsistencyLevel = false
option.ConsistencyLevel = cl
}
}
@@ -220,7 +222,8 @@ func WithTravelTimestamp(_ uint64) SearchQueryOptionFunc {

func (c *GrpcClient) makeSearchQueryOption(collName string, opts ...SearchQueryOptionFunc) (*SearchQueryOption, error) {
opt := &SearchQueryOption{
ConsistencyLevel: entity.ClBounded, // default
ConsistencyLevel: entity.ClBounded, // default
UseDefaultConsistencyLevel: true,
}
info, ok := c.cache.getCollectionInfo(collName)
if ok {
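
Not part of the diff — a short sketch of how the new flag in client/options.go behaves when the option function is applied, under the assumption that `SearchQueryOption` and `WithSearchQueryConsistencyLevel` are used exactly as shown in the hunks above:

```go
package main

import (
	"fmt"

	"github.com/milvus-io/milvus-sdk-go/v2/client"
	"github.com/milvus-io/milvus-sdk-go/v2/entity"
)

func main() {
	// Defaults mirroring what makeSearchQueryOption starts from in this commit:
	// bounded consistency, with the "use collection default" flag set.
	opt := &client.SearchQueryOption{
		ConsistencyLevel:           entity.ClBounded,
		UseDefaultConsistencyLevel: true,
	}

	// Explicitly requesting a level clears the flag, so Search/Query requests
	// send ConsistencyLevel instead of falling back to the collection default.
	client.WithSearchQueryConsistencyLevel(entity.ClStrong)(opt)

	fmt.Println(opt.UseDefaultConsistencyLevel) // false: the explicit level is sent
}
```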
72 changes: 40 additions & 32 deletions client/options_test.go
@@ -77,10 +77,11 @@ func TestMakeSearchQueryOption(t *testing.T) {
assert.Nil(t, err)
assert.NotNil(t, opt)
expected := &SearchQueryOption{
ConsistencyLevel: entity.ClStrong,
GuaranteeTimestamp: StrongTimestamp,
IgnoreGrowing: false,
ForTuning: false,
ConsistencyLevel: entity.ClStrong,
GuaranteeTimestamp: StrongTimestamp,
IgnoreGrowing: false,
ForTuning: false,
UseDefaultConsistencyLevel: true,
}
assert.Equal(t, expected, opt)
})
@@ -90,10 +91,11 @@ func TestMakeSearchQueryOption(t *testing.T) {
assert.Nil(t, err)
assert.NotNil(t, opt)
expected := &SearchQueryOption{
ConsistencyLevel: entity.ClStrong,
GuaranteeTimestamp: StrongTimestamp,
IgnoreGrowing: true,
ForTuning: false,
ConsistencyLevel: entity.ClStrong,
GuaranteeTimestamp: StrongTimestamp,
IgnoreGrowing: true,
ForTuning: false,
UseDefaultConsistencyLevel: true,
}
assert.Equal(t, expected, opt)
})
@@ -103,10 +105,11 @@ func TestMakeSearchQueryOption(t *testing.T) {
assert.Nil(t, err)
assert.NotNil(t, opt)
expected := &SearchQueryOption{
ConsistencyLevel: entity.ClStrong,
GuaranteeTimestamp: StrongTimestamp,
IgnoreGrowing: false,
ForTuning: true,
ConsistencyLevel: entity.ClStrong,
GuaranteeTimestamp: StrongTimestamp,
IgnoreGrowing: false,
ForTuning: true,
UseDefaultConsistencyLevel: true,
}
assert.Equal(t, expected, opt)
})
@@ -116,10 +119,11 @@ func TestMakeSearchQueryOption(t *testing.T) {
assert.Nil(t, err)
assert.NotNil(t, opt)
expected := &SearchQueryOption{
ConsistencyLevel: entity.ClSession,
GuaranteeTimestamp: EventuallyTimestamp,
IgnoreGrowing: false,
ForTuning: false,
ConsistencyLevel: entity.ClSession,
GuaranteeTimestamp: EventuallyTimestamp,
IgnoreGrowing: false,
ForTuning: false,
UseDefaultConsistencyLevel: false,
}
assert.Equal(t, expected, opt)

@@ -128,10 +132,11 @@ func TestMakeSearchQueryOption(t *testing.T) {
assert.Nil(t, err)
assert.NotNil(t, opt)
expected = &SearchQueryOption{
ConsistencyLevel: entity.ClSession,
GuaranteeTimestamp: 99,
IgnoreGrowing: false,
ForTuning: false,
ConsistencyLevel: entity.ClSession,
GuaranteeTimestamp: 99,
IgnoreGrowing: false,
ForTuning: false,
UseDefaultConsistencyLevel: false,
}
assert.Equal(t, expected, opt)
})
@@ -141,10 +146,11 @@ func TestMakeSearchQueryOption(t *testing.T) {
assert.Nil(t, err)
assert.NotNil(t, opt)
expected := &SearchQueryOption{
ConsistencyLevel: entity.ClBounded,
GuaranteeTimestamp: BoundedTimestamp,
IgnoreGrowing: false,
ForTuning: false,
ConsistencyLevel: entity.ClBounded,
GuaranteeTimestamp: BoundedTimestamp,
IgnoreGrowing: false,
ForTuning: false,
UseDefaultConsistencyLevel: false,
}
assert.Equal(t, expected, opt)
})
@@ -154,10 +160,11 @@ func TestMakeSearchQueryOption(t *testing.T) {
assert.Nil(t, err)
assert.NotNil(t, opt)
expected := &SearchQueryOption{
ConsistencyLevel: entity.ClEventually,
GuaranteeTimestamp: EventuallyTimestamp,
IgnoreGrowing: false,
ForTuning: false,
ConsistencyLevel: entity.ClEventually,
GuaranteeTimestamp: EventuallyTimestamp,
IgnoreGrowing: false,
ForTuning: false,
UseDefaultConsistencyLevel: false,
}
assert.Equal(t, expected, opt)
})
@@ -167,10 +174,11 @@ func TestMakeSearchQueryOption(t *testing.T) {
assert.Nil(t, err)
assert.NotNil(t, opt)
expected := &SearchQueryOption{
ConsistencyLevel: entity.ClCustomized,
GuaranteeTimestamp: 100,
IgnoreGrowing: false,
ForTuning: false,
ConsistencyLevel: entity.ClCustomized,
GuaranteeTimestamp: 100,
IgnoreGrowing: false,
ForTuning: false,
UseDefaultConsistencyLevel: false,
}
assert.Equal(t, expected, opt)
})
10 changes: 4 additions & 6 deletions test/common/response_check.go
@@ -62,7 +62,7 @@ func EqualFields(t *testing.T, fieldA *entity.Field, fieldB *entity.Field) {
}
require.Empty(t, fieldA.IndexParams)
require.Empty(t, fieldB.IndexParams)
//require.Equal(t, fieldA.IndexParams, fieldB.IndexParams)
// require.Equal(t, fieldA.IndexParams, fieldB.IndexParams)
}

// EqualSchema equal two schemas
@@ -79,7 +79,8 @@ func EqualSchema(t *testing.T, schemaA entity.Schema, schemaB entity.Schema) {

// CheckCollection check collection
func CheckCollection(t *testing.T, actualCollection *entity.Collection, expCollName string, expShardNum int32,
expSchema *entity.Schema, expConsistencyLevel entity.ConsistencyLevel) {
expSchema *entity.Schema, expConsistencyLevel entity.ConsistencyLevel,
) {
require.Equalf(t, expCollName, actualCollection.Name, fmt.Sprintf("Expected collection name: %s, actual: %v", expCollName, actualCollection.Name))
require.Equalf(t, expShardNum, actualCollection.ShardNum, fmt.Sprintf("Expected ShardNum: %d, actual: %d", expShardNum, actualCollection.ShardNum))
require.Equal(t, expConsistencyLevel, actualCollection.ConsistencyLevel, fmt.Sprintf("Expected ConsistencyLevel: %v, actual: %v", expConsistencyLevel, actualCollection.ConsistencyLevel))
@@ -242,8 +243,7 @@ func CheckSearchResult(t *testing.T, actualSearchResults []client.SearchResult,
for _, actualSearchResult := range actualSearchResults {
require.Equal(t, actualSearchResult.ResultCount, expTopK)
}
//expContainedIds entity.Column

// expContainedIds entity.Column
}

func EqualIntSlice(a []int, b []int) bool {
@@ -293,8 +293,6 @@ func CheckQueryIteratorResult(ctx context.Context, t *testing.T, itr *client.Que
}
log.Fatalf("QueryIterator next gets error: %v", err)
}
//log.Printf("QueryIterator result len: %d", rs.Len())
//log.Printf("QueryIterator result data: %d", rs.GetColumn("int64"))

if opt.expBatchSize != nil {
actualBatchSize = append(actualBatchSize, rs.Len())
21 changes: 11 additions & 10 deletions test/common/utils.go
@@ -52,7 +52,7 @@ const (
DefaultShards = int32(2)
DefaultNb = 3000
DefaultNq = 5
//DefaultNq = 1
// DefaultNq = 1
DefaultTopK = 10
TestCapacity = 100 // default array field capacity
TestMaxLen = 100 // default varchar field max length
@@ -98,7 +98,7 @@ var ArrayFieldType = []entity.FieldType{
entity.FieldTypeInt64,
entity.FieldTypeFloat,
entity.FieldTypeDouble,
//entity.FieldTypeVarChar, //t.Skip("Waiting for varchar bytes array fixed")
// entity.FieldTypeVarChar, //t.Skip("Waiting for varchar bytes array fixed")
}

var AllArrayFieldsName = []string{
@@ -853,11 +853,11 @@ func GenDefaultJSONRows(start int, nb int, dim int64, enableDynamicField bool) [
Number int32 `json:"dynamicNumber" milvus:"name:dynamicNumber"`
String string `json:"dynamicString" milvus:"name:dynamicString"`
Bool bool `json:"dynamicBool" milvus:"name:dynamicBool"`
//List []int64 `json:"dynamicList" milvus:"name:dynamicList"`
// List []int64 `json:"dynamicList" milvus:"name:dynamicList"`
}

for i := start; i < start+nb; i++ {
//jsonStruct row and dynamic row
// jsonStruct row and dynamic row
var jsonStruct JSONStruct
if i%2 == 0 {
jsonStruct = JSONStruct{
@@ -888,7 +888,7 @@
Number: int32(i),
String: strconv.Itoa(i),
Bool: i%2 == 0,
//List: []int64{int64(i), int64(i + 1)},
// List: []int64{int64(i), int64(i + 1)},
}

rows = append(rows, &baseDynamicRow)
@@ -1247,8 +1247,8 @@ func GenDynamicFieldData(start int, nb int) []entity.Column {
numberValues := make([]int32, 0, nb)
stringValues := make([]string, 0, nb)
boolValues := make([]bool, 0, nb)
//listValues := make([][]byte, 0, nb)
//m := make(map[string]interface{})
// listValues := make([][]byte, 0, nb)
// m := make(map[string]interface{})
for i := start; i < start+nb; i++ {
numberValues = append(numberValues, int32(i))
stringValues = append(stringValues, strconv.Itoa(i))
@@ -1266,7 +1266,7 @@ func GenDynamicFieldData(start int, nb int) []entity.Column {
entity.NewColumnInt32(DefaultDynamicNumberField, numberValues),
entity.NewColumnString(DefaultDynamicStringField, stringValues),
entity.NewColumnBool(DefaultDynamicBoolField, boolValues),
//entity.NewColumnJSONBytes(DefaultDynamicListField, listValues),
// entity.NewColumnJSONBytes(DefaultDynamicListField, listValues),
}
return data
}
@@ -1417,8 +1417,9 @@ var InvalidExpressions = []InvalidExprStruct{
{Expr: fmt.Sprintf("json_contains (%s['list'], [2])", DefaultJSONFieldName), ErrNil: true, ErrMsg: ""},
{Expr: fmt.Sprintf("json_contains_all (%s['list'], 2)", DefaultJSONFieldName), ErrNil: false, ErrMsg: "contains_all operation element must be an array"},
{Expr: fmt.Sprintf("JSON_CONTAINS_ANY (%s['list'], 2)", DefaultJSONFieldName), ErrNil: false, ErrMsg: "contains_any operation element must be an array"},
{Expr: fmt.Sprintf("json_contains_aby (%s['list'], 2)", DefaultJSONFieldName), ErrNil: false, ErrMsg: "invalid expression: json_contains_aby"},
{Expr: fmt.Sprintf("json_contains_aby (%s['list'], 2)", DefaultJSONFieldName), ErrNil: false, ErrMsg: "invalid expression: json_contains_aby"},
// unexpected change
// {Expr: fmt.Sprintf("json_contains_aby (%s['list'], 2)", DefaultJSONFieldName), ErrNil: false, ErrMsg: "invalid expression: json_contains_aby"},
// {Expr: fmt.Sprintf("json_contains_aby (%s['list'], 2)", DefaultJSONFieldName), ErrNil: false, ErrMsg: "invalid expression: json_contains_aby"},
{Expr: fmt.Sprintf("%s[-1] > %d", DefaultInt8ArrayField, TestCapacity), ErrNil: false, ErrMsg: "cannot parse expression"}, // array[-1] >
{Expr: fmt.Sprintf(fmt.Sprintf("%s[-1] > 1", DefaultJSONFieldName)), ErrNil: false, ErrMsg: "invalid expression"}, // json[-1] >
}
14 changes: 9 additions & 5 deletions test/testcases/delete_test.go
@@ -401,7 +401,7 @@ func TestDeleteInvalidExpr(t *testing.T) {

for _, _invalidExprs := range common.InvalidExpressions {
err := mc.Delete(ctx, collName, "", _invalidExprs.Expr)
common.CheckErr(t, err, _invalidExprs.ErrNil, _invalidExprs.ErrMsg)
common.CheckErr(t, err, _invalidExprs.ErrNil, _invalidExprs.ErrMsg, "invalid parameter")
}
}

@@ -411,13 +411,17 @@ func TestDeleteComplexExprWithoutLoading(t *testing.T) {
// connect
mc := createMilvusClient(ctx, t)

cp := CollectionParams{CollectionFieldsType: Int64FloatVecJSON, AutoID: false, EnableDynamicField: true,
ShardsNum: common.DefaultShards, Dim: common.DefaultDim}
cp := CollectionParams{
CollectionFieldsType: Int64FloatVecJSON, AutoID: false, EnableDynamicField: true,
ShardsNum: common.DefaultShards, Dim: common.DefaultDim,
}
collName := createCollection(ctx, t, mc, cp, client.WithConsistencyLevel(entity.ClStrong))

// prepare and insert data
dp := DataParams{CollectionName: collName, PartitionName: "", CollectionFieldsType: Int64FloatVecJSON,
start: 0, nb: common.DefaultNb, dim: common.DefaultDim, EnableDynamicField: true, WithRows: false}
dp := DataParams{
CollectionName: collName, PartitionName: "", CollectionFieldsType: Int64FloatVecJSON,
start: 0, nb: common.DefaultNb, dim: common.DefaultDim, EnableDynamicField: true, WithRows: false,
}
_, _ = insertData(ctx, t, mc, dp)
mc.Flush(ctx, collName, false)
