diff --git a/go.mod b/go.mod index 85591473efc6..d590d7d75368 100644 --- a/go.mod +++ b/go.mod @@ -10,7 +10,7 @@ require ( github.com/DataDog/zstd v1.5.2 github.com/NYTimes/gziphandler v1.1.1 github.com/antithesishq/antithesis-sdk-go v0.3.8 - github.com/ava-labs/coreth v0.13.9-rc.1 + github.com/ava-labs/coreth v0.13.9-rc.1.0.20241204164043-9feca2730025 github.com/ava-labs/ledger-avalanche/go v0.0.0-20241009183145-e6f90a8a1a60 github.com/btcsuite/btcd/btcutil v1.1.3 github.com/cockroachdb/pebble v0.0.0-20230928194634-aa077af62593 @@ -190,3 +190,5 @@ require ( sigs.k8s.io/structured-merge-diff/v4 v4.4.1 // indirect sigs.k8s.io/yaml v1.3.0 // indirect ) + +// replace github.com/ava-labs/coreth => ../coreth diff --git a/go.sum b/go.sum index 50b7e212cc7a..88fe2fd8ff03 100644 --- a/go.sum +++ b/go.sum @@ -64,8 +64,8 @@ github.com/antithesishq/antithesis-sdk-go v0.3.8/go.mod h1:IUpT2DPAKh6i/YhSbt6Gl github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= -github.com/ava-labs/coreth v0.13.9-rc.1 h1:qIICpC/OZGYUP37QnLgIqqwGmxnLwLpZaUlqJNI85vU= -github.com/ava-labs/coreth v0.13.9-rc.1/go.mod h1:7aMsRIo/3GBE44qWZMjnfqdqfcfZ5yShTTm2LObLaYo= +github.com/ava-labs/coreth v0.13.9-rc.1.0.20241204164043-9feca2730025 h1:S4mzNzg00jnuejJvzykoHGze75lRTYkEvG2JyIXwmqw= +github.com/ava-labs/coreth v0.13.9-rc.1.0.20241204164043-9feca2730025/go.mod h1:7aMsRIo/3GBE44qWZMjnfqdqfcfZ5yShTTm2LObLaYo= github.com/ava-labs/ledger-avalanche/go v0.0.0-20241009183145-e6f90a8a1a60 h1:EL66gtXOAwR/4KYBjOV03LTWgkEXvLePribLlJNu4g0= github.com/ava-labs/ledger-avalanche/go v0.0.0-20241009183145-e6f90a8a1a60/go.mod h1:/7qKobTfbzBu7eSTVaXMTr56yTYk4j2Px6/8G+idxHo= github.com/aymerick/raymond v2.0.3-0.20180322193309-b565731e1464+incompatible/go.mod h1:osfaiScAUVup+UC9Nfq76eWqDhXlp+4UYaA8uhTBO6g= diff --git a/proto/pb/sync/sync.pb.go b/proto/pb/sync/sync.pb.go index b22044c07cdf..dbe1bbb8b14b 100644 --- a/proto/pb/sync/sync.pb.go +++ b/proto/pb/sync/sync.pb.go @@ -68,163 +68,6 @@ func (x *GetMerkleRootResponse) GetRootHash() []byte { return nil } -type GetProofRequest struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Key []byte `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` -} - -func (x *GetProofRequest) Reset() { - *x = GetProofRequest{} - if protoimpl.UnsafeEnabled { - mi := &file_sync_sync_proto_msgTypes[1] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetProofRequest) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetProofRequest) ProtoMessage() {} - -func (x *GetProofRequest) ProtoReflect() protoreflect.Message { - mi := &file_sync_sync_proto_msgTypes[1] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetProofRequest.ProtoReflect.Descriptor instead. 
-func (*GetProofRequest) Descriptor() ([]byte, []int) { - return file_sync_sync_proto_rawDescGZIP(), []int{1} -} - -func (x *GetProofRequest) GetKey() []byte { - if x != nil { - return x.Key - } - return nil -} - -type GetProofResponse struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Proof *Proof `protobuf:"bytes,1,opt,name=proof,proto3" json:"proof,omitempty"` -} - -func (x *GetProofResponse) Reset() { - *x = GetProofResponse{} - if protoimpl.UnsafeEnabled { - mi := &file_sync_sync_proto_msgTypes[2] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *GetProofResponse) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*GetProofResponse) ProtoMessage() {} - -func (x *GetProofResponse) ProtoReflect() protoreflect.Message { - mi := &file_sync_sync_proto_msgTypes[2] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use GetProofResponse.ProtoReflect.Descriptor instead. -func (*GetProofResponse) Descriptor() ([]byte, []int) { - return file_sync_sync_proto_rawDescGZIP(), []int{2} -} - -func (x *GetProofResponse) GetProof() *Proof { - if x != nil { - return x.Proof - } - return nil -} - -type Proof struct { - state protoimpl.MessageState - sizeCache protoimpl.SizeCache - unknownFields protoimpl.UnknownFields - - Key []byte `protobuf:"bytes,1,opt,name=key,proto3" json:"key,omitempty"` - Value *MaybeBytes `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` - Proof []*ProofNode `protobuf:"bytes,3,rep,name=proof,proto3" json:"proof,omitempty"` -} - -func (x *Proof) Reset() { - *x = Proof{} - if protoimpl.UnsafeEnabled { - mi := &file_sync_sync_proto_msgTypes[3] - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - ms.StoreMessageInfo(mi) - } -} - -func (x *Proof) String() string { - return protoimpl.X.MessageStringOf(x) -} - -func (*Proof) ProtoMessage() {} - -func (x *Proof) ProtoReflect() protoreflect.Message { - mi := &file_sync_sync_proto_msgTypes[3] - if protoimpl.UnsafeEnabled && x != nil { - ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) - if ms.LoadMessageInfo() == nil { - ms.StoreMessageInfo(mi) - } - return ms - } - return mi.MessageOf(x) -} - -// Deprecated: Use Proof.ProtoReflect.Descriptor instead. -func (*Proof) Descriptor() ([]byte, []int) { - return file_sync_sync_proto_rawDescGZIP(), []int{3} -} - -func (x *Proof) GetKey() []byte { - if x != nil { - return x.Key - } - return nil -} - -func (x *Proof) GetValue() *MaybeBytes { - if x != nil { - return x.Value - } - return nil -} - -func (x *Proof) GetProof() []*ProofNode { - if x != nil { - return x.Proof - } - return nil -} - // For use in sync client, which has a restriction on the size of // the response. GetChangeProof in the DB service doesn't. 
type SyncGetChangeProofRequest struct { @@ -243,7 +86,7 @@ type SyncGetChangeProofRequest struct { func (x *SyncGetChangeProofRequest) Reset() { *x = SyncGetChangeProofRequest{} if protoimpl.UnsafeEnabled { - mi := &file_sync_sync_proto_msgTypes[4] + mi := &file_sync_sync_proto_msgTypes[1] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -256,7 +99,7 @@ func (x *SyncGetChangeProofRequest) String() string { func (*SyncGetChangeProofRequest) ProtoMessage() {} func (x *SyncGetChangeProofRequest) ProtoReflect() protoreflect.Message { - mi := &file_sync_sync_proto_msgTypes[4] + mi := &file_sync_sync_proto_msgTypes[1] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -269,7 +112,7 @@ func (x *SyncGetChangeProofRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use SyncGetChangeProofRequest.ProtoReflect.Descriptor instead. func (*SyncGetChangeProofRequest) Descriptor() ([]byte, []int) { - return file_sync_sync_proto_rawDescGZIP(), []int{4} + return file_sync_sync_proto_rawDescGZIP(), []int{1} } func (x *SyncGetChangeProofRequest) GetStartRootHash() []byte { @@ -329,7 +172,7 @@ type SyncGetChangeProofResponse struct { func (x *SyncGetChangeProofResponse) Reset() { *x = SyncGetChangeProofResponse{} if protoimpl.UnsafeEnabled { - mi := &file_sync_sync_proto_msgTypes[5] + mi := &file_sync_sync_proto_msgTypes[2] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -342,7 +185,7 @@ func (x *SyncGetChangeProofResponse) String() string { func (*SyncGetChangeProofResponse) ProtoMessage() {} func (x *SyncGetChangeProofResponse) ProtoReflect() protoreflect.Message { - mi := &file_sync_sync_proto_msgTypes[5] + mi := &file_sync_sync_proto_msgTypes[2] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -355,7 +198,7 @@ func (x *SyncGetChangeProofResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use SyncGetChangeProofResponse.ProtoReflect.Descriptor instead. func (*SyncGetChangeProofResponse) Descriptor() ([]byte, []int) { - return file_sync_sync_proto_rawDescGZIP(), []int{5} + return file_sync_sync_proto_rawDescGZIP(), []int{2} } func (m *SyncGetChangeProofResponse) GetResponse() isSyncGetChangeProofResponse_Response { @@ -410,7 +253,7 @@ type GetChangeProofRequest struct { func (x *GetChangeProofRequest) Reset() { *x = GetChangeProofRequest{} if protoimpl.UnsafeEnabled { - mi := &file_sync_sync_proto_msgTypes[6] + mi := &file_sync_sync_proto_msgTypes[3] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -423,7 +266,7 @@ func (x *GetChangeProofRequest) String() string { func (*GetChangeProofRequest) ProtoMessage() {} func (x *GetChangeProofRequest) ProtoReflect() protoreflect.Message { - mi := &file_sync_sync_proto_msgTypes[6] + mi := &file_sync_sync_proto_msgTypes[3] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -436,7 +279,7 @@ func (x *GetChangeProofRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use GetChangeProofRequest.ProtoReflect.Descriptor instead. 
func (*GetChangeProofRequest) Descriptor() ([]byte, []int) { - return file_sync_sync_proto_rawDescGZIP(), []int{6} + return file_sync_sync_proto_rawDescGZIP(), []int{3} } func (x *GetChangeProofRequest) GetStartRootHash() []byte { @@ -489,7 +332,7 @@ type GetChangeProofResponse struct { func (x *GetChangeProofResponse) Reset() { *x = GetChangeProofResponse{} if protoimpl.UnsafeEnabled { - mi := &file_sync_sync_proto_msgTypes[7] + mi := &file_sync_sync_proto_msgTypes[4] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -502,7 +345,7 @@ func (x *GetChangeProofResponse) String() string { func (*GetChangeProofResponse) ProtoMessage() {} func (x *GetChangeProofResponse) ProtoReflect() protoreflect.Message { - mi := &file_sync_sync_proto_msgTypes[7] + mi := &file_sync_sync_proto_msgTypes[4] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -515,7 +358,7 @@ func (x *GetChangeProofResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use GetChangeProofResponse.ProtoReflect.Descriptor instead. func (*GetChangeProofResponse) Descriptor() ([]byte, []int) { - return file_sync_sync_proto_rawDescGZIP(), []int{7} + return file_sync_sync_proto_rawDescGZIP(), []int{4} } func (m *GetChangeProofResponse) GetResponse() isGetChangeProofResponse_Response { @@ -570,7 +413,7 @@ type VerifyChangeProofRequest struct { func (x *VerifyChangeProofRequest) Reset() { *x = VerifyChangeProofRequest{} if protoimpl.UnsafeEnabled { - mi := &file_sync_sync_proto_msgTypes[8] + mi := &file_sync_sync_proto_msgTypes[5] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -583,7 +426,7 @@ func (x *VerifyChangeProofRequest) String() string { func (*VerifyChangeProofRequest) ProtoMessage() {} func (x *VerifyChangeProofRequest) ProtoReflect() protoreflect.Message { - mi := &file_sync_sync_proto_msgTypes[8] + mi := &file_sync_sync_proto_msgTypes[5] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -596,7 +439,7 @@ func (x *VerifyChangeProofRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use VerifyChangeProofRequest.ProtoReflect.Descriptor instead. func (*VerifyChangeProofRequest) Descriptor() ([]byte, []int) { - return file_sync_sync_proto_rawDescGZIP(), []int{8} + return file_sync_sync_proto_rawDescGZIP(), []int{5} } func (x *VerifyChangeProofRequest) GetProof() *ChangeProof { @@ -639,7 +482,7 @@ type VerifyChangeProofResponse struct { func (x *VerifyChangeProofResponse) Reset() { *x = VerifyChangeProofResponse{} if protoimpl.UnsafeEnabled { - mi := &file_sync_sync_proto_msgTypes[9] + mi := &file_sync_sync_proto_msgTypes[6] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -652,7 +495,7 @@ func (x *VerifyChangeProofResponse) String() string { func (*VerifyChangeProofResponse) ProtoMessage() {} func (x *VerifyChangeProofResponse) ProtoReflect() protoreflect.Message { - mi := &file_sync_sync_proto_msgTypes[9] + mi := &file_sync_sync_proto_msgTypes[6] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -665,7 +508,7 @@ func (x *VerifyChangeProofResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use VerifyChangeProofResponse.ProtoReflect.Descriptor instead. 
func (*VerifyChangeProofResponse) Descriptor() ([]byte, []int) { - return file_sync_sync_proto_rawDescGZIP(), []int{9} + return file_sync_sync_proto_rawDescGZIP(), []int{6} } func (x *VerifyChangeProofResponse) GetError() string { @@ -686,7 +529,7 @@ type CommitChangeProofRequest struct { func (x *CommitChangeProofRequest) Reset() { *x = CommitChangeProofRequest{} if protoimpl.UnsafeEnabled { - mi := &file_sync_sync_proto_msgTypes[10] + mi := &file_sync_sync_proto_msgTypes[7] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -699,7 +542,7 @@ func (x *CommitChangeProofRequest) String() string { func (*CommitChangeProofRequest) ProtoMessage() {} func (x *CommitChangeProofRequest) ProtoReflect() protoreflect.Message { - mi := &file_sync_sync_proto_msgTypes[10] + mi := &file_sync_sync_proto_msgTypes[7] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -712,7 +555,7 @@ func (x *CommitChangeProofRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use CommitChangeProofRequest.ProtoReflect.Descriptor instead. func (*CommitChangeProofRequest) Descriptor() ([]byte, []int) { - return file_sync_sync_proto_rawDescGZIP(), []int{10} + return file_sync_sync_proto_rawDescGZIP(), []int{7} } func (x *CommitChangeProofRequest) GetProof() *ChangeProof { @@ -739,7 +582,7 @@ type SyncGetRangeProofRequest struct { func (x *SyncGetRangeProofRequest) Reset() { *x = SyncGetRangeProofRequest{} if protoimpl.UnsafeEnabled { - mi := &file_sync_sync_proto_msgTypes[11] + mi := &file_sync_sync_proto_msgTypes[8] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -752,7 +595,7 @@ func (x *SyncGetRangeProofRequest) String() string { func (*SyncGetRangeProofRequest) ProtoMessage() {} func (x *SyncGetRangeProofRequest) ProtoReflect() protoreflect.Message { - mi := &file_sync_sync_proto_msgTypes[11] + mi := &file_sync_sync_proto_msgTypes[8] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -765,7 +608,7 @@ func (x *SyncGetRangeProofRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use SyncGetRangeProofRequest.ProtoReflect.Descriptor instead. func (*SyncGetRangeProofRequest) Descriptor() ([]byte, []int) { - return file_sync_sync_proto_rawDescGZIP(), []int{11} + return file_sync_sync_proto_rawDescGZIP(), []int{8} } func (x *SyncGetRangeProofRequest) GetRootHash() []byte { @@ -817,7 +660,7 @@ type GetRangeProofRequest struct { func (x *GetRangeProofRequest) Reset() { *x = GetRangeProofRequest{} if protoimpl.UnsafeEnabled { - mi := &file_sync_sync_proto_msgTypes[12] + mi := &file_sync_sync_proto_msgTypes[9] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -830,7 +673,7 @@ func (x *GetRangeProofRequest) String() string { func (*GetRangeProofRequest) ProtoMessage() {} func (x *GetRangeProofRequest) ProtoReflect() protoreflect.Message { - mi := &file_sync_sync_proto_msgTypes[12] + mi := &file_sync_sync_proto_msgTypes[9] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -843,7 +686,7 @@ func (x *GetRangeProofRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use GetRangeProofRequest.ProtoReflect.Descriptor instead. 
func (*GetRangeProofRequest) Descriptor() ([]byte, []int) { - return file_sync_sync_proto_rawDescGZIP(), []int{12} + return file_sync_sync_proto_rawDescGZIP(), []int{9} } func (x *GetRangeProofRequest) GetRootHash() []byte { @@ -888,7 +731,7 @@ type VerifyRangeProofRequest struct { func (x *VerifyRangeProofRequest) Reset() { *x = VerifyRangeProofRequest{} if protoimpl.UnsafeEnabled { - mi := &file_sync_sync_proto_msgTypes[13] + mi := &file_sync_sync_proto_msgTypes[10] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -901,7 +744,7 @@ func (x *VerifyRangeProofRequest) String() string { func (*VerifyRangeProofRequest) ProtoMessage() {} func (x *VerifyRangeProofRequest) ProtoReflect() protoreflect.Message { - mi := &file_sync_sync_proto_msgTypes[13] + mi := &file_sync_sync_proto_msgTypes[10] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -914,7 +757,7 @@ func (x *VerifyRangeProofRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use VerifyRangeProofRequest.ProtoReflect.Descriptor instead. func (*VerifyRangeProofRequest) Descriptor() ([]byte, []int) { - return file_sync_sync_proto_rawDescGZIP(), []int{13} + return file_sync_sync_proto_rawDescGZIP(), []int{10} } func (x *VerifyRangeProofRequest) GetProof() *RangeProof { @@ -957,7 +800,7 @@ type VerifyRangeProofResponse struct { func (x *VerifyRangeProofResponse) Reset() { *x = VerifyRangeProofResponse{} if protoimpl.UnsafeEnabled { - mi := &file_sync_sync_proto_msgTypes[14] + mi := &file_sync_sync_proto_msgTypes[11] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -970,7 +813,7 @@ func (x *VerifyRangeProofResponse) String() string { func (*VerifyRangeProofResponse) ProtoMessage() {} func (x *VerifyRangeProofResponse) ProtoReflect() protoreflect.Message { - mi := &file_sync_sync_proto_msgTypes[14] + mi := &file_sync_sync_proto_msgTypes[11] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -983,7 +826,7 @@ func (x *VerifyRangeProofResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use VerifyRangeProofResponse.ProtoReflect.Descriptor instead. func (*VerifyRangeProofResponse) Descriptor() ([]byte, []int) { - return file_sync_sync_proto_rawDescGZIP(), []int{14} + return file_sync_sync_proto_rawDescGZIP(), []int{11} } func (x *VerifyRangeProofResponse) GetError() string { @@ -1004,7 +847,7 @@ type GetRangeProofResponse struct { func (x *GetRangeProofResponse) Reset() { *x = GetRangeProofResponse{} if protoimpl.UnsafeEnabled { - mi := &file_sync_sync_proto_msgTypes[15] + mi := &file_sync_sync_proto_msgTypes[12] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1017,7 +860,7 @@ func (x *GetRangeProofResponse) String() string { func (*GetRangeProofResponse) ProtoMessage() {} func (x *GetRangeProofResponse) ProtoReflect() protoreflect.Message { - mi := &file_sync_sync_proto_msgTypes[15] + mi := &file_sync_sync_proto_msgTypes[12] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1030,7 +873,7 @@ func (x *GetRangeProofResponse) ProtoReflect() protoreflect.Message { // Deprecated: Use GetRangeProofResponse.ProtoReflect.Descriptor instead. 
func (*GetRangeProofResponse) Descriptor() ([]byte, []int) { - return file_sync_sync_proto_rawDescGZIP(), []int{15} + return file_sync_sync_proto_rawDescGZIP(), []int{12} } func (x *GetRangeProofResponse) GetProof() *RangeProof { @@ -1053,7 +896,7 @@ type CommitRangeProofRequest struct { func (x *CommitRangeProofRequest) Reset() { *x = CommitRangeProofRequest{} if protoimpl.UnsafeEnabled { - mi := &file_sync_sync_proto_msgTypes[16] + mi := &file_sync_sync_proto_msgTypes[13] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1066,7 +909,7 @@ func (x *CommitRangeProofRequest) String() string { func (*CommitRangeProofRequest) ProtoMessage() {} func (x *CommitRangeProofRequest) ProtoReflect() protoreflect.Message { - mi := &file_sync_sync_proto_msgTypes[16] + mi := &file_sync_sync_proto_msgTypes[13] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1079,7 +922,7 @@ func (x *CommitRangeProofRequest) ProtoReflect() protoreflect.Message { // Deprecated: Use CommitRangeProofRequest.ProtoReflect.Descriptor instead. func (*CommitRangeProofRequest) Descriptor() ([]byte, []int) { - return file_sync_sync_proto_rawDescGZIP(), []int{16} + return file_sync_sync_proto_rawDescGZIP(), []int{13} } func (x *CommitRangeProofRequest) GetStartKey() *MaybeBytes { @@ -1116,7 +959,7 @@ type ChangeProof struct { func (x *ChangeProof) Reset() { *x = ChangeProof{} if protoimpl.UnsafeEnabled { - mi := &file_sync_sync_proto_msgTypes[17] + mi := &file_sync_sync_proto_msgTypes[14] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1129,7 +972,7 @@ func (x *ChangeProof) String() string { func (*ChangeProof) ProtoMessage() {} func (x *ChangeProof) ProtoReflect() protoreflect.Message { - mi := &file_sync_sync_proto_msgTypes[17] + mi := &file_sync_sync_proto_msgTypes[14] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1142,7 +985,7 @@ func (x *ChangeProof) ProtoReflect() protoreflect.Message { // Deprecated: Use ChangeProof.ProtoReflect.Descriptor instead. func (*ChangeProof) Descriptor() ([]byte, []int) { - return file_sync_sync_proto_rawDescGZIP(), []int{17} + return file_sync_sync_proto_rawDescGZIP(), []int{14} } func (x *ChangeProof) GetStartProof() []*ProofNode { @@ -1179,7 +1022,7 @@ type RangeProof struct { func (x *RangeProof) Reset() { *x = RangeProof{} if protoimpl.UnsafeEnabled { - mi := &file_sync_sync_proto_msgTypes[18] + mi := &file_sync_sync_proto_msgTypes[15] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1192,7 +1035,7 @@ func (x *RangeProof) String() string { func (*RangeProof) ProtoMessage() {} func (x *RangeProof) ProtoReflect() protoreflect.Message { - mi := &file_sync_sync_proto_msgTypes[18] + mi := &file_sync_sync_proto_msgTypes[15] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1205,7 +1048,7 @@ func (x *RangeProof) ProtoReflect() protoreflect.Message { // Deprecated: Use RangeProof.ProtoReflect.Descriptor instead. 
func (*RangeProof) Descriptor() ([]byte, []int) { - return file_sync_sync_proto_rawDescGZIP(), []int{18} + return file_sync_sync_proto_rawDescGZIP(), []int{15} } func (x *RangeProof) GetStartProof() []*ProofNode { @@ -1242,7 +1085,7 @@ type ProofNode struct { func (x *ProofNode) Reset() { *x = ProofNode{} if protoimpl.UnsafeEnabled { - mi := &file_sync_sync_proto_msgTypes[19] + mi := &file_sync_sync_proto_msgTypes[16] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1255,7 +1098,7 @@ func (x *ProofNode) String() string { func (*ProofNode) ProtoMessage() {} func (x *ProofNode) ProtoReflect() protoreflect.Message { - mi := &file_sync_sync_proto_msgTypes[19] + mi := &file_sync_sync_proto_msgTypes[16] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1268,7 +1111,7 @@ func (x *ProofNode) ProtoReflect() protoreflect.Message { // Deprecated: Use ProofNode.ProtoReflect.Descriptor instead. func (*ProofNode) Descriptor() ([]byte, []int) { - return file_sync_sync_proto_rawDescGZIP(), []int{19} + return file_sync_sync_proto_rawDescGZIP(), []int{16} } func (x *ProofNode) GetKey() *Key { @@ -1304,7 +1147,7 @@ type KeyChange struct { func (x *KeyChange) Reset() { *x = KeyChange{} if protoimpl.UnsafeEnabled { - mi := &file_sync_sync_proto_msgTypes[20] + mi := &file_sync_sync_proto_msgTypes[17] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1317,7 +1160,7 @@ func (x *KeyChange) String() string { func (*KeyChange) ProtoMessage() {} func (x *KeyChange) ProtoReflect() protoreflect.Message { - mi := &file_sync_sync_proto_msgTypes[20] + mi := &file_sync_sync_proto_msgTypes[17] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1330,7 +1173,7 @@ func (x *KeyChange) ProtoReflect() protoreflect.Message { // Deprecated: Use KeyChange.ProtoReflect.Descriptor instead. func (*KeyChange) Descriptor() ([]byte, []int) { - return file_sync_sync_proto_rawDescGZIP(), []int{20} + return file_sync_sync_proto_rawDescGZIP(), []int{17} } func (x *KeyChange) GetKey() []byte { @@ -1359,7 +1202,7 @@ type Key struct { func (x *Key) Reset() { *x = Key{} if protoimpl.UnsafeEnabled { - mi := &file_sync_sync_proto_msgTypes[21] + mi := &file_sync_sync_proto_msgTypes[18] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1372,7 +1215,7 @@ func (x *Key) String() string { func (*Key) ProtoMessage() {} func (x *Key) ProtoReflect() protoreflect.Message { - mi := &file_sync_sync_proto_msgTypes[21] + mi := &file_sync_sync_proto_msgTypes[18] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1385,7 +1228,7 @@ func (x *Key) ProtoReflect() protoreflect.Message { // Deprecated: Use Key.ProtoReflect.Descriptor instead. 
func (*Key) Descriptor() ([]byte, []int) { - return file_sync_sync_proto_rawDescGZIP(), []int{21} + return file_sync_sync_proto_rawDescGZIP(), []int{18} } func (x *Key) GetLength() uint64 { @@ -1416,7 +1259,7 @@ type MaybeBytes struct { func (x *MaybeBytes) Reset() { *x = MaybeBytes{} if protoimpl.UnsafeEnabled { - mi := &file_sync_sync_proto_msgTypes[22] + mi := &file_sync_sync_proto_msgTypes[19] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1429,7 +1272,7 @@ func (x *MaybeBytes) String() string { func (*MaybeBytes) ProtoMessage() {} func (x *MaybeBytes) ProtoReflect() protoreflect.Message { - mi := &file_sync_sync_proto_msgTypes[22] + mi := &file_sync_sync_proto_msgTypes[19] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1442,7 +1285,7 @@ func (x *MaybeBytes) ProtoReflect() protoreflect.Message { // Deprecated: Use MaybeBytes.ProtoReflect.Descriptor instead. func (*MaybeBytes) Descriptor() ([]byte, []int) { - return file_sync_sync_proto_rawDescGZIP(), []int{22} + return file_sync_sync_proto_rawDescGZIP(), []int{19} } func (x *MaybeBytes) GetValue() []byte { @@ -1471,7 +1314,7 @@ type KeyValue struct { func (x *KeyValue) Reset() { *x = KeyValue{} if protoimpl.UnsafeEnabled { - mi := &file_sync_sync_proto_msgTypes[23] + mi := &file_sync_sync_proto_msgTypes[20] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } @@ -1484,7 +1327,7 @@ func (x *KeyValue) String() string { func (*KeyValue) ProtoMessage() {} func (x *KeyValue) ProtoReflect() protoreflect.Message { - mi := &file_sync_sync_proto_msgTypes[23] + mi := &file_sync_sync_proto_msgTypes[20] if protoimpl.UnsafeEnabled && x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { @@ -1497,7 +1340,7 @@ func (x *KeyValue) ProtoReflect() protoreflect.Message { // Deprecated: Use KeyValue.ProtoReflect.Descriptor instead. 
func (*KeyValue) Descriptor() ([]byte, []int) { - return file_sync_sync_proto_rawDescGZIP(), []int{23} + return file_sync_sync_proto_rawDescGZIP(), []int{20} } func (x *KeyValue) GetKey() []byte { @@ -1523,20 +1366,33 @@ var file_sync_sync_proto_rawDesc = []byte{ 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x34, 0x0a, 0x15, 0x47, 0x65, 0x74, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x1b, 0x0a, 0x09, 0x72, 0x6f, 0x6f, 0x74, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, - 0x52, 0x08, 0x72, 0x6f, 0x6f, 0x74, 0x48, 0x61, 0x73, 0x68, 0x22, 0x23, 0x0a, 0x0f, 0x47, 0x65, - 0x74, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x10, 0x0a, - 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x22, - 0x35, 0x0a, 0x10, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, - 0x6e, 0x73, 0x65, 0x12, 0x21, 0x0a, 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x0b, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, - 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x22, 0x68, 0x0a, 0x05, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, - 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x03, 0x6b, 0x65, - 0x79, 0x12, 0x26, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, - 0x65, 0x73, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x25, 0x0a, 0x05, 0x70, 0x72, 0x6f, - 0x6f, 0x66, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, - 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x4e, 0x6f, 0x64, 0x65, 0x52, 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, - 0x22, 0xff, 0x01, 0x0a, 0x19, 0x53, 0x79, 0x6e, 0x63, 0x47, 0x65, 0x74, 0x43, 0x68, 0x61, 0x6e, + 0x52, 0x08, 0x72, 0x6f, 0x6f, 0x74, 0x48, 0x61, 0x73, 0x68, 0x22, 0xff, 0x01, 0x0a, 0x19, 0x53, + 0x79, 0x6e, 0x63, 0x47, 0x65, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, + 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x26, 0x0a, 0x0f, 0x73, 0x74, 0x61, 0x72, + 0x74, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0c, 0x52, 0x0d, 0x73, 0x74, 0x61, 0x72, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x48, 0x61, 0x73, 0x68, + 0x12, 0x22, 0x0a, 0x0d, 0x65, 0x6e, 0x64, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x5f, 0x68, 0x61, 0x73, + 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0b, 0x65, 0x6e, 0x64, 0x52, 0x6f, 0x6f, 0x74, + 0x48, 0x61, 0x73, 0x68, 0x12, 0x2d, 0x0a, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x6b, 0x65, + 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4d, + 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x08, 0x73, 0x74, 0x61, 0x72, 0x74, + 0x4b, 0x65, 0x79, 0x12, 0x29, 0x0a, 0x07, 0x65, 0x6e, 0x64, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x04, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, + 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x06, 0x65, 0x6e, 0x64, 0x4b, 0x65, 0x79, 0x12, 0x1b, + 0x0a, 0x09, 0x6b, 0x65, 0x79, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, + 0x0d, 0x52, 0x08, 0x6b, 0x65, 0x79, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x62, + 0x79, 0x74, 0x65, 0x73, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0d, + 0x52, 0x0a, 0x62, 0x79, 0x74, 0x65, 0x73, 0x4c, 0x69, 0x6d, 
0x69, 0x74, 0x22, 0x95, 0x01, 0x0a, + 0x1a, 0x53, 0x79, 0x6e, 0x63, 0x47, 0x65, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, + 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x36, 0x0a, 0x0c, 0x63, + 0x68, 0x61, 0x6e, 0x67, 0x65, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x01, 0x20, 0x01, 0x28, + 0x0b, 0x32, 0x11, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, + 0x72, 0x6f, 0x6f, 0x66, 0x48, 0x00, 0x52, 0x0b, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, + 0x6f, 0x6f, 0x66, 0x12, 0x33, 0x0a, 0x0b, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x5f, 0x70, 0x72, 0x6f, + 0x6f, 0x66, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, + 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x48, 0x00, 0x52, 0x0a, 0x72, 0x61, + 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x42, 0x0a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, + 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xda, 0x01, 0x0a, 0x15, 0x47, 0x65, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x26, 0x0a, 0x0f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0d, 0x73, 0x74, 0x61, 0x72, 0x74, 0x52, 0x6f, @@ -1550,209 +1406,180 @@ var file_sync_sync_proto_rawDesc = []byte{ 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x06, 0x65, 0x6e, 0x64, 0x4b, 0x65, 0x79, 0x12, 0x1b, 0x0a, 0x09, 0x6b, 0x65, 0x79, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x08, 0x6b, 0x65, 0x79, 0x4c, 0x69, 0x6d, 0x69, - 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x62, 0x79, 0x74, 0x65, 0x73, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, - 0x18, 0x06, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0a, 0x62, 0x79, 0x74, 0x65, 0x73, 0x4c, 0x69, 0x6d, - 0x69, 0x74, 0x22, 0x95, 0x01, 0x0a, 0x1a, 0x53, 0x79, 0x6e, 0x63, 0x47, 0x65, 0x74, 0x43, 0x68, - 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, - 0x65, 0x12, 0x36, 0x0a, 0x0c, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x5f, 0x70, 0x72, 0x6f, 0x6f, - 0x66, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x43, - 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x48, 0x00, 0x52, 0x0b, 0x63, 0x68, - 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x33, 0x0a, 0x0b, 0x72, 0x61, 0x6e, - 0x67, 0x65, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, - 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, - 0x48, 0x00, 0x52, 0x0a, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x42, 0x0a, - 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xda, 0x01, 0x0a, 0x15, 0x47, - 0x65, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x12, 0x26, 0x0a, 0x0f, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x72, 0x6f, - 0x6f, 0x74, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x0d, 0x73, - 0x74, 0x61, 0x72, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x48, 0x61, 0x73, 0x68, 0x12, 0x22, 0x0a, 0x0d, - 0x65, 0x6e, 0x64, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0c, 0x52, 0x0b, 0x65, 0x6e, 0x64, 0x52, 0x6f, 0x6f, 0x74, 0x48, 0x61, 0x73, 0x68, - 0x12, 0x2d, 0x0a, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x03, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x10, 
0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, - 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x08, 0x73, 0x74, 0x61, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x12, - 0x29, 0x0a, 0x07, 0x65, 0x6e, 0x64, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, - 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, - 0x65, 0x73, 0x52, 0x06, 0x65, 0x6e, 0x64, 0x4b, 0x65, 0x79, 0x12, 0x1b, 0x0a, 0x09, 0x6b, 0x65, - 0x79, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x08, 0x6b, - 0x65, 0x79, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x22, 0x88, 0x01, 0x0a, 0x16, 0x47, 0x65, 0x74, 0x43, - 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x12, 0x36, 0x0a, 0x0c, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x5f, 0x70, 0x72, 0x6f, + 0x74, 0x22, 0x88, 0x01, 0x0a, 0x16, 0x47, 0x65, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, + 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x36, 0x0a, 0x0c, + 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, + 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x48, 0x00, 0x52, 0x0b, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, + 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x2a, 0x0a, 0x10, 0x72, 0x6f, 0x6f, 0x74, 0x5f, 0x6e, 0x6f, 0x74, + 0x5f, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, + 0x52, 0x0e, 0x72, 0x6f, 0x6f, 0x74, 0x4e, 0x6f, 0x74, 0x50, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x74, + 0x42, 0x0a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x22, 0xcb, 0x01, 0x0a, + 0x18, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, + 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x27, 0x0a, 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, - 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x48, 0x00, 0x52, 0x0b, 0x63, - 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x2a, 0x0a, 0x10, 0x72, 0x6f, - 0x6f, 0x74, 0x5f, 0x6e, 0x6f, 0x74, 0x5f, 0x70, 0x72, 0x65, 0x73, 0x65, 0x6e, 0x74, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x0e, 0x72, 0x6f, 0x6f, 0x74, 0x4e, 0x6f, 0x74, 0x50, - 0x72, 0x65, 0x73, 0x65, 0x6e, 0x74, 0x42, 0x0a, 0x0a, 0x08, 0x72, 0x65, 0x73, 0x70, 0x6f, 0x6e, - 0x73, 0x65, 0x22, 0xcb, 0x01, 0x0a, 0x18, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x43, 0x68, 0x61, - 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, - 0x27, 0x0a, 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, - 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, - 0x66, 0x52, 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x2d, 0x0a, 0x09, 0x73, 0x74, 0x61, 0x72, - 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, - 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x08, 0x73, - 0x74, 0x61, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x12, 0x29, 0x0a, 0x07, 0x65, 0x6e, 0x64, 0x5f, 0x6b, - 0x65, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, - 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x06, 0x65, 0x6e, 0x64, 0x4b, - 0x65, 0x79, 0x12, 0x2c, 0x0a, 0x12, 0x65, 0x78, 0x70, 
0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x72, - 0x6f, 0x6f, 0x74, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x10, - 0x65, 0x78, 0x70, 0x65, 0x63, 0x74, 0x65, 0x64, 0x52, 0x6f, 0x6f, 0x74, 0x48, 0x61, 0x73, 0x68, - 0x22, 0x31, 0x0a, 0x19, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, - 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, - 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, - 0x72, 0x6f, 0x72, 0x22, 0x43, 0x0a, 0x18, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x43, 0x68, 0x61, - 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, - 0x27, 0x0a, 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, - 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, - 0x66, 0x52, 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x22, 0xcf, 0x01, 0x0a, 0x18, 0x53, 0x79, 0x6e, - 0x63, 0x47, 0x65, 0x74, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, - 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x72, 0x6f, 0x6f, 0x74, 0x5f, 0x68, 0x61, - 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x08, 0x72, 0x6f, 0x6f, 0x74, 0x48, 0x61, - 0x73, 0x68, 0x12, 0x2d, 0x0a, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x18, + 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x05, 0x70, 0x72, 0x6f, + 0x6f, 0x66, 0x12, 0x2d, 0x0a, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x08, 0x73, 0x74, 0x61, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x12, 0x29, 0x0a, 0x07, 0x65, 0x6e, 0x64, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, - 0x79, 0x74, 0x65, 0x73, 0x52, 0x06, 0x65, 0x6e, 0x64, 0x4b, 0x65, 0x79, 0x12, 0x1b, 0x0a, 0x09, - 0x6b, 0x65, 0x79, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0d, 0x52, - 0x08, 0x6b, 0x65, 0x79, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x62, 0x79, 0x74, - 0x65, 0x73, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0a, - 0x62, 0x79, 0x74, 0x65, 0x73, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x22, 0xaa, 0x01, 0x0a, 0x14, 0x47, - 0x65, 0x74, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, 0x09, 0x72, 0x6f, 0x6f, 0x74, 0x5f, 0x68, 0x61, 0x73, 0x68, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x08, 0x72, 0x6f, 0x6f, 0x74, 0x48, 0x61, 0x73, 0x68, - 0x12, 0x2d, 0x0a, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x02, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, - 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x08, 0x73, 0x74, 0x61, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x12, - 0x29, 0x0a, 0x07, 0x65, 0x6e, 0x64, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, + 0x79, 0x74, 0x65, 0x73, 0x52, 0x06, 0x65, 0x6e, 0x64, 0x4b, 0x65, 0x79, 0x12, 0x2c, 0x0a, 0x12, + 0x65, 0x78, 0x70, 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x5f, 0x68, 0x61, + 0x73, 0x68, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x10, 0x65, 0x78, 0x70, 0x65, 0x63, 0x74, + 0x65, 0x64, 0x52, 0x6f, 0x6f, 0x74, 0x48, 0x61, 0x73, 0x68, 0x22, 0x31, 0x0a, 0x19, 
0x56, 0x65, + 0x72, 0x69, 0x66, 0x79, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, + 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, + 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x43, 0x0a, + 0x18, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, + 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x27, 0x0a, 0x05, 0x70, 0x72, 0x6f, + 0x6f, 0x66, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x11, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, + 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x05, 0x70, 0x72, 0x6f, + 0x6f, 0x66, 0x22, 0xcf, 0x01, 0x0a, 0x18, 0x53, 0x79, 0x6e, 0x63, 0x47, 0x65, 0x74, 0x52, 0x61, + 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, + 0x1b, 0x0a, 0x09, 0x72, 0x6f, 0x6f, 0x74, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, + 0x28, 0x0c, 0x52, 0x08, 0x72, 0x6f, 0x6f, 0x74, 0x48, 0x61, 0x73, 0x68, 0x12, 0x2d, 0x0a, 0x09, + 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, + 0x73, 0x52, 0x08, 0x73, 0x74, 0x61, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x12, 0x29, 0x0a, 0x07, 0x65, + 0x6e, 0x64, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, + 0x79, 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x06, + 0x65, 0x6e, 0x64, 0x4b, 0x65, 0x79, 0x12, 0x1b, 0x0a, 0x09, 0x6b, 0x65, 0x79, 0x5f, 0x6c, 0x69, + 0x6d, 0x69, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x08, 0x6b, 0x65, 0x79, 0x4c, 0x69, + 0x6d, 0x69, 0x74, 0x12, 0x1f, 0x0a, 0x0b, 0x62, 0x79, 0x74, 0x65, 0x73, 0x5f, 0x6c, 0x69, 0x6d, + 0x69, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0a, 0x62, 0x79, 0x74, 0x65, 0x73, 0x4c, + 0x69, 0x6d, 0x69, 0x74, 0x22, 0xaa, 0x01, 0x0a, 0x14, 0x47, 0x65, 0x74, 0x52, 0x61, 0x6e, 0x67, + 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x1b, 0x0a, + 0x09, 0x72, 0x6f, 0x6f, 0x74, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, + 0x52, 0x08, 0x72, 0x6f, 0x6f, 0x74, 0x48, 0x61, 0x73, 0x68, 0x12, 0x2d, 0x0a, 0x09, 0x73, 0x74, + 0x61, 0x72, 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, + 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, + 0x08, 0x73, 0x74, 0x61, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x12, 0x29, 0x0a, 0x07, 0x65, 0x6e, 0x64, + 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, + 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x06, 0x65, 0x6e, + 0x64, 0x4b, 0x65, 0x79, 0x12, 0x1b, 0x0a, 0x09, 0x6b, 0x65, 0x79, 0x5f, 0x6c, 0x69, 0x6d, 0x69, + 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x08, 0x6b, 0x65, 0x79, 0x4c, 0x69, 0x6d, 0x69, + 0x74, 0x22, 0xc9, 0x01, 0x0a, 0x17, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x52, 0x61, 0x6e, 0x67, + 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x26, 0x0a, + 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, + 0x79, 0x6e, 0x63, 0x2e, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x05, + 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x2d, 0x0a, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x6b, + 0x65, 0x79, 
0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, + 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x08, 0x73, 0x74, 0x61, 0x72, + 0x74, 0x4b, 0x65, 0x79, 0x12, 0x29, 0x0a, 0x07, 0x65, 0x6e, 0x64, 0x5f, 0x6b, 0x65, 0x79, 0x18, + 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, + 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x06, 0x65, 0x6e, 0x64, 0x4b, 0x65, 0x79, 0x12, + 0x2c, 0x0a, 0x12, 0x65, 0x78, 0x70, 0x65, 0x63, 0x74, 0x65, 0x64, 0x5f, 0x72, 0x6f, 0x6f, 0x74, + 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x10, 0x65, 0x78, 0x70, + 0x65, 0x63, 0x74, 0x65, 0x64, 0x52, 0x6f, 0x6f, 0x74, 0x48, 0x61, 0x73, 0x68, 0x22, 0x30, 0x0a, + 0x18, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, + 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, + 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, + 0x3f, 0x0a, 0x15, 0x47, 0x65, 0x74, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x26, 0x0a, 0x05, 0x70, 0x72, 0x6f, 0x6f, + 0x66, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x52, + 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, + 0x22, 0xa6, 0x01, 0x0a, 0x17, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x52, 0x61, 0x6e, 0x67, 0x65, + 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2d, 0x0a, 0x09, + 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, + 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, + 0x73, 0x52, 0x08, 0x73, 0x74, 0x61, 0x72, 0x74, 0x4b, 0x65, 0x79, 0x12, 0x29, 0x0a, 0x07, 0x65, + 0x6e, 0x64, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, + 0x79, 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x06, + 0x65, 0x6e, 0x64, 0x4b, 0x65, 0x79, 0x12, 0x31, 0x0a, 0x0b, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x5f, + 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, + 0x6e, 0x63, 0x2e, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x0a, 0x72, + 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x22, 0x9f, 0x01, 0x0a, 0x0b, 0x43, 0x68, + 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x30, 0x0a, 0x0b, 0x73, 0x74, 0x61, + 0x72, 0x74, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0f, + 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x4e, 0x6f, 0x64, 0x65, 0x52, + 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x2c, 0x0a, 0x09, 0x65, + 0x6e, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0f, + 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x4e, 0x6f, 0x64, 0x65, 0x52, + 0x08, 0x65, 0x6e, 0x64, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x30, 0x0a, 0x0b, 0x6b, 0x65, 0x79, + 0x5f, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0f, + 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4b, 0x65, 0x79, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x52, + 0x0a, 0x6b, 0x65, 0x79, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x73, 0x22, 0x9b, 0x01, 0x0a, 0x0a, + 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 
0x72, 0x6f, 0x6f, 0x66, 0x12, 0x30, 0x0a, 0x0b, 0x73, 0x74, + 0x61, 0x72, 0x74, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x0f, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x4e, 0x6f, 0x64, 0x65, + 0x52, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x2c, 0x0a, 0x09, + 0x65, 0x6e, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, + 0x0f, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x4e, 0x6f, 0x64, 0x65, + 0x52, 0x08, 0x65, 0x6e, 0x64, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x2d, 0x0a, 0x0a, 0x6b, 0x65, + 0x79, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0e, + 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4b, 0x65, 0x79, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x09, + 0x6b, 0x65, 0x79, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x22, 0xd6, 0x01, 0x0a, 0x09, 0x50, 0x72, + 0x6f, 0x6f, 0x66, 0x4e, 0x6f, 0x64, 0x65, 0x12, 0x1b, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, + 0x20, 0x01, 0x28, 0x0b, 0x32, 0x09, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4b, 0x65, 0x79, 0x52, + 0x03, 0x6b, 0x65, 0x79, 0x12, 0x34, 0x0a, 0x0d, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x5f, 0x6f, 0x72, + 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, + 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x0b, 0x76, + 0x61, 0x6c, 0x75, 0x65, 0x4f, 0x72, 0x48, 0x61, 0x73, 0x68, 0x12, 0x39, 0x0a, 0x08, 0x63, 0x68, + 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x73, + 0x79, 0x6e, 0x63, 0x2e, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x4e, 0x6f, 0x64, 0x65, 0x2e, 0x43, 0x68, + 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x08, 0x63, 0x68, 0x69, + 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x1a, 0x3b, 0x0a, 0x0d, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, + 0x6e, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, + 0x01, 0x28, 0x0d, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, + 0x38, 0x01, 0x22, 0x45, 0x0a, 0x09, 0x4b, 0x65, 0x79, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x12, + 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x03, 0x6b, 0x65, + 0x79, 0x12, 0x26, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, - 0x65, 0x73, 0x52, 0x06, 0x65, 0x6e, 0x64, 0x4b, 0x65, 0x79, 0x12, 0x1b, 0x0a, 0x09, 0x6b, 0x65, - 0x79, 0x5f, 0x6c, 0x69, 0x6d, 0x69, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x08, 0x6b, - 0x65, 0x79, 0x4c, 0x69, 0x6d, 0x69, 0x74, 0x22, 0xc9, 0x01, 0x0a, 0x17, 0x56, 0x65, 0x72, 0x69, - 0x66, 0x79, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, - 0x65, 0x73, 0x74, 0x12, 0x26, 0x0a, 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, - 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x2d, 0x0a, 0x09, 0x73, - 0x74, 0x61, 0x72, 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, - 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, - 0x52, 0x08, 0x73, 0x74, 0x61, 0x72, 0x74, 0x4b, 0x65, 0x79, 
0x12, 0x29, 0x0a, 0x07, 0x65, 0x6e, - 0x64, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, - 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x06, 0x65, - 0x6e, 0x64, 0x4b, 0x65, 0x79, 0x12, 0x2c, 0x0a, 0x12, 0x65, 0x78, 0x70, 0x65, 0x63, 0x74, 0x65, - 0x64, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x04, 0x20, 0x01, 0x28, - 0x0c, 0x52, 0x10, 0x65, 0x78, 0x70, 0x65, 0x63, 0x74, 0x65, 0x64, 0x52, 0x6f, 0x6f, 0x74, 0x48, - 0x61, 0x73, 0x68, 0x22, 0x30, 0x0a, 0x18, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x52, 0x61, 0x6e, - 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, - 0x14, 0x0a, 0x05, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, - 0x65, 0x72, 0x72, 0x6f, 0x72, 0x22, 0x3f, 0x0a, 0x15, 0x47, 0x65, 0x74, 0x52, 0x61, 0x6e, 0x67, - 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x26, - 0x0a, 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, - 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, - 0x05, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x22, 0xa6, 0x01, 0x0a, 0x17, 0x43, 0x6f, 0x6d, 0x6d, 0x69, - 0x74, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, - 0x73, 0x74, 0x12, 0x2d, 0x0a, 0x09, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x18, - 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, - 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x08, 0x73, 0x74, 0x61, 0x72, 0x74, 0x4b, 0x65, - 0x79, 0x12, 0x29, 0x0a, 0x07, 0x65, 0x6e, 0x64, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x02, 0x20, 0x01, - 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, - 0x79, 0x74, 0x65, 0x73, 0x52, 0x06, 0x65, 0x6e, 0x64, 0x4b, 0x65, 0x79, 0x12, 0x31, 0x0a, 0x0b, - 0x72, 0x61, 0x6e, 0x67, 0x65, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x03, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, - 0x6f, 0x6f, 0x66, 0x52, 0x0a, 0x72, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x22, - 0x9f, 0x01, 0x0a, 0x0b, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, - 0x30, 0x0a, 0x0b, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x01, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x50, 0x72, 0x6f, 0x6f, - 0x66, 0x4e, 0x6f, 0x64, 0x65, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x50, 0x72, 0x6f, 0x6f, - 0x66, 0x12, 0x2c, 0x0a, 0x09, 0x65, 0x6e, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x02, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x50, 0x72, 0x6f, 0x6f, - 0x66, 0x4e, 0x6f, 0x64, 0x65, 0x52, 0x08, 0x65, 0x6e, 0x64, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, - 0x30, 0x0a, 0x0b, 0x6b, 0x65, 0x79, 0x5f, 0x63, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x73, 0x18, 0x03, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4b, 0x65, 0x79, 0x43, - 0x68, 0x61, 0x6e, 0x67, 0x65, 0x52, 0x0a, 0x6b, 0x65, 0x79, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, - 0x73, 0x22, 0x9b, 0x01, 0x0a, 0x0a, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, - 0x12, 0x30, 0x0a, 0x0b, 0x73, 0x74, 0x61, 0x72, 0x74, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, - 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x50, 
0x72, 0x6f, - 0x6f, 0x66, 0x4e, 0x6f, 0x64, 0x65, 0x52, 0x0a, 0x73, 0x74, 0x61, 0x72, 0x74, 0x50, 0x72, 0x6f, - 0x6f, 0x66, 0x12, 0x2c, 0x0a, 0x09, 0x65, 0x6e, 0x64, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, - 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0f, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x50, 0x72, 0x6f, - 0x6f, 0x66, 0x4e, 0x6f, 0x64, 0x65, 0x52, 0x08, 0x65, 0x6e, 0x64, 0x50, 0x72, 0x6f, 0x6f, 0x66, - 0x12, 0x2d, 0x0a, 0x0a, 0x6b, 0x65, 0x79, 0x5f, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x18, 0x03, - 0x20, 0x03, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4b, 0x65, 0x79, 0x56, - 0x61, 0x6c, 0x75, 0x65, 0x52, 0x09, 0x6b, 0x65, 0x79, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x73, 0x22, - 0xd6, 0x01, 0x0a, 0x09, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x4e, 0x6f, 0x64, 0x65, 0x12, 0x1b, 0x0a, - 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x09, 0x2e, 0x73, 0x79, 0x6e, - 0x63, 0x2e, 0x4b, 0x65, 0x79, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x34, 0x0a, 0x0d, 0x76, 0x61, - 0x6c, 0x75, 0x65, 0x5f, 0x6f, 0x72, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, - 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, - 0x74, 0x65, 0x73, 0x52, 0x0b, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x4f, 0x72, 0x48, 0x61, 0x73, 0x68, - 0x12, 0x39, 0x0a, 0x08, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x18, 0x03, 0x20, 0x03, - 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x4e, - 0x6f, 0x64, 0x65, 0x2e, 0x43, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x45, 0x6e, 0x74, 0x72, - 0x79, 0x52, 0x08, 0x63, 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x1a, 0x3b, 0x0a, 0x0d, 0x43, - 0x68, 0x69, 0x6c, 0x64, 0x72, 0x65, 0x6e, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, - 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, - 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x45, 0x0a, 0x09, 0x4b, 0x65, 0x79, 0x43, - 0x68, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, - 0x28, 0x0c, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x26, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, - 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x10, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x4d, 0x61, - 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, - 0x33, 0x0a, 0x03, 0x4b, 0x65, 0x79, 0x12, 0x16, 0x0a, 0x06, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, - 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, 0x52, 0x06, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x12, 0x14, - 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x76, - 0x61, 0x6c, 0x75, 0x65, 0x22, 0x41, 0x0a, 0x0a, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, - 0x65, 0x73, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, - 0x0c, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x69, 0x73, 0x5f, 0x6e, - 0x6f, 0x74, 0x68, 0x69, 0x6e, 0x67, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x69, 0x73, - 0x4e, 0x6f, 0x74, 0x68, 0x69, 0x6e, 0x67, 0x22, 0x32, 0x0a, 0x08, 0x4b, 0x65, 0x79, 0x56, 0x61, - 0x6c, 0x75, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, - 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, - 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x32, 0x96, 0x05, 0x0a, 0x02, - 0x44, 0x42, 
0x12, 0x44, 0x0a, 0x0d, 0x47, 0x65, 0x74, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, - 0x6f, 0x6f, 0x74, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x1b, 0x2e, 0x73, 0x79, - 0x6e, 0x63, 0x2e, 0x47, 0x65, 0x74, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, - 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x37, 0x0a, 0x05, 0x43, 0x6c, 0x65, 0x61, - 0x72, 0x12, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, - 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, - 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, - 0x79, 0x12, 0x39, 0x0a, 0x08, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x15, 0x2e, - 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x47, 0x65, 0x74, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x47, 0x65, 0x74, 0x50, - 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4b, 0x0a, 0x0e, - 0x47, 0x65, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x1b, - 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x47, 0x65, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, - 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x73, 0x79, - 0x6e, 0x63, 0x2e, 0x47, 0x65, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, - 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x54, 0x0a, 0x11, 0x56, 0x65, 0x72, - 0x69, 0x66, 0x79, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x1e, - 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x43, 0x68, 0x61, 0x6e, - 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, - 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x43, 0x68, 0x61, 0x6e, - 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, - 0x4b, 0x0a, 0x11, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, - 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x1e, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, - 0x69, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, - 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, - 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x12, 0x48, 0x0a, 0x0d, - 0x47, 0x65, 0x74, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x1a, 0x2e, - 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x47, 0x65, 0x74, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, - 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 0x2e, 0x73, 0x79, 0x6e, 0x63, - 0x2e, 0x47, 0x65, 0x74, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, - 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x51, 0x0a, 0x10, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, - 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x1d, 0x2e, 0x73, 0x79, 0x6e, - 0x63, 0x2e, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, - 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x73, 0x79, 0x6e, 0x63, + 0x65, 0x73, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x33, 0x0a, 0x03, 0x4b, 0x65, 0x79, + 0x12, 0x16, 0x0a, 0x06, 0x6c, 0x65, 
0x6e, 0x67, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01, 0x28, 0x04, + 0x52, 0x06, 0x6c, 0x65, 0x6e, 0x67, 0x74, 0x68, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, + 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x41, + 0x0a, 0x0a, 0x4d, 0x61, 0x79, 0x62, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, 0x12, 0x14, 0x0a, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x76, 0x61, 0x6c, + 0x75, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x69, 0x73, 0x5f, 0x6e, 0x6f, 0x74, 0x68, 0x69, 0x6e, 0x67, + 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x09, 0x69, 0x73, 0x4e, 0x6f, 0x74, 0x68, 0x69, 0x6e, + 0x67, 0x22, 0x32, 0x0a, 0x08, 0x4b, 0x65, 0x79, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x10, 0x0a, + 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, + 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, + 0x76, 0x61, 0x6c, 0x75, 0x65, 0x32, 0xdb, 0x04, 0x0a, 0x02, 0x44, 0x42, 0x12, 0x44, 0x0a, 0x0d, + 0x47, 0x65, 0x74, 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x16, 0x2e, + 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, + 0x45, 0x6d, 0x70, 0x74, 0x79, 0x1a, 0x1b, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x47, 0x65, 0x74, + 0x4d, 0x65, 0x72, 0x6b, 0x6c, 0x65, 0x52, 0x6f, 0x6f, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, + 0x73, 0x65, 0x12, 0x37, 0x0a, 0x05, 0x43, 0x6c, 0x65, 0x61, 0x72, 0x12, 0x16, 0x2e, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, + 0x70, 0x74, 0x79, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x12, 0x4b, 0x0a, 0x0e, 0x47, + 0x65, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x1b, 0x2e, + 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x47, 0x65, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, + 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1c, 0x2e, 0x73, 0x79, 0x6e, + 0x63, 0x2e, 0x47, 0x65, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x54, 0x0a, 0x11, 0x56, 0x65, 0x72, 0x69, + 0x66, 0x79, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x1e, 0x2e, + 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x43, 0x68, 0x61, 0x6e, 0x67, + 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1f, 0x2e, + 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x43, 0x68, 0x61, 0x6e, 0x67, + 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x4b, + 0x0a, 0x11, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, + 0x6f, 0x6f, 0x66, 0x12, 0x1e, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, + 0x74, 0x43, 0x68, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, + 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, + 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, 0x70, 0x74, 0x79, 0x12, 0x48, 0x0a, 0x0d, 0x47, + 0x65, 0x74, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x1a, 0x2e, 0x73, + 0x79, 0x6e, 0x63, 0x2e, 0x47, 0x65, 0x74, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, + 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1b, 
0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, + 0x47, 0x65, 0x74, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x73, + 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x51, 0x0a, 0x10, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x52, + 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x1d, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, - 0x66, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x49, 0x0a, 0x10, 0x43, 0x6f, 0x6d, - 0x6d, 0x69, 0x74, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x1d, 0x2e, - 0x73, 0x79, 0x6e, 0x63, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x52, 0x61, 0x6e, 0x67, 0x65, - 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, - 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, - 0x6d, 0x70, 0x74, 0x79, 0x42, 0x2f, 0x5a, 0x2d, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, - 0x6f, 0x6d, 0x2f, 0x61, 0x76, 0x61, 0x2d, 0x6c, 0x61, 0x62, 0x73, 0x2f, 0x61, 0x76, 0x61, 0x6c, - 0x61, 0x6e, 0x63, 0x68, 0x65, 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x62, - 0x2f, 0x73, 0x79, 0x6e, 0x63, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, + 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x1e, 0x2e, 0x73, 0x79, 0x6e, 0x63, 0x2e, + 0x56, 0x65, 0x72, 0x69, 0x66, 0x79, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, + 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73, 0x65, 0x12, 0x49, 0x0a, 0x10, 0x43, 0x6f, 0x6d, 0x6d, + 0x69, 0x74, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x1d, 0x2e, 0x73, + 0x79, 0x6e, 0x63, 0x2e, 0x43, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x50, + 0x72, 0x6f, 0x6f, 0x66, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x1a, 0x16, 0x2e, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x45, 0x6d, + 0x70, 0x74, 0x79, 0x42, 0x2f, 0x5a, 0x2d, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, + 0x6d, 0x2f, 0x61, 0x76, 0x61, 0x2d, 0x6c, 0x61, 0x62, 0x73, 0x2f, 0x61, 0x76, 0x61, 0x6c, 0x61, + 0x6e, 0x63, 0x68, 0x65, 0x67, 0x6f, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x70, 0x62, 0x2f, + 0x73, 0x79, 0x6e, 0x63, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, } var ( @@ -1767,94 +1594,86 @@ func file_sync_sync_proto_rawDescGZIP() []byte { return file_sync_sync_proto_rawDescData } -var file_sync_sync_proto_msgTypes = make([]protoimpl.MessageInfo, 25) +var file_sync_sync_proto_msgTypes = make([]protoimpl.MessageInfo, 22) var file_sync_sync_proto_goTypes = []interface{}{ (*GetMerkleRootResponse)(nil), // 0: sync.GetMerkleRootResponse - (*GetProofRequest)(nil), // 1: sync.GetProofRequest - (*GetProofResponse)(nil), // 2: sync.GetProofResponse - (*Proof)(nil), // 3: sync.Proof - (*SyncGetChangeProofRequest)(nil), // 4: sync.SyncGetChangeProofRequest - (*SyncGetChangeProofResponse)(nil), // 5: sync.SyncGetChangeProofResponse - (*GetChangeProofRequest)(nil), // 6: sync.GetChangeProofRequest - (*GetChangeProofResponse)(nil), // 7: sync.GetChangeProofResponse - (*VerifyChangeProofRequest)(nil), // 8: sync.VerifyChangeProofRequest - (*VerifyChangeProofResponse)(nil), // 9: sync.VerifyChangeProofResponse - (*CommitChangeProofRequest)(nil), // 10: sync.CommitChangeProofRequest - (*SyncGetRangeProofRequest)(nil), // 11: sync.SyncGetRangeProofRequest - (*GetRangeProofRequest)(nil), // 12: sync.GetRangeProofRequest - (*VerifyRangeProofRequest)(nil), // 13: 
sync.VerifyRangeProofRequest - (*VerifyRangeProofResponse)(nil), // 14: sync.VerifyRangeProofResponse - (*GetRangeProofResponse)(nil), // 15: sync.GetRangeProofResponse - (*CommitRangeProofRequest)(nil), // 16: sync.CommitRangeProofRequest - (*ChangeProof)(nil), // 17: sync.ChangeProof - (*RangeProof)(nil), // 18: sync.RangeProof - (*ProofNode)(nil), // 19: sync.ProofNode - (*KeyChange)(nil), // 20: sync.KeyChange - (*Key)(nil), // 21: sync.Key - (*MaybeBytes)(nil), // 22: sync.MaybeBytes - (*KeyValue)(nil), // 23: sync.KeyValue - nil, // 24: sync.ProofNode.ChildrenEntry - (*emptypb.Empty)(nil), // 25: google.protobuf.Empty + (*SyncGetChangeProofRequest)(nil), // 1: sync.SyncGetChangeProofRequest + (*SyncGetChangeProofResponse)(nil), // 2: sync.SyncGetChangeProofResponse + (*GetChangeProofRequest)(nil), // 3: sync.GetChangeProofRequest + (*GetChangeProofResponse)(nil), // 4: sync.GetChangeProofResponse + (*VerifyChangeProofRequest)(nil), // 5: sync.VerifyChangeProofRequest + (*VerifyChangeProofResponse)(nil), // 6: sync.VerifyChangeProofResponse + (*CommitChangeProofRequest)(nil), // 7: sync.CommitChangeProofRequest + (*SyncGetRangeProofRequest)(nil), // 8: sync.SyncGetRangeProofRequest + (*GetRangeProofRequest)(nil), // 9: sync.GetRangeProofRequest + (*VerifyRangeProofRequest)(nil), // 10: sync.VerifyRangeProofRequest + (*VerifyRangeProofResponse)(nil), // 11: sync.VerifyRangeProofResponse + (*GetRangeProofResponse)(nil), // 12: sync.GetRangeProofResponse + (*CommitRangeProofRequest)(nil), // 13: sync.CommitRangeProofRequest + (*ChangeProof)(nil), // 14: sync.ChangeProof + (*RangeProof)(nil), // 15: sync.RangeProof + (*ProofNode)(nil), // 16: sync.ProofNode + (*KeyChange)(nil), // 17: sync.KeyChange + (*Key)(nil), // 18: sync.Key + (*MaybeBytes)(nil), // 19: sync.MaybeBytes + (*KeyValue)(nil), // 20: sync.KeyValue + nil, // 21: sync.ProofNode.ChildrenEntry + (*emptypb.Empty)(nil), // 22: google.protobuf.Empty } var file_sync_sync_proto_depIdxs = []int32{ - 3, // 0: sync.GetProofResponse.proof:type_name -> sync.Proof - 22, // 1: sync.Proof.value:type_name -> sync.MaybeBytes - 19, // 2: sync.Proof.proof:type_name -> sync.ProofNode - 22, // 3: sync.SyncGetChangeProofRequest.start_key:type_name -> sync.MaybeBytes - 22, // 4: sync.SyncGetChangeProofRequest.end_key:type_name -> sync.MaybeBytes - 17, // 5: sync.SyncGetChangeProofResponse.change_proof:type_name -> sync.ChangeProof - 18, // 6: sync.SyncGetChangeProofResponse.range_proof:type_name -> sync.RangeProof - 22, // 7: sync.GetChangeProofRequest.start_key:type_name -> sync.MaybeBytes - 22, // 8: sync.GetChangeProofRequest.end_key:type_name -> sync.MaybeBytes - 17, // 9: sync.GetChangeProofResponse.change_proof:type_name -> sync.ChangeProof - 17, // 10: sync.VerifyChangeProofRequest.proof:type_name -> sync.ChangeProof - 22, // 11: sync.VerifyChangeProofRequest.start_key:type_name -> sync.MaybeBytes - 22, // 12: sync.VerifyChangeProofRequest.end_key:type_name -> sync.MaybeBytes - 17, // 13: sync.CommitChangeProofRequest.proof:type_name -> sync.ChangeProof - 22, // 14: sync.SyncGetRangeProofRequest.start_key:type_name -> sync.MaybeBytes - 22, // 15: sync.SyncGetRangeProofRequest.end_key:type_name -> sync.MaybeBytes - 22, // 16: sync.GetRangeProofRequest.start_key:type_name -> sync.MaybeBytes - 22, // 17: sync.GetRangeProofRequest.end_key:type_name -> sync.MaybeBytes - 18, // 18: sync.VerifyRangeProofRequest.proof:type_name -> sync.RangeProof - 22, // 19: sync.VerifyRangeProofRequest.start_key:type_name -> sync.MaybeBytes - 22, // 20: 
sync.VerifyRangeProofRequest.end_key:type_name -> sync.MaybeBytes - 18, // 21: sync.GetRangeProofResponse.proof:type_name -> sync.RangeProof - 22, // 22: sync.CommitRangeProofRequest.start_key:type_name -> sync.MaybeBytes - 22, // 23: sync.CommitRangeProofRequest.end_key:type_name -> sync.MaybeBytes - 18, // 24: sync.CommitRangeProofRequest.range_proof:type_name -> sync.RangeProof - 19, // 25: sync.ChangeProof.start_proof:type_name -> sync.ProofNode - 19, // 26: sync.ChangeProof.end_proof:type_name -> sync.ProofNode - 20, // 27: sync.ChangeProof.key_changes:type_name -> sync.KeyChange - 19, // 28: sync.RangeProof.start_proof:type_name -> sync.ProofNode - 19, // 29: sync.RangeProof.end_proof:type_name -> sync.ProofNode - 23, // 30: sync.RangeProof.key_values:type_name -> sync.KeyValue - 21, // 31: sync.ProofNode.key:type_name -> sync.Key - 22, // 32: sync.ProofNode.value_or_hash:type_name -> sync.MaybeBytes - 24, // 33: sync.ProofNode.children:type_name -> sync.ProofNode.ChildrenEntry - 22, // 34: sync.KeyChange.value:type_name -> sync.MaybeBytes - 25, // 35: sync.DB.GetMerkleRoot:input_type -> google.protobuf.Empty - 25, // 36: sync.DB.Clear:input_type -> google.protobuf.Empty - 1, // 37: sync.DB.GetProof:input_type -> sync.GetProofRequest - 6, // 38: sync.DB.GetChangeProof:input_type -> sync.GetChangeProofRequest - 8, // 39: sync.DB.VerifyChangeProof:input_type -> sync.VerifyChangeProofRequest - 10, // 40: sync.DB.CommitChangeProof:input_type -> sync.CommitChangeProofRequest - 12, // 41: sync.DB.GetRangeProof:input_type -> sync.GetRangeProofRequest - 13, // 42: sync.DB.VerifyRangeProof:input_type -> sync.VerifyRangeProofRequest - 16, // 43: sync.DB.CommitRangeProof:input_type -> sync.CommitRangeProofRequest - 0, // 44: sync.DB.GetMerkleRoot:output_type -> sync.GetMerkleRootResponse - 25, // 45: sync.DB.Clear:output_type -> google.protobuf.Empty - 2, // 46: sync.DB.GetProof:output_type -> sync.GetProofResponse - 7, // 47: sync.DB.GetChangeProof:output_type -> sync.GetChangeProofResponse - 9, // 48: sync.DB.VerifyChangeProof:output_type -> sync.VerifyChangeProofResponse - 25, // 49: sync.DB.CommitChangeProof:output_type -> google.protobuf.Empty - 15, // 50: sync.DB.GetRangeProof:output_type -> sync.GetRangeProofResponse - 14, // 51: sync.DB.VerifyRangeProof:output_type -> sync.VerifyRangeProofResponse - 25, // 52: sync.DB.CommitRangeProof:output_type -> google.protobuf.Empty - 44, // [44:53] is the sub-list for method output_type - 35, // [35:44] is the sub-list for method input_type - 35, // [35:35] is the sub-list for extension type_name - 35, // [35:35] is the sub-list for extension extendee - 0, // [0:35] is the sub-list for field type_name + 19, // 0: sync.SyncGetChangeProofRequest.start_key:type_name -> sync.MaybeBytes + 19, // 1: sync.SyncGetChangeProofRequest.end_key:type_name -> sync.MaybeBytes + 14, // 2: sync.SyncGetChangeProofResponse.change_proof:type_name -> sync.ChangeProof + 15, // 3: sync.SyncGetChangeProofResponse.range_proof:type_name -> sync.RangeProof + 19, // 4: sync.GetChangeProofRequest.start_key:type_name -> sync.MaybeBytes + 19, // 5: sync.GetChangeProofRequest.end_key:type_name -> sync.MaybeBytes + 14, // 6: sync.GetChangeProofResponse.change_proof:type_name -> sync.ChangeProof + 14, // 7: sync.VerifyChangeProofRequest.proof:type_name -> sync.ChangeProof + 19, // 8: sync.VerifyChangeProofRequest.start_key:type_name -> sync.MaybeBytes + 19, // 9: sync.VerifyChangeProofRequest.end_key:type_name -> sync.MaybeBytes + 14, // 10: 
sync.CommitChangeProofRequest.proof:type_name -> sync.ChangeProof + 19, // 11: sync.SyncGetRangeProofRequest.start_key:type_name -> sync.MaybeBytes + 19, // 12: sync.SyncGetRangeProofRequest.end_key:type_name -> sync.MaybeBytes + 19, // 13: sync.GetRangeProofRequest.start_key:type_name -> sync.MaybeBytes + 19, // 14: sync.GetRangeProofRequest.end_key:type_name -> sync.MaybeBytes + 15, // 15: sync.VerifyRangeProofRequest.proof:type_name -> sync.RangeProof + 19, // 16: sync.VerifyRangeProofRequest.start_key:type_name -> sync.MaybeBytes + 19, // 17: sync.VerifyRangeProofRequest.end_key:type_name -> sync.MaybeBytes + 15, // 18: sync.GetRangeProofResponse.proof:type_name -> sync.RangeProof + 19, // 19: sync.CommitRangeProofRequest.start_key:type_name -> sync.MaybeBytes + 19, // 20: sync.CommitRangeProofRequest.end_key:type_name -> sync.MaybeBytes + 15, // 21: sync.CommitRangeProofRequest.range_proof:type_name -> sync.RangeProof + 16, // 22: sync.ChangeProof.start_proof:type_name -> sync.ProofNode + 16, // 23: sync.ChangeProof.end_proof:type_name -> sync.ProofNode + 17, // 24: sync.ChangeProof.key_changes:type_name -> sync.KeyChange + 16, // 25: sync.RangeProof.start_proof:type_name -> sync.ProofNode + 16, // 26: sync.RangeProof.end_proof:type_name -> sync.ProofNode + 20, // 27: sync.RangeProof.key_values:type_name -> sync.KeyValue + 18, // 28: sync.ProofNode.key:type_name -> sync.Key + 19, // 29: sync.ProofNode.value_or_hash:type_name -> sync.MaybeBytes + 21, // 30: sync.ProofNode.children:type_name -> sync.ProofNode.ChildrenEntry + 19, // 31: sync.KeyChange.value:type_name -> sync.MaybeBytes + 22, // 32: sync.DB.GetMerkleRoot:input_type -> google.protobuf.Empty + 22, // 33: sync.DB.Clear:input_type -> google.protobuf.Empty + 3, // 34: sync.DB.GetChangeProof:input_type -> sync.GetChangeProofRequest + 5, // 35: sync.DB.VerifyChangeProof:input_type -> sync.VerifyChangeProofRequest + 7, // 36: sync.DB.CommitChangeProof:input_type -> sync.CommitChangeProofRequest + 9, // 37: sync.DB.GetRangeProof:input_type -> sync.GetRangeProofRequest + 10, // 38: sync.DB.VerifyRangeProof:input_type -> sync.VerifyRangeProofRequest + 13, // 39: sync.DB.CommitRangeProof:input_type -> sync.CommitRangeProofRequest + 0, // 40: sync.DB.GetMerkleRoot:output_type -> sync.GetMerkleRootResponse + 22, // 41: sync.DB.Clear:output_type -> google.protobuf.Empty + 4, // 42: sync.DB.GetChangeProof:output_type -> sync.GetChangeProofResponse + 6, // 43: sync.DB.VerifyChangeProof:output_type -> sync.VerifyChangeProofResponse + 22, // 44: sync.DB.CommitChangeProof:output_type -> google.protobuf.Empty + 12, // 45: sync.DB.GetRangeProof:output_type -> sync.GetRangeProofResponse + 11, // 46: sync.DB.VerifyRangeProof:output_type -> sync.VerifyRangeProofResponse + 22, // 47: sync.DB.CommitRangeProof:output_type -> google.protobuf.Empty + 40, // [40:48] is the sub-list for method output_type + 32, // [32:40] is the sub-list for method input_type + 32, // [32:32] is the sub-list for extension type_name + 32, // [32:32] is the sub-list for extension extendee + 0, // [0:32] is the sub-list for field type_name } func init() { file_sync_sync_proto_init() } @@ -1876,42 +1695,6 @@ func file_sync_sync_proto_init() { } } file_sync_sync_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*GetProofRequest); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_sync_sync_proto_msgTypes[2].Exporter = func(v interface{}, i int) 
interface{} { - switch v := v.(*GetProofResponse); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_sync_sync_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { - switch v := v.(*Proof); i { - case 0: - return &v.state - case 1: - return &v.sizeCache - case 2: - return &v.unknownFields - default: - return nil - } - } - file_sync_sync_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*SyncGetChangeProofRequest); i { case 0: return &v.state @@ -1923,7 +1706,7 @@ func file_sync_sync_proto_init() { return nil } } - file_sync_sync_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { + file_sync_sync_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*SyncGetChangeProofResponse); i { case 0: return &v.state @@ -1935,7 +1718,7 @@ func file_sync_sync_proto_init() { return nil } } - file_sync_sync_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { + file_sync_sync_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*GetChangeProofRequest); i { case 0: return &v.state @@ -1947,7 +1730,7 @@ func file_sync_sync_proto_init() { return nil } } - file_sync_sync_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { + file_sync_sync_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*GetChangeProofResponse); i { case 0: return &v.state @@ -1959,7 +1742,7 @@ func file_sync_sync_proto_init() { return nil } } - file_sync_sync_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { + file_sync_sync_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*VerifyChangeProofRequest); i { case 0: return &v.state @@ -1971,7 +1754,7 @@ func file_sync_sync_proto_init() { return nil } } - file_sync_sync_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { + file_sync_sync_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*VerifyChangeProofResponse); i { case 0: return &v.state @@ -1983,7 +1766,7 @@ func file_sync_sync_proto_init() { return nil } } - file_sync_sync_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { + file_sync_sync_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*CommitChangeProofRequest); i { case 0: return &v.state @@ -1995,7 +1778,7 @@ func file_sync_sync_proto_init() { return nil } } - file_sync_sync_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { + file_sync_sync_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*SyncGetRangeProofRequest); i { case 0: return &v.state @@ -2007,7 +1790,7 @@ func file_sync_sync_proto_init() { return nil } } - file_sync_sync_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { + file_sync_sync_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*GetRangeProofRequest); i { case 0: return &v.state @@ -2019,7 +1802,7 @@ func file_sync_sync_proto_init() { return nil } } - file_sync_sync_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { + file_sync_sync_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*VerifyRangeProofRequest); i { case 0: return &v.state @@ -2031,7 +1814,7 @@ func file_sync_sync_proto_init() { return nil } } - file_sync_sync_proto_msgTypes[14].Exporter 
= func(v interface{}, i int) interface{} { + file_sync_sync_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*VerifyRangeProofResponse); i { case 0: return &v.state @@ -2043,7 +1826,7 @@ func file_sync_sync_proto_init() { return nil } } - file_sync_sync_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { + file_sync_sync_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*GetRangeProofResponse); i { case 0: return &v.state @@ -2055,7 +1838,7 @@ func file_sync_sync_proto_init() { return nil } } - file_sync_sync_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { + file_sync_sync_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*CommitRangeProofRequest); i { case 0: return &v.state @@ -2067,7 +1850,7 @@ func file_sync_sync_proto_init() { return nil } } - file_sync_sync_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { + file_sync_sync_proto_msgTypes[14].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ChangeProof); i { case 0: return &v.state @@ -2079,7 +1862,7 @@ func file_sync_sync_proto_init() { return nil } } - file_sync_sync_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { + file_sync_sync_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*RangeProof); i { case 0: return &v.state @@ -2091,7 +1874,7 @@ func file_sync_sync_proto_init() { return nil } } - file_sync_sync_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { + file_sync_sync_proto_msgTypes[16].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*ProofNode); i { case 0: return &v.state @@ -2103,7 +1886,7 @@ func file_sync_sync_proto_init() { return nil } } - file_sync_sync_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { + file_sync_sync_proto_msgTypes[17].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*KeyChange); i { case 0: return &v.state @@ -2115,7 +1898,7 @@ func file_sync_sync_proto_init() { return nil } } - file_sync_sync_proto_msgTypes[21].Exporter = func(v interface{}, i int) interface{} { + file_sync_sync_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*Key); i { case 0: return &v.state @@ -2127,7 +1910,7 @@ func file_sync_sync_proto_init() { return nil } } - file_sync_sync_proto_msgTypes[22].Exporter = func(v interface{}, i int) interface{} { + file_sync_sync_proto_msgTypes[19].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*MaybeBytes); i { case 0: return &v.state @@ -2139,7 +1922,7 @@ func file_sync_sync_proto_init() { return nil } } - file_sync_sync_proto_msgTypes[23].Exporter = func(v interface{}, i int) interface{} { + file_sync_sync_proto_msgTypes[20].Exporter = func(v interface{}, i int) interface{} { switch v := v.(*KeyValue); i { case 0: return &v.state @@ -2152,11 +1935,11 @@ func file_sync_sync_proto_init() { } } } - file_sync_sync_proto_msgTypes[5].OneofWrappers = []interface{}{ + file_sync_sync_proto_msgTypes[2].OneofWrappers = []interface{}{ (*SyncGetChangeProofResponse_ChangeProof)(nil), (*SyncGetChangeProofResponse_RangeProof)(nil), } - file_sync_sync_proto_msgTypes[7].OneofWrappers = []interface{}{ + file_sync_sync_proto_msgTypes[4].OneofWrappers = []interface{}{ (*GetChangeProofResponse_ChangeProof)(nil), (*GetChangeProofResponse_RootNotPresent)(nil), } @@ -2166,7 +1949,7 @@ func file_sync_sync_proto_init() { GoPackagePath: 
reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_sync_sync_proto_rawDesc, NumEnums: 0, - NumMessages: 25, + NumMessages: 22, NumExtensions: 0, NumServices: 1, }, diff --git a/proto/pb/sync/sync_grpc.pb.go b/proto/pb/sync/sync_grpc.pb.go index 0f758d493cec..b1d91cf8c89e 100644 --- a/proto/pb/sync/sync_grpc.pb.go +++ b/proto/pb/sync/sync_grpc.pb.go @@ -22,7 +22,6 @@ const _ = grpc.SupportPackageIsVersion7 const ( DB_GetMerkleRoot_FullMethodName = "/sync.DB/GetMerkleRoot" DB_Clear_FullMethodName = "/sync.DB/Clear" - DB_GetProof_FullMethodName = "/sync.DB/GetProof" DB_GetChangeProof_FullMethodName = "/sync.DB/GetChangeProof" DB_VerifyChangeProof_FullMethodName = "/sync.DB/VerifyChangeProof" DB_CommitChangeProof_FullMethodName = "/sync.DB/CommitChangeProof" @@ -37,7 +36,6 @@ const ( type DBClient interface { GetMerkleRoot(ctx context.Context, in *emptypb.Empty, opts ...grpc.CallOption) (*GetMerkleRootResponse, error) Clear(ctx context.Context, in *emptypb.Empty, opts ...grpc.CallOption) (*emptypb.Empty, error) - GetProof(ctx context.Context, in *GetProofRequest, opts ...grpc.CallOption) (*GetProofResponse, error) GetChangeProof(ctx context.Context, in *GetChangeProofRequest, opts ...grpc.CallOption) (*GetChangeProofResponse, error) VerifyChangeProof(ctx context.Context, in *VerifyChangeProofRequest, opts ...grpc.CallOption) (*VerifyChangeProofResponse, error) CommitChangeProof(ctx context.Context, in *CommitChangeProofRequest, opts ...grpc.CallOption) (*emptypb.Empty, error) @@ -72,15 +70,6 @@ func (c *dBClient) Clear(ctx context.Context, in *emptypb.Empty, opts ...grpc.Ca return out, nil } -func (c *dBClient) GetProof(ctx context.Context, in *GetProofRequest, opts ...grpc.CallOption) (*GetProofResponse, error) { - out := new(GetProofResponse) - err := c.cc.Invoke(ctx, DB_GetProof_FullMethodName, in, out, opts...) - if err != nil { - return nil, err - } - return out, nil -} - func (c *dBClient) GetChangeProof(ctx context.Context, in *GetChangeProofRequest, opts ...grpc.CallOption) (*GetChangeProofResponse, error) { out := new(GetChangeProofResponse) err := c.cc.Invoke(ctx, DB_GetChangeProof_FullMethodName, in, out, opts...) 
@@ -141,7 +130,6 @@ func (c *dBClient) CommitRangeProof(ctx context.Context, in *CommitRangeProofReq type DBServer interface { GetMerkleRoot(context.Context, *emptypb.Empty) (*GetMerkleRootResponse, error) Clear(context.Context, *emptypb.Empty) (*emptypb.Empty, error) - GetProof(context.Context, *GetProofRequest) (*GetProofResponse, error) GetChangeProof(context.Context, *GetChangeProofRequest) (*GetChangeProofResponse, error) VerifyChangeProof(context.Context, *VerifyChangeProofRequest) (*VerifyChangeProofResponse, error) CommitChangeProof(context.Context, *CommitChangeProofRequest) (*emptypb.Empty, error) @@ -161,9 +149,6 @@ func (UnimplementedDBServer) GetMerkleRoot(context.Context, *emptypb.Empty) (*Ge func (UnimplementedDBServer) Clear(context.Context, *emptypb.Empty) (*emptypb.Empty, error) { return nil, status.Errorf(codes.Unimplemented, "method Clear not implemented") } -func (UnimplementedDBServer) GetProof(context.Context, *GetProofRequest) (*GetProofResponse, error) { - return nil, status.Errorf(codes.Unimplemented, "method GetProof not implemented") -} func (UnimplementedDBServer) GetChangeProof(context.Context, *GetChangeProofRequest) (*GetChangeProofResponse, error) { return nil, status.Errorf(codes.Unimplemented, "method GetChangeProof not implemented") } @@ -231,24 +216,6 @@ func _DB_Clear_Handler(srv interface{}, ctx context.Context, dec func(interface{ return interceptor(ctx, in, info, handler) } -func _DB_GetProof_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { - in := new(GetProofRequest) - if err := dec(in); err != nil { - return nil, err - } - if interceptor == nil { - return srv.(DBServer).GetProof(ctx, in) - } - info := &grpc.UnaryServerInfo{ - Server: srv, - FullMethod: DB_GetProof_FullMethodName, - } - handler := func(ctx context.Context, req interface{}) (interface{}, error) { - return srv.(DBServer).GetProof(ctx, req.(*GetProofRequest)) - } - return interceptor(ctx, in, info, handler) -} - func _DB_GetChangeProof_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) { in := new(GetChangeProofRequest) if err := dec(in); err != nil { @@ -372,10 +339,6 @@ var DB_ServiceDesc = grpc.ServiceDesc{ MethodName: "Clear", Handler: _DB_Clear_Handler, }, - { - MethodName: "GetProof", - Handler: _DB_GetProof_Handler, - }, { MethodName: "GetChangeProof", Handler: _DB_GetChangeProof_Handler, diff --git a/proto/sync/sync.proto b/proto/sync/sync.proto index f5cf2677feae..466d4d78b84b 100644 --- a/proto/sync/sync.proto +++ b/proto/sync/sync.proto @@ -15,8 +15,6 @@ service DB { rpc Clear(google.protobuf.Empty) returns (google.protobuf.Empty); - rpc GetProof(GetProofRequest) returns (GetProofResponse); - rpc GetChangeProof(GetChangeProofRequest) returns (GetChangeProofResponse); rpc VerifyChangeProof(VerifyChangeProofRequest) returns (VerifyChangeProofResponse); rpc CommitChangeProof(CommitChangeProofRequest) returns (google.protobuf.Empty); @@ -30,20 +28,6 @@ message GetMerkleRootResponse { bytes root_hash = 1; } -message GetProofRequest { - bytes key = 1; -} - -message GetProofResponse { - Proof proof = 1; -} - -message Proof { - bytes key = 1; - MaybeBytes value = 2; - repeated ProofNode proof = 3; -} - // For use in sync client, which has a restriction on the size of // the response. GetChangeProof in the DB service doesn't. 
message SyncGetChangeProofRequest { diff --git a/scripts/build_test.sh b/scripts/build_test.sh index 1511c351782a..d6964a49f0e7 100755 --- a/scripts/build_test.sh +++ b/scripts/build_test.sh @@ -18,4 +18,5 @@ fi TEST_TARGETS="$(eval "go list ./... ${EXCLUDED_TARGETS}")" # shellcheck disable=SC2086 +TIMEOUT=900s go test -tags test -shuffle=on -race -timeout="${TIMEOUT:-120s}" -coverprofile="coverage.out" -covermode="atomic" ${TEST_TARGETS} diff --git a/x/ethsync/database.go b/x/ethsync/database.go new file mode 100644 index 000000000000..ad636fd4ad2e --- /dev/null +++ b/x/ethsync/database.go @@ -0,0 +1,53 @@ +// (c) 2019-2020, Ava Labs, Inc. All rights reserved. +// See the file LICENSE for licensing terms. + +package ethsync + +import ( + "context" + + "github.com/ava-labs/avalanchego/database" + "github.com/ethereum/go-ethereum/ethdb" +) + +var ( + _ database.Database = &Database{} +) + +type Database struct{ ethdb.Database } + +func (db Database) HealthCheck(context.Context) (interface{}, error) { + return nil, nil +} + +func (db Database) NewBatch() database.Batch { + return batch{db.Database.NewBatch()} +} + +func (db Database) NewIterator() database.Iterator { + return db.Database.NewIterator(nil, nil) +} + +func (db Database) NewIteratorWithPrefix(prefix []byte) database.Iterator { + return db.Database.NewIterator(prefix, nil) +} + +func (db Database) NewIteratorWithStart(start []byte) database.Iterator { + return db.Database.NewIterator(nil, start) +} + +func (db Database) NewIteratorWithStartAndPrefix(prefix, start []byte) database.Iterator { + return db.Database.NewIterator(prefix, start) +} + +type batch struct{ ethdb.Batch } + +func (b batch) Inner() database.Batch { return b } + +func (b batch) Replay(w database.KeyValueWriterDeleter) error { + return b.Batch.Replay(w) +} + +func (b batch) Size() int { + return b.Batch.ValueSize() +} diff --git a/x/ethsync/db.go b/x/ethsync/db.go new file mode 100644 index 000000000000..1552f3337ad0 --- /dev/null +++ b/x/ethsync/db.go @@ -0,0 +1,920 @@ +// (c) 2024, Ava Labs, Inc. All rights reserved. +// See the file LICENSE for licensing terms. 
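+
+// ethsync adapts a coreth/geth path-scheme trie database to the x/sync DB
+// interfaces: range and change proofs are generated from and verified against
+// Ethereum state and storage tries rather than a merkledb instance.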
+ +package ethsync + +import ( + "bytes" + "context" + "encoding/hex" + "fmt" + "os" + "slices" + "sync" + + "github.com/ava-labs/avalanchego/database" + "github.com/ava-labs/avalanchego/ids" + "github.com/ava-labs/avalanchego/utils/maybe" + "github.com/ava-labs/avalanchego/x/merkledb" + "github.com/ava-labs/coreth/core/rawdb" + "github.com/ava-labs/coreth/core/types" + "github.com/ava-labs/coreth/plugin/evm" + "github.com/ava-labs/coreth/trie" + "github.com/ava-labs/coreth/trie/trienode" + "github.com/ava-labs/coreth/triedb" + "github.com/ava-labs/coreth/triedb/pathdb" + "github.com/ethereum/go-ethereum/common" + "github.com/ethereum/go-ethereum/ethdb" + "github.com/ethereum/go-ethereum/rlp" + "golang.org/x/crypto/sha3" +) + +type ( + ChangeProof = merkledb.ChangeProof + RangeProof = merkledb.RangeProof + Key = merkledb.Key + ProofNode = merkledb.ProofNode + KeyValue = merkledb.KeyValue +) + +var ( + ToKey = merkledb.ToKey + ErrStartAfterEnd = merkledb.ErrStartAfterEnd + ErrEmptyProof = merkledb.ErrEmptyProof + ErrUnexpectedEndProof = merkledb.ErrUnexpectedEndProof + ErrNoStartProof = merkledb.ErrNoStartProof + ErrNoEndProof = merkledb.ErrNoEndProof + ErrProofNodeHasUnincludedValue = merkledb.ErrProofNodeHasUnincludedValue + ErrProofValueDoesntMatch = merkledb.ErrProofValueDoesntMatch + ErrPartialByteLengthWithValue = merkledb.ErrPartialByteLengthWithValue + ErrProofNodeNotForKey = merkledb.ErrProofNodeNotForKey + ErrNonIncreasingProofNodes = merkledb.ErrNonIncreasingProofNodes + ErrNonIncreasingValues = merkledb.ErrNonIncreasingValues + ErrStateFromOutsideOfRange = merkledb.ErrStateFromOutsideOfRange +) + +const ( + HashLength = merkledb.HashLength +) + +var rootKey = []byte{} + +type manager interface { + EnqueueWork(start, end maybe.Maybe[[]byte], priorityAsByte byte) +} + +type db struct { + db ethdb.Database + triedb *triedb.Database + root common.Hash + layerID common.Hash + updateLock sync.RWMutex + lastRoots map[common.Hash]common.Hash + manager manager +} + +func New(ctx context.Context, disk database.Database, config merkledb.Config) (*db, error) { + ethdb := rawdb.NewDatabase(evm.Database{Database: disk}) + root := types.EmptyRootHash + diskRoot, err := ethdb.Get(rootKey) + if err == nil { + copy(root[:], diskRoot) + } else if err != database.ErrNotFound { + return nil, err + } + + triedb := triedb.NewDatabase(ethdb, &triedb.Config{PathDB: pathdb.Defaults}) + return &db{ + db: ethdb, + triedb: triedb, + root: root, + layerID: root, + lastRoots: make(map[common.Hash]common.Hash), + }, nil +} + +func (db *db) KVCallback(start, end maybe.Maybe[[]byte], priority byte, stateID ids.ID, keyValues []merkledb.KeyChange) error { + if len(start.Value()) == 64 { + return nil // Storage trie, ignore + } + for _, kv := range keyValues { + acc := new(types.StateAccount) + if err := rlp.DecodeBytes(kv.Value.Value(), acc); err != nil { + continue // failed to decode account + } + if acc.Root == types.EmptyRootHash { + continue // empty account + } + + if db.manager != nil { + var start, end []byte + start = append(start, kv.Key...) + start = append(start, bytes.Repeat([]byte{0}, 32)...) + end = append(end, kv.Key...) + end = append(end, bytes.Repeat([]byte{0xff}, 32)...) 
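+			// Hand this account's full storage keyspace (accountHash||00..00 through accountHash||ff..ff) to the sync manager as follow-up work.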
+ db.manager.EnqueueWork(maybe.Some(start), maybe.Some(end), priority) + } + } + return nil +} + +func (db *db) Clear() error { + return database.Clear(Database{db.db}, ethdb.IdealBatchSize) +} + +func (db *db) GetMerkleRoot(ctx context.Context) (ids.ID, error) { + db.updateLock.RLock() + defer db.updateLock.RUnlock() + + if db.root == types.EmptyRootHash { + return ids.ID{}, nil + } + return ids.ID(db.root), nil +} + +func (db *db) GetRangeProofAtRoot(ctx context.Context, rootID ids.ID, start, end maybe.Maybe[[]byte], maxLength int) (*RangeProof, error) { + stateRoot := common.BytesToHash(rootID[:]) + tr, err := trie.New(trie.StateTrieID(stateRoot), db.triedb) + if err != nil { + return nil, err + } + var additionalProof proof + if len(start.Value()) == 64 { + // This is a storage trie + accHash := start.Value()[:32] + accBytes, err := tr.Get(accHash) + if err != nil { + return nil, err + } + acc := new(types.StateAccount) + if err := rlp.DecodeBytes(accBytes, acc); err != nil { + return nil, err + } + + // TODO: to prove the account root, we include this for now. + // The client can find a better way to track this. + if err := tr.Prove(accHash, &additionalProof); err != nil { + return nil, err + } + + tr, err = trie.New(trie.StorageTrieID(stateRoot, common.BytesToHash(accHash), acc.Root), db.triedb) + if err != nil { + return nil, err + } + + start = maybe.Some(start.Value()[32:]) + if end.HasValue() { + if len(end.Value()) != 64 { + return nil, fmt.Errorf("invalid end key length: %d", len(end.Value())) + } + if !bytes.Equal(end.Value()[:32], accHash) { + return nil, fmt.Errorf("end key does not match account hash: %x != %x", end.Value()[:32], accHash) + } + end = maybe.Some(end.Value()[32:]) + } + } + response, err := db.getRangeProofAtRoot(ctx, tr, rootID, start, end, maxLength) + if err != nil { + return nil, err + } + response.StartProof = append(response.StartProof, additionalProof...) + return response, nil +} + +func (db *db) getRangeProofAtRoot( + ctx context.Context, tr *trie.Trie, + rootID ids.ID, start, end maybe.Maybe[[]byte], maxLength int) (*RangeProof, error) { + response := &RangeProof{} + nodeIt, err := tr.NodeIterator(start.Value()) + if err != nil { + return nil, err + } + it := trie.NewIterator(nodeIt) + for it.Next() { + if end.HasValue() && bytes.Compare(it.Key, end.Value()) > 0 { + break + } + if len(response.KeyValues) >= maxLength { + break + } + + response.KeyValues = append(response.KeyValues, KeyValue{ + Key: bytes.Clone(it.Key), + Value: bytes.Clone(it.Value), + }) + } + if err := it.Err; err != nil { + return nil, err + } + for i, it := range response.KeyValues { + if len(it.Key) == 0 { + panic("empty key at index " + fmt.Sprintf("%d", i)) + } + if len(it.Value) == 0 { + panic("empty value") + } + // fmt.Println("--> kv", i, hex.EncodeToString(it.Key), hex.EncodeToString(it.Value)) + } + + startKey := start.Value() + if len(startKey) == 0 && len(response.KeyValues) > 0 { + startKey = bytes.Repeat([]byte{0}, 32) // XXX + } + if err := tr.Prove(startKey, (*proof)(&response.StartProof)); err != nil { + return nil, err + } + kvs := response.KeyValues + if len(kvs) > 0 { + // If there is a non-zero number of keys, set [end] for the range proof to the last key. + end := kvs[len(kvs)-1].Key + if err := tr.Prove(end, (*proof)(&response.EndProof)); err != nil { + return nil, err + } + } else if end.HasValue() { + // If there are no keys, and [end] is set, set [end] for the range proof to [end]. 
+ if err := tr.Prove(end.Value(), (*proof)(&response.EndProof)); err != nil { + return nil, err + } + } + + startLen := len(start.Value()) + if startLen != 0 && startLen != 32 { + panic("invalid start key length") + } + + fmt.Println("proof generated", + "start", hex.EncodeToString(start.Value()), + "end", hex.EncodeToString(end.Value()), + "startProof", len(response.StartProof), + "endProof", len(response.EndProof), + "keyValues", len(response.KeyValues), + ) + return response, nil +} + +func (db *db) VerifyRangeProof(ctx context.Context, proof *RangeProof, start, end maybe.Maybe[[]byte], expectedRootID ids.ID) error { + var prefix []byte + verifyRootID := expectedRootID + if len(start.Value()) == 64 { + prefix = start.Value()[:32] + start = maybe.Some(start.Value()[32:]) + if end.HasValue() { + end = maybe.Some(end.Value()[32:]) + } + + // If the proof is for a storage trie, the expected root ID should be + // recovered from the "additionalProof", included in StartProof. + proofDB := rawdb.NewMemoryDatabase() + for _, node := range proof.StartProof { + if err := proofDB.Put(node.Key.Bytes(), node.ValueOrHash.Value()); err != nil { + return err + } + } + val, err := trie.VerifyProof( + common.BytesToHash(expectedRootID[:]), + prefix, + proofDB, + ) + if err != nil { + return fmt.Errorf("failed to verify proof: %w", err) + } + account := new(types.StateAccount) + if err := rlp.DecodeBytes(val, account); err != nil { + return fmt.Errorf("failed to decode account: %w", err) + } + + verifyRootID = ids.ID(account.Root) + } + return db.verifyRangeProof(ctx, prefix, proof, start, end, verifyRootID) +} + +func (db *db) verifyRangeProof(ctx context.Context, prefix []byte, proof *RangeProof, start, end maybe.Maybe[[]byte], expectedRootID ids.ID) error { + fmt.Println( + "proof verification", + "start", hex.EncodeToString(start.Value()), + "end", hex.EncodeToString(end.Value()), + "expectedRootID", expectedRootID, + "kvs", len(proof.KeyValues), + ) + + proofDB := rawdb.NewMemoryDatabase() + for _, node := range proof.StartProof { + if err := proofDB.Put(node.Key.Bytes(), node.ValueOrHash.Value()); err != nil { + return err + } + } + for _, node := range proof.EndProof { + if err := proofDB.Put(node.Key.Bytes(), node.ValueOrHash.Value()); err != nil { + return err + } + } + keys := make([][]byte, 0, len(proof.KeyValues)) + vals := make([][]byte, 0, len(proof.KeyValues)) + for i := range proof.KeyValues { + if len(proof.KeyValues[i].Key) == 0 || len(proof.KeyValues[i].Value) == 0 { + return fmt.Errorf("invalid key-value pair at index %d", i) + } + keys = append(keys, proof.KeyValues[i].Key) + vals = append(vals, proof.KeyValues[i].Value) + } + root := common.BytesToHash(expectedRootID[:]) + + startKey := start.Value() + if len(startKey) == 0 && len(keys) > 0 { + startKey = bytes.Repeat([]byte{0}, 32) // XXX + } + if len(keys) == 0 && end.HasValue() { + if err := trie.VerifyRangeProofEmpty(root, startKey, end.Value(), proofDB); err != nil { + fmt.Println("proof verification failed empty", err) + return err + } + } else { + if _, err := trie.VerifyRangeProof(root, startKey, keys, vals, proofDB); err != nil { + fmt.Println("proof verification failed", err) + return err + } + } + fmt.Println("proof verified") + return nil +} + +func (db *db) CommitRangeProof(ctx context.Context, start, end maybe.Maybe[[]byte], proof *RangeProof) error { + var prefix []byte + if len(start.Value()) == 64 { + prefix = start.Value()[:32] + start = maybe.Some(start.Value()[32:]) + if end.HasValue() { + end = 
maybe.Some(end.Value()[32:]) + } + } + return db.commitRangeProof(ctx, prefix, start, end, proof) +} + +func (db *db) commitRangeProof(ctx context.Context, prefix []byte, start, end maybe.Maybe[[]byte], proof *RangeProof) error { + kvs := make(map[string][]byte) + if err := db.getKVs(prefix, start.Value(), end.Value(), kvs); err != nil { + return err + } + deletes := make([]KeyValue, 0, len(kvs)) + for k := range kvs { + deletes = append(deletes, KeyValue{ + Key: []byte(k), + Value: nil, // Delete + }) + } + if len(deletes) > 0 { + fmt.Println("deleting", len(deletes), "keys") + if err := db.updateKVs(prefix, deletes); err != nil { + return err + } + } + + return db.updateKVs(prefix, proof.KeyValues) +} + +func (db *db) NextKey(lastReceived []byte, rangeEnd maybe.Maybe[[]byte]) ([]byte, error) { + fmt.Println( + "next key", + "lastReceived", hex.EncodeToString(lastReceived), + "rangeEnd", hex.EncodeToString(rangeEnd.Value()), + ) + if len(rangeEnd.Value()) == 64 { + prefix := rangeEnd.Value()[:32] + next, err := db.NextKey(lastReceived, maybe.Nothing[[]byte]()) + if err != nil { + return nil, err + } + retval := make([]byte, 0, len(prefix)+len(next)) + retval = append(retval, prefix...) + retval = append(retval, next...) + fmt.Println("next key", hex.EncodeToString(retval)) + return retval, nil + } + if len(lastReceived) != 32 { + return nil, fmt.Errorf("key length is not 32: %d", len(lastReceived)) + } + keyCopy := bytes.Clone(lastReceived) + IncrOne(keyCopy) + return keyCopy, nil +} + +func (db *db) updateKVs(prefix []byte, kvs []merkledb.KeyValue) error { + db.updateLock.Lock() + defer db.updateLock.Unlock() + + fmt.Println( + "updating", + "prefix", hex.EncodeToString(prefix), + "kvs", len(kvs), + ) + + trID := db.getTrieID(prefix) + tr, err := trie.New(trID, db.triedb) + if err != nil { + return err + } + for _, op := range kvs { + if len(op.Value) == 0 { + if err := tr.Delete(op.Key); err != nil { + return err + } + } else { + if err := tr.Update(op.Key, op.Value); err != nil { + return err + } + } + } + root, nodeSet, err := tr.Commit(true) + if err != nil { + return err + } + fmt.Fprintln( + os.Stderr, + "committing", len(kvs), + "parent", hex.EncodeToString(db.layerID[:]), + "root", hex.EncodeToString(root[:]), + "trID.Root", hex.EncodeToString(trID.Root[:]), + ) + if root == trID.Root { + return nil + } + nodes := trienode.NewWithNodeSet(nodeSet) + layerID := root + if len(prefix) > 0 { + // Just guarantees uniqueness + hasher := sha3.NewLegacyKeccak256() + hasher.Write(db.layerID[:]) + hasher.Write(root[:]) + layerID = common.BytesToHash(hasher.Sum(nil)) + } + if err := db.triedb.Update(layerID, db.layerID, 0, nodes, nil); err != nil { + return err + } + if len(prefix) == 0 { + db.root = root + fmt.Println("root updated", hex.EncodeToString(db.root[:])) + } + db.layerID = layerID + db.lastRoots[common.BytesToHash(prefix)] = root + return nil +} + +func (db *db) Close() error { + if err := db.triedb.Commit(db.layerID, false); err != nil { + return err + } + if db.layerID != db.root { + nodes := trienode.NewMergedNodeSet() + if err := db.triedb.Update(db.root, db.layerID, 0, nodes, nil); err != nil { + return err + } + if err := db.triedb.Commit(db.root, false); err != nil { + return err + } + } + return db.db.Put(rootKey, db.root[:]) +} + +func (db *db) GetChangeProof(ctx context.Context, startRootID, endRootID ids.ID, start, end maybe.Maybe[[]byte], maxLength int) (*ChangeProof, error) { + startStateRoot := common.BytesToHash(startRootID[:]) + startTrie, err := 
trie.New(trie.StateTrieID(startStateRoot), db.triedb) + if err != nil { + return nil, merkledb.ErrInsufficientHistory + } + endStateRoot := common.BytesToHash(endRootID[:]) + endTrie, err := trie.New(trie.StateTrieID(endStateRoot), db.triedb) + if err != nil { + return nil, merkledb.ErrNoEndRoot + } + var additionalProof proof + if len(start.Value()) == 64 { + // This is a storage trie + accHash := start.Value()[:32] + startAccBytes, err := startTrie.Get(accHash) + if err != nil { + return nil, err + } + startAcc := new(types.StateAccount) + if err := rlp.DecodeBytes(startAccBytes, startAcc); err != nil { + return nil, err + } + + endAccBytes, err := endTrie.Get(accHash) + if err != nil { + return nil, err + } + endAcc := new(types.StateAccount) + if err := rlp.DecodeBytes(endAccBytes, endAcc); err != nil { + return nil, err + } + + // TODO: to prove the account root, we include this for now. + // The client can find a better way to track this. + if err := endTrie.Prove(accHash, (*proof)(&additionalProof)); err != nil { + return nil, err + } + + startTrie, err = trie.New(trie.StorageTrieID(startStateRoot, common.BytesToHash(accHash), startAcc.Root), db.triedb) + if err != nil { + return nil, merkledb.ErrInsufficientHistory + } + endTrie, err = trie.New(trie.StorageTrieID(endStateRoot, common.BytesToHash(accHash), endAcc.Root), db.triedb) + if err != nil { + return nil, merkledb.ErrNoEndRoot + } + + if end.HasValue() { + if len(end.Value()) != 64 { + return nil, fmt.Errorf("invalid end key length: %d", len(end.Value())) + } + if !bytes.Equal(end.Value()[:32], accHash) { + return nil, fmt.Errorf("end key does not match account hash: %x != %x", end.Value()[:32], accHash) + } + end = maybe.Some(end.Value()[32:]) + } + start = maybe.Some(start.Value()[32:]) + } + + response, err := db.getChangeProof(ctx, startTrie, endTrie, startRootID, endRootID, start, end, maxLength) + if err != nil { + return nil, err + } + response.StartProof = append(response.StartProof, additionalProof...) 
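+	// For storage tries, the account proof gathered above is appended to the start proof so the verifier can recover the storage root from the state root.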
+ return response, nil +} + +func (db *db) getChangeProof( + ctx context.Context, startTrie, endTrie *trie.Trie, + startRootID, endRootID ids.ID, start, end maybe.Maybe[[]byte], maxLength int) (*ChangeProof, error) { + startIt, err := startTrie.NodeIterator(start.Value()) + if err != nil { + return nil, fmt.Errorf("failed to create start iterator: %w", err) + } + endIt, err := endTrie.NodeIterator(start.Value()) + if err != nil { + return nil, fmt.Errorf("failed to create end iterator: %w", err) + } + + startToEnd, _ := trie.NewDifferenceIterator(startIt, endIt) + + startIt, err = startTrie.NodeIterator(start.Value()) + if err != nil { + return nil, fmt.Errorf("failed to create start iterator: %w", err) + } + endIt, err = endTrie.NodeIterator(start.Value()) + if err != nil { + return nil, fmt.Errorf("failed to create end iterator: %w", err) + } + endToStart, _ := trie.NewDifferenceIterator(endIt, startIt) + + unionIt, _ := trie.NewUnionIterator([]trie.NodeIterator{startToEnd, endToStart}) + it := trie.NewIterator(unionIt) + + response := &ChangeProof{} + for it.Next() { + if len(response.KeyChanges) >= maxLength { + break + } + if end.HasValue() && bytes.Compare(it.Key, end.Value()) > 0 { + break + } + current, err := endTrie.Get(it.Key) + if err != nil { + current = nil + if _, ok := err.(*trie.MissingNodeError); !ok { + return nil, err + } + } + currentVal := maybe.Nothing[[]byte]() + if len(current) > 0 { + currentVal = maybe.Some(current) + } + response.KeyChanges = append(response.KeyChanges, merkledb.KeyChange{ + Key: bytes.Clone(it.Key), + Value: currentVal, + }) + //fmt.Println("--> CHANGE kv", hex.EncodeToString(it.Key), hex.EncodeToString(current)) + } + + // for i, it := range response.KeyChanges { + // fmt.Println("--> CHANGE kv", i, hex.EncodeToString(it.Key), hex.EncodeToString(it.Value.Value())) + // } + startKey := start.Value() + if len(startKey) == 0 && (len(response.KeyChanges) > 0 || end.HasValue()) { + startKey = bytes.Repeat([]byte{0}, 32) // XXX + } + + /////// + // Sanity check + ////// + endKey := end.Value() + if len(response.KeyChanges) > 0 { + endKey = response.KeyChanges[len(response.KeyChanges)-1].Key + } + keyVals := make(map[string][]byte) + if err := getKVsFromTrie(startTrie, startKey, endKey, keyVals); err != nil { + panic("failed to get key-values") + } + for _, kv := range response.KeyChanges { + val := kv.Value.Value() + if len(val) == 0 { + delete(keyVals, string(kv.Key)) + continue + } + keyVals[string(kv.Key)] = val + } + + // Now get them from the end trie + keyValsEnd := make(map[string][]byte) + if err := getKVsFromTrie(endTrie, startKey, endKey, keyValsEnd); err != nil { + panic("failed to get key-values") + } + // Make sure the key-values are the same + if len(keyVals) != len(keyValsEnd) { + panic("key-values don't match") + } + for key, val := range keyVals { + if !bytes.Equal(val, keyValsEnd[key]) { + panic("key-values don't match") + } + } + ///// + + tr := endTrie + if err := tr.Prove(startKey, (*proof)(&response.StartProof)); err != nil { + return nil, err + } + endProofKey := []byte{} + if len(response.KeyChanges) > 0 { + // If there is a non-zero number of keys, set [end] for the range proof to the last key. + endProofKey = response.KeyChanges[len(response.KeyChanges)-1].Key + } else if end.HasValue() { // is empty + // If there are no keys, and [end] is set, set [end] for the range proof to [end]. 
+ endProofKey = end.Value() + } + if err := tr.Prove(endProofKey, (*proof)(&response.EndProof)); err != nil { + return nil, err + } + fmt.Println("CHANGE proof generated", + "start", hex.EncodeToString(startKey), + "end", hex.EncodeToString(end.Value()), + "endProofKey", hex.EncodeToString(endProofKey), + "startProof", len(response.StartProof), + "endProof", len(response.EndProof), + "keyValues", len(response.KeyChanges), + ) + return response, nil +} + +func (db *db) VerifyChangeProof(ctx context.Context, proof *ChangeProof, start, end maybe.Maybe[[]byte], expectedEndRootID ids.ID) error { + var prefix []byte + verifyRootID := expectedEndRootID + if len(start.Value()) == 64 { + prefix = start.Value()[:32] + start = maybe.Some(start.Value()[32:]) + if end.HasValue() { + end = maybe.Some(end.Value()[32:]) + } + + // If the proof is for a storage trie, the expected root ID should be + // recovered from the "additionalProof", included in StartProof. + proofDB := rawdb.NewMemoryDatabase() + for _, node := range proof.StartProof { + if err := proofDB.Put(node.Key.Bytes(), node.ValueOrHash.Value()); err != nil { + return err + } + } + val, err := trie.VerifyProof( + common.BytesToHash(expectedEndRootID[:]), + prefix, + proofDB, + ) + if err != nil { + return fmt.Errorf("failed to verify proof: %w", err) + } + account := new(types.StateAccount) + if err := rlp.DecodeBytes(val, account); err != nil { + return fmt.Errorf("failed to decode account: %w", err) + } + + verifyRootID = ids.ID(account.Root) + } + + if err := db.verifyChangeProof(ctx, prefix, proof, start, end, verifyRootID); err != nil { + return fmt.Errorf("failed to verify change proof: %w", err) + } + // HACK: tacking on start + if len(proof.KeyChanges) > 0 { + proof.KeyChanges = append( + proof.KeyChanges, + merkledb.KeyChange{Key: []byte("PREFIX"), Value: maybe.Some(prefix)}, + ) + } + /// + return nil +} + +func (db *db) verifyChangeProof( + ctx context.Context, prefix []byte, + proof *ChangeProof, start, end maybe.Maybe[[]byte], expectedEndRootID ids.ID) error { + fmt.Println( + "change proof verification", + "start", hex.EncodeToString(start.Value()), + "end", hex.EncodeToString(end.Value()), + "expectedRootID", expectedEndRootID, + "kvs", len(proof.KeyChanges), + ) + switch { + case start.HasValue() && end.HasValue() && bytes.Compare(start.Value(), end.Value()) > 0: + return ErrStartAfterEnd + case proof.Empty(): + return ErrEmptyProof + case end.HasValue() && len(proof.KeyChanges) == 0 && len(proof.EndProof) == 0: + // We requested an end proof but didn't get one. + return ErrNoEndProof + case start.HasValue() && len(proof.StartProof) == 0 && len(proof.EndProof) == 0: + // We requested a start proof but didn't get one. + // Note that we also have to check that [proof.EndProof] is empty + // to handle the case that the start proof is empty because all + // its nodes are also in the end proof, and those nodes are omitted. 
+ return ErrNoStartProof + } + + proofDB := rawdb.NewMemoryDatabase() + for _, node := range proof.StartProof { + if err := proofDB.Put(node.Key.Bytes(), node.ValueOrHash.Value()); err != nil { + return err + } + } + for _, node := range proof.EndProof { + if err := proofDB.Put(node.Key.Bytes(), node.ValueOrHash.Value()); err != nil { + return err + } + } + + startKey := start.Value() + if len(startKey) == 0 { + startKey = bytes.Repeat([]byte{0}, 32) // XXX + } + + kvs := make(map[string][]byte) + endOfProofRange := end.Value() + if len(proof.KeyChanges) > 0 { + endOfProofRange = proof.KeyChanges[len(proof.KeyChanges)-1].Key + } + if err := db.getKVs(prefix, startKey, endOfProofRange, kvs); err != nil { + return fmt.Errorf("failed to get key-values: %w", err) + } + fmt.Println("kvs", len(kvs)) + for _, kv := range proof.KeyChanges { + if len(kv.Key) != 32 { + return fmt.Errorf("invalid key length: %d", len(kv.Key)) + } + val := kv.Value.Value() + + kvs[string(kv.Key)] = val + } + + keys := make([][]byte, 0, len(kvs)) + for key := range kvs { + keys = append(keys, []byte(key)) + } + slices.SortFunc(keys, bytes.Compare) + vals := make([][]byte, len(keys)) + for i, key := range keys { + vals[i] = kvs[string(key)] + } + + root := common.BytesToHash(expectedEndRootID[:]) + if len(kvs) == 0 && end.HasValue() { // XXX: should have proper isEmpty + if err := trie.VerifyRangeProofEmpty(root, startKey, end.Value(), proofDB); err != nil { + fmt.Println("proof verification failed empty", err) + return err + } + } else { + if len(proof.KeyChanges) == 0 && len(kvs) > 0 { + // No changes here, but we have some KVs in the tree. + // The proof was created with end.Value() as the end of the range. + if end.HasValue() { + lastKey := keys[len(keys)-1] + // This means end.Value() is not in the trie. + // So, we can consider it as a deletion. 
+ if bytes.Compare(lastKey, end.Value()) < 0 { + keys = append(keys, end.Value()) + vals = append(vals, nil) + } + } + } + if _, err := trie.VerifyRangeProofAllowDeletions(root, startKey, keys, vals, proofDB); err != nil { + fmt.Println("proof verification failed", err, "kvs", len(keys)) + return err + } + } + + fmt.Println( + "change proof verification SUCCESS", + "start", hex.EncodeToString(start.Value()), + "end", hex.EncodeToString(end.Value()), + "expectedRootID", expectedEndRootID, + "kvs", len(proof.KeyChanges), + ) + return nil +} + +// assumes updateLock is held +func (db *db) getTrieID(prefix []byte) *trie.ID { + owner := common.BytesToHash(prefix) + root := db.lastRoots[owner] + if root == (common.Hash{}) { + root = types.EmptyRootHash + } + return trie.StorageTrieID(db.layerID, owner, root) +} + +func (db *db) getKVs(prefix []byte, startKey, endKey []byte, kvs map[string][]byte) error { + db.updateLock.RLock() + defer db.updateLock.RUnlock() + + tr, err := trie.New(db.getTrieID(prefix), db.triedb) + if err != nil { + return err + } + return getKVsFromTrie(tr, startKey, endKey, kvs) +} + +func getKVsFromTrie(tr *trie.Trie, startKey, endKey []byte, kvs map[string][]byte) error { + nodeIt, err := tr.NodeIterator(startKey) + if err != nil { + return err + } + it := trie.NewIterator(nodeIt) + for it.Next() { + if len(endKey) > 0 && bytes.Compare(it.Key, endKey) > 0 { + break + } + kvs[string(it.Key)] = bytes.Clone(it.Value) + } + return nil +} + +func (db *db) CommitChangeProof(ctx context.Context, proof *ChangeProof) error { + // UNHACK: remove prefix + last := proof.KeyChanges[len(proof.KeyChanges)-1] + if !bytes.Equal(last.Key, []byte("PREFIX")) { + panic("invalid prefix") + } + prefix := last.Value.Value() + proof.KeyChanges = proof.KeyChanges[:len(proof.KeyChanges)-1] + /// + + keyValues := make([]KeyValue, 0, len(proof.KeyChanges)) + for _, change := range proof.KeyChanges { + keyValues = append(keyValues, KeyValue{ + Key: change.Key, + Value: change.Value.Value(), + }) + } + + return db.updateKVs(prefix, keyValues) +} + +func (db *db) Put(key, value []byte) error { + return db.updateKVs(nil, []KeyValue{{Key: key, Value: value}}) +} + +func (db *db) NewBatch() *trieBatch { + return &trieBatch{db: db} +} + +type trieBatch struct { + db *db + kvs []KeyValue +} + +func (t *trieBatch) Put(key, value []byte) error { + t.kvs = append(t.kvs, KeyValue{Key: key, Value: value}) + return nil +} + +func (t *trieBatch) Write() error { + return t.db.updateKVs(nil, t.kvs) +} + +func (db *db) IterateOneKey(key []byte) ([]byte, bool) { + db.updateLock.RLock() + defer db.updateLock.RUnlock() + + tr, err := trie.New(trie.StateTrieID(db.root), db.triedb) + if err != nil { + panic("failed to create trie") + } + nodeIt, err := tr.NodeIterator(key) + if err != nil { + panic("failed to create iterator") + } + it := trie.NewIterator(nodeIt) + if !it.Next() { + return nil, false + } + return it.Key, true +} diff --git a/x/ethsync/db_test.go b/x/ethsync/db_test.go new file mode 100644 index 000000000000..10287732357b --- /dev/null +++ b/x/ethsync/db_test.go @@ -0,0 +1,69 @@ +// (c) 2024, Ava Labs, Inc. All rights reserved. +// See the file LICENSE for licensing terms. 
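Note on the 64-byte keys handled in VerifyChangeProof above, before the test file that follows: when a sync work item's start key is 64 bytes, the first 32 bytes select the owning account (the prefix that is passed down to verifyChangeProof and getKVs) and the remaining 32 bytes are the key inside that account's storage trie. A minimal sketch of that split, assuming this accountHash||storageKey layout; the helper name splitPrefixedKey is illustrative and not part of the patch:

// splitPrefixedKey mirrors the slicing done in VerifyChangeProof: a 64-byte
// key is read as accountHash||storageKey, while shorter keys belong to the
// account trie and carry no prefix. (Sketch only; assumes the layout above.)
func splitPrefixedKey(key []byte) (prefix, inner []byte) {
	if len(key) == 64 {
		return key[:32], key[32:]
	}
	return nil, key
}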
+ +package ethsync + +import ( + "context" + "testing" + + "github.com/ava-labs/avalanchego/database/memdb" + "github.com/ava-labs/avalanchego/x/merkledb" + "github.com/ethereum/go-ethereum/common" + "github.com/stretchr/testify/require" +) + +func TestUpdateKVsWithPrefix(t *testing.T) { + require := require.New(t) + ctx := context.Background() + + disk := memdb.New() + db, err := New(ctx, disk, merkledb.Config{}) + require.NoError(err) + _ = db + + ops := []merkledb.KeyValue{ + {Key: []byte("a"), Value: []byte("1")}, + {Key: []byte("b"), Value: []byte("2")}, + {Key: []byte("c"), Value: []byte("3")}, + {Key: []byte("d"), Value: []byte("4")}, + } + err = db.updateKVs(nil, ops) + require.NoError(err) + + ops2 := []merkledb.KeyValue{ + {Key: []byte("A"), Value: []byte("1")}, + {Key: []byte("B"), Value: []byte("2")}, + {Key: []byte("C"), Value: []byte("3")}, + } + accHash := common.Hash{0x01} + err = db.updateKVs(accHash.Bytes(), ops2) + require.NoError(err) + + ops3 := []merkledb.KeyValue{ + {Key: []byte("e"), Value: []byte("5")}, + {Key: []byte("f"), Value: []byte("6")}, + } + err = db.updateKVs(accHash.Bytes(), ops3) + require.NoError(err) + + ops4 := []merkledb.KeyValue{ + {Key: []byte("g"), Value: []byte("7")}, + {Key: []byte("h"), Value: []byte("8")}, + } + err = db.updateKVs(nil, ops4) + require.NoError(err) + + kvs0 := make(map[string][]byte) + require.NoError(db.getKVs(nil, nil, nil, kvs0)) + for k, v := range kvs0 { + t.Logf("k: %s, v: %s", k, v) + } + + t.Log("=====================================") + kvs1 := make(map[string][]byte) + require.NoError(db.getKVs(accHash.Bytes(), nil, nil, kvs1)) + for k, v := range kvs1 { + t.Logf("k: %s, v: %s", k, v) + } +} diff --git a/x/ethsync/sync_test.go b/x/ethsync/sync_test.go new file mode 100644 index 000000000000..b89e8c0f31f3 --- /dev/null +++ b/x/ethsync/sync_test.go @@ -0,0 +1,190 @@ +// (c) 2024, Ava Labs, Inc. All rights reserved. +// See the file LICENSE for licensing terms. 
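The sync test that follows wires the new KVCallback hook (added to sync.ManagerConfig in x/sync/manager.go further down) into the manager so the client can apply key changes as proofs are verified. A minimal sketch of a callback with the declared signature, assuming the imports already used by the test below; the body is illustrative and is not the client.KVCallback the test installs:

cfg := sync.ManagerConfig{
	// ... DB, proof clients, TargetRoot, etc. as in TestSync below ...
	KVCallback: func(start, end maybe.Maybe[[]byte], priority byte, endID ids.ID, kvs []merkledb.KeyChange) error {
		// The manager calls this with the key changes from each verified range
		// or change proof for the work item's range; returning an error aborts
		// the sync via setError.
		for _, kv := range kvs {
			_ = kv // apply kv.Key / kv.Value to the local database here
		}
		return nil
	},
}
_ = cfg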
+ +package ethsync + +import ( + "context" + "encoding/binary" + "os" + "testing" + "time" + + "github.com/ava-labs/avalanchego/database/memdb" + "github.com/ava-labs/avalanchego/ids" + "github.com/ava-labs/avalanchego/network/p2p/p2ptest" + "github.com/ava-labs/avalanchego/utils/logging" + "github.com/ava-labs/avalanchego/x/merkledb" + "github.com/ava-labs/avalanchego/x/sync" + "github.com/ava-labs/coreth/core/rawdb" + "github.com/ava-labs/coreth/core/state" + "github.com/ava-labs/coreth/core/types" + "github.com/ava-labs/coreth/plugin/evm" + "github.com/ava-labs/coreth/triedb" + "github.com/ava-labs/coreth/triedb/pathdb" + "github.com/ethereum/go-ethereum/common" + "github.com/holiman/uint256" + "github.com/prometheus/client_golang/prometheus" + "github.com/stretchr/testify/require" + "golang.org/x/crypto/sha3" +) + +const defaultSimultaneousWorkLimit = 5 + +var lastKeyStoredAt = common.Hash{'p', 'r', 'e', 'f', 'i', 'x'} + +func getVal(addr common.Address, i uint64) common.Hash { + bytes := binary.BigEndian.AppendUint64(nil, i) + h := sha3.NewLegacyKeccak256() + h.Write(addr.Bytes()) + h.Write(bytes) + return common.BytesToHash(h.Sum(nil)) +} + +func addState(statedb *state.StateDB, addr common.Address, kvs int) { + amount := uint256.NewInt(100) + statedb.AddBalance(addr, amount) + statedb.SetNonce(addr, statedb.GetNonce(addr)+1) + + lastKeyHash := statedb.GetState(addr, lastKeyStoredAt) + lastKey := binary.BigEndian.Uint64(lastKeyHash.Bytes()[24:32]) + for i := lastKey; i < lastKey+uint64(kvs); i++ { + bytes := binary.BigEndian.AppendUint64(nil, i) + statedb.SetState(addr, common.BytesToHash(bytes), getVal(addr, i)) + } + bytes := binary.BigEndian.AppendUint64(nil, lastKey+uint64(kvs)) + statedb.SetState(addr, lastKeyStoredAt, common.BytesToHash(bytes)) + // fmt.Println("--> Address: ", addr, " KVs: ", lastKey+uint64(kvs)) +} + +func verifyState(t *testing.T, statedb *state.StateDB, addr common.Address) { + require := require.New(t) + + balance := statedb.GetBalance(addr) + nonce := statedb.GetNonce(addr) + + lastKeyHash := statedb.GetState(addr, lastKeyStoredAt) + lastKey := binary.BigEndian.Uint64(lastKeyHash.Bytes()[24:32]) + for i := uint64(0); i < lastKey; i++ { + bytes := binary.BigEndian.AppendUint64(nil, i) + require.Equal( + getVal(addr, i), + statedb.GetState(addr, common.BytesToHash(bytes))) + } + + t.Logf("Address: %s, KVs: %d, Balance: %s, Nonce: %d", addr, lastKey, balance, nonce) +} + +func TestSync(t *testing.T) { + require := require.New(t) + ctx := context.Background() + // Create a server database, fill with some state + serverDisk := rawdb.NewMemoryDatabase() + serverDB := state.NewDatabaseWithConfig(serverDisk, &triedb.Config{PathDB: pathdb.Defaults}) + + accounts := make([]common.Address, 100) + for i := 0; i < len(accounts); i++ { + bytes := binary.BigEndian.AppendUint64(nil, uint64(i)) + accounts[i] = common.BytesToAddress(bytes) + } + + accountsPerState := 10 + serverStates := 100 + serverRoot := types.EmptyRootHash + serverRoots := make([]common.Hash, serverStates) + for i := 0; i < serverStates; i++ { + statedb, err := state.New(serverRoot, serverDB, nil) + require.NoError(err) + + for j := 0; j < accountsPerState; j++ { + accIdx := (i*accountsPerState + j) % len(accounts) + kvs := 0 + if j%5 == 0 { + kvs = 1000 + } + addState(statedb, accounts[accIdx], kvs) + } + + serverRoot, err = statedb.Commit(uint64(i), true) + require.NoError(err) + serverRoots[i] = serverRoot + t.Logf("Server state %d: %s", i, serverRoot) + } + + // Create a client database + 
clientDisk := memdb.New() + client, err := New(ctx, clientDisk, merkledb.Config{}) + require.NoError(err) + + clientNodeID, serverNodeID := ids.GenerateTestNodeID(), ids.GenerateTestNodeID() + + server := &db{ + triedb: serverDB.TrieDB(), + root: serverRoot, + } + + rangeProofs := sync.NewGetRangeProofHandler(logging.NoLog{}, server) + changeProofs := sync.NewGetChangeProofHandler(logging.NoLog{}, server) + + log := logging.NewWrappedCore( + logging.Info, + os.Stdout, + logging.Auto.ConsoleEncoder(), + ) + syncRootIdx := 50 + initialRoot := serverRoots[syncRootIdx] + managerConfig := sync.ManagerConfig{ + DB: client, + Log: logging.NewLogger("sync", log), + BranchFactor: merkledb.BranchFactor16, + TargetRoot: ids.ID(initialRoot), + SimultaneousWorkLimit: defaultSimultaneousWorkLimit, + RangeProofClient: p2ptest.NewClient(t, ctx, rangeProofs, clientNodeID, serverNodeID), + ChangeProofClient: p2ptest.NewClient(t, ctx, changeProofs, clientNodeID, serverNodeID), + KVCallback: client.KVCallback, + } + manager, err := sync.NewManager(managerConfig, prometheus.NewRegistry()) + require.NoError(err) + client.manager = manager // Needed to hook-up KVCallback + + require.NoError(manager.Start(ctx)) + + doneCh := make(chan struct{}) + go func() { + require.NoError(manager.Wait(ctx)) + defer close(doneCh) + }() + + lastIdx := len(serverRoots) - 1 +l: + for { + select { + case <-doneCh: + break l + case <-time.After(50 * time.Millisecond): + + if syncRootIdx+1 > lastIdx { + <-doneCh + break l + } + + nextTarget := ids.ID(serverRoots[syncRootIdx+1]) + err := manager.UpdateSyncTarget(nextTarget) + if err == sync.ErrAlreadyClosed { + break l + } + require.NoError(err) + syncRootIdx++ + } + } + + // Verify the client database + ethdb := rawdb.NewDatabase(evm.Database{Database: clientDisk}) + clientDB := state.NewDatabaseWithConfig(ethdb, &triedb.Config{PathDB: pathdb.Defaults}) + t.Logf("syncRootIdx: %d, syncRoot: %x", syncRootIdx, serverRoots[syncRootIdx]) + clientState, err := state.New(serverRoots[syncRootIdx], clientDB, nil) + require.NoError(err) + for _, account := range accounts { + verifyState(t, clientState, account) + } +} diff --git a/x/ethsync/utils.go b/x/ethsync/utils.go new file mode 100644 index 000000000000..d8f87d6209ad --- /dev/null +++ b/x/ethsync/utils.go @@ -0,0 +1,40 @@ +// (c) 2024, Ava Labs, Inc. All rights reserved. +// See the file LICENSE for licensing terms. 
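The utils.go file below supplies the small ethdb.KeyValueWriter adapter that db.go above relies on: coreth's trie.Prove emits each node on the proof path through a KeyValueWriter, so casting a *[]ProofNode to *proof collects merkledb-style proof nodes directly. A minimal usage sketch under that assumption, with tr and key standing in for the trie and boundary key used in getChangeProof:

var nodes []ProofNode
// Prove writes every node on the path to key into the writer; the proof
// adapter below appends each one to the slice as a merkledb-style ProofNode.
if err := tr.Prove(key, (*proof)(&nodes)); err != nil {
	return err
}
// nodes can now be used as ChangeProof.StartProof or EndProof.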
+ +package ethsync + +import ( + "github.com/ava-labs/avalanchego/utils/maybe" + "github.com/ava-labs/avalanchego/x/merkledb" + "github.com/ethereum/go-ethereum/ethdb" +) + +// IncrOne increments bytes value by one +func IncrOne(bytes []byte) { + index := len(bytes) - 1 + for index >= 0 { + if bytes[index] < 255 { + bytes[index]++ + break + } else { + bytes[index] = 0 + index-- + } + } +} + +var _ ethdb.KeyValueWriter = (*proof)(nil) + +type proof []ProofNode + +func (p *proof) Put(k, v []byte) error { + *p = append(*p, ProofNode{ + Key: merkledb.ToKey(k), + ValueOrHash: maybe.Some(v), + }) + return nil +} + +func (*proof) Delete([]byte) error { + panic("should not be called") +} diff --git a/x/merkledb/proof.go b/x/merkledb/proof.go index 2191f1c1a749..d00db425c689 100644 --- a/x/merkledb/proof.go +++ b/x/merkledb/proof.go @@ -194,51 +194,6 @@ func (proof *Proof) Verify( return nil } -func (proof *Proof) ToProto() *pb.Proof { - value := &pb.MaybeBytes{ - Value: proof.Value.Value(), - IsNothing: proof.Value.IsNothing(), - } - - pbProof := &pb.Proof{ - Key: proof.Key.Bytes(), - Value: value, - } - - pbProof.Proof = make([]*pb.ProofNode, len(proof.Path)) - for i, node := range proof.Path { - pbProof.Proof[i] = node.ToProto() - } - - return pbProof -} - -func (proof *Proof) UnmarshalProto(pbProof *pb.Proof) error { - switch { - case pbProof == nil: - return ErrNilProof - case pbProof.Value == nil: - return ErrNilValue - case pbProof.Value.IsNothing && len(pbProof.Value.Value) != 0: - return ErrInvalidMaybe - } - - proof.Key = ToKey(pbProof.Key) - - if !pbProof.Value.IsNothing { - proof.Value = maybe.Some(pbProof.Value.Value) - } - - proof.Path = make([]ProofNode, len(pbProof.Proof)) - for i, pbNode := range pbProof.Proof { - if err := proof.Path[i].UnmarshalProto(pbNode); err != nil { - return err - } - } - - return nil -} - type KeyValue struct { Key []byte Value []byte diff --git a/x/merkledb/proof_test.go b/x/merkledb/proof_test.go index d8bad6c1e142..542c6bc6d47a 100644 --- a/x/merkledb/proof_test.go +++ b/x/merkledb/proof_test.go @@ -1598,92 +1598,6 @@ func TestChangeProofUnmarshalProtoInvalidMaybe(t *testing.T) { require.ErrorIs(t, err, ErrInvalidMaybe) } -func FuzzProofProtoMarshalUnmarshal(f *testing.F) { - f.Fuzz(func( - t *testing.T, - randSeed int64, - ) { - require := require.New(t) - rand := rand.New(rand.NewSource(randSeed)) // #nosec G404 - - // Make a random proof. - proofLen := rand.Intn(32) - proofPath := make([]ProofNode, proofLen) - for i := 0; i < proofLen; i++ { - proofPath[i] = newRandomProofNode(rand) - } - - keyLen := rand.Intn(32) - key := make([]byte, keyLen) - _, _ = rand.Read(key) - - hasValue := rand.Intn(2) == 1 - value := maybe.Nothing[[]byte]() - if hasValue { - valueLen := rand.Intn(32) - valueBytes := make([]byte, valueLen) - _, _ = rand.Read(valueBytes) - value = maybe.Some(valueBytes) - } - - proof := Proof{ - Key: ToKey(key), - Value: value, - Path: proofPath, - } - - // Marshal and unmarshal it. - // Assert the unmarshaled one is the same as the original. - var unmarshaledProof Proof - protoProof := proof.ToProto() - require.NoError(unmarshaledProof.UnmarshalProto(protoProof)) - require.Equal(proof, unmarshaledProof) - - // Marshaling again should yield same result. 
- protoUnmarshaledProof := unmarshaledProof.ToProto() - require.Equal(protoProof, protoUnmarshaledProof) - }) -} - -func TestProofProtoUnmarshal(t *testing.T) { - type test struct { - name string - proof *pb.Proof - expectedErr error - } - - tests := []test{ - { - name: "nil", - proof: nil, - expectedErr: ErrNilProof, - }, - { - name: "nil value", - proof: &pb.Proof{}, - expectedErr: ErrNilValue, - }, - { - name: "invalid maybe", - proof: &pb.Proof{ - Value: &pb.MaybeBytes{ - Value: []byte{1}, - IsNothing: true, - }, - }, - expectedErr: ErrInvalidMaybe, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - var proof Proof - err := proof.UnmarshalProto(tt.proof) - require.ErrorIs(t, err, tt.expectedErr) - }) - } -} - func FuzzRangeProofInvariants(f *testing.F) { deletePortion := 0.25 f.Fuzz(func( diff --git a/x/sync/client_test.go b/x/sync/client_test.go index decc3e20405d..499feee35928 100644 --- a/x/sync/client_test.go +++ b/x/sync/client_test.go @@ -40,7 +40,7 @@ func newDefaultDBConfig() merkledb.Config { func newFlakyRangeProofHandler( t *testing.T, - db merkledb.MerkleDB, + db DB, modifyResponse func(response *merkledb.RangeProof), ) p2p.Handler { handler := NewGetRangeProofHandler(logging.NoLog{}, db) @@ -76,7 +76,7 @@ func newFlakyRangeProofHandler( func newFlakyChangeProofHandler( t *testing.T, - db merkledb.MerkleDB, + db DB, modifyResponse func(response *merkledb.ChangeProof), ) p2p.Handler { handler := NewGetChangeProofHandler(logging.NoLog{}, db) diff --git a/x/sync/db.go b/x/sync/db.go index 5ed9061b5889..30045d53459f 100644 --- a/x/sync/db.go +++ b/x/sync/db.go @@ -8,7 +8,6 @@ import "github.com/ava-labs/avalanchego/x/merkledb" type DB interface { merkledb.Clearer merkledb.MerkleRootGetter - merkledb.ProofGetter merkledb.ChangeProofer merkledb.RangeProofer } diff --git a/x/sync/g_db/db_client.go b/x/sync/g_db/db_client.go index b2168168a938..8c30c88466cd 100644 --- a/x/sync/g_db/db_client.go +++ b/x/sync/g_db/db_client.go @@ -148,21 +148,6 @@ func (c *DBClient) CommitChangeProof(ctx context.Context, proof *merkledb.Change return err } -func (c *DBClient) GetProof(ctx context.Context, key []byte) (*merkledb.Proof, error) { - resp, err := c.client.GetProof(ctx, &pb.GetProofRequest{ - Key: key, - }) - if err != nil { - return nil, err - } - - var proof merkledb.Proof - if err := proof.UnmarshalProto(resp.Proof); err != nil { - return nil, err - } - return &proof, nil -} - func (c *DBClient) GetRangeProofAtRoot( ctx context.Context, rootID ids.ID, diff --git a/x/sync/g_db/db_server.go b/x/sync/g_db/db_server.go index 795e2bf0e1fd..4ca3e85f6fbc 100644 --- a/x/sync/g_db/db_server.go +++ b/x/sync/g_db/db_server.go @@ -136,20 +136,6 @@ func (s *DBServer) CommitChangeProof( return &emptypb.Empty{}, err } -func (s *DBServer) GetProof( - ctx context.Context, - req *pb.GetProofRequest, -) (*pb.GetProofResponse, error) { - proof, err := s.db.GetProof(ctx, req.Key) - if err != nil { - return nil, err - } - - return &pb.GetProofResponse{ - Proof: proof.ToProto(), - }, nil -} - func (s *DBServer) GetRangeProof( ctx context.Context, req *pb.GetRangeProofRequest, diff --git a/x/sync/manager.go b/x/sync/manager.go index 57951f83eff9..1c2ab99e617f 100644 --- a/x/sync/manager.go +++ b/x/sync/manager.go @@ -8,6 +8,7 @@ import ( "context" "errors" "fmt" + "io" "math" "slices" "sync" @@ -87,6 +88,12 @@ func (w *workItem) requestFailed() { } func newWorkItem(localRootID ids.ID, start maybe.Maybe[[]byte], end maybe.Maybe[[]byte], priority priority, queueTime time.Time) 
*workItem { + if len(start.Value()) == 64 && len(end.Value()) != 64 { + panic("invalid work item") + } + if len(end.Value()) == 64 && len(start.Value()) != 64 { + panic("invalid work item") + } return &workItem{ localRootID: localRootID, start: start, @@ -152,6 +159,12 @@ type ManagerConfig struct { StateSyncNodes []ids.NodeID // If not specified, [merkledb.DefaultHasher] will be used. Hasher merkledb.Hasher + + KVCallback func(start, end maybe.Maybe[[]byte], priority byte, endID ids.ID, keyValues []merkledb.KeyChange) error +} + +func (m *Manager) EnqueueWork(start, end maybe.Maybe[[]byte], priorityAsByte byte) { + m.enqueueWork(newWorkItem(ids.Empty, start, end, priority(priorityAsByte), time.Now())) } func NewManager(config ManagerConfig, registerer prometheus.Registerer) (*Manager, error) { @@ -272,6 +285,10 @@ func (m *Manager) close() { m.cancelCtx() } + if closer, ok := m.config.DB.(io.Closer); ok { + _ = closer.Close() + } + // ensure any goroutines waiting for work from the heaps gets released m.unprocessedWork.Close() m.unprocessedWorkCond.Signal() @@ -539,6 +556,23 @@ func (m *Manager) handleRangeProofResponse( m.setError(err) return nil } + keyChanges := make([]merkledb.KeyChange, len(rangeProof.KeyValues)) + for i, keyValue := range rangeProof.KeyValues { + keyChanges[i] = merkledb.KeyChange{ + Key: keyValue.Key, + Value: maybe.Some(keyValue.Value), + } + } + if m.config.KVCallback != nil { + root, err := ids.ToID(request.RootHash) + if err != nil { + return err + } + if err := m.config.KVCallback(work.start, work.end, byte(work.priority), root, keyChanges); err != nil { + m.setError(err) + return nil + } + } if len(rangeProof.KeyValues) > 0 { largestHandledKey = maybe.Some(rangeProof.KeyValues[len(rangeProof.KeyValues)-1].Key) @@ -607,6 +641,14 @@ func (m *Manager) handleChangeProofResponse( m.setError(err) return nil } + + if m.config.KVCallback != nil { + if err := m.config.KVCallback(work.start, work.end, byte(work.priority), endRoot, changeProof.KeyChanges); err != nil { + m.setError(err) + return nil + } + } + largestHandledKey = maybe.Some(changeProof.KeyChanges[len(changeProof.KeyChanges)-1].Key) } @@ -665,161 +707,28 @@ func (m *Manager) handleChangeProofResponse( // Invariant: [lastReceivedKey] < [rangeEnd]. // If [rangeEnd] is Nothing it's considered > [lastReceivedKey]. func (m *Manager) findNextKey( - ctx context.Context, + _ context.Context, lastReceivedKey []byte, - rangeEnd maybe.Maybe[[]byte], - endProof []merkledb.ProofNode, + end maybe.Maybe[[]byte], + _ []merkledb.ProofNode, ) (maybe.Maybe[[]byte], error) { - if len(endProof) == 0 { - // We try to find the next key to fetch by looking at the end proof. - // If the end proof is empty, we have no information to use. - // Start fetching from the next key after [lastReceivedKey]. - nextKey := lastReceivedKey - nextKey = append(nextKey, 0) - return maybe.Some(nextKey), nil + type nextKeyer interface { + NextKey([]byte, maybe.Maybe[[]byte]) ([]byte, error) } - - // We want the first key larger than the [lastReceivedKey]. - // This is done by taking two proofs for the same key - // (one that was just received as part of a proof, and one from the local db) - // and traversing them from the longest key to the shortest key. - // For each node in these proofs, compare if the children of that node exist - // or have the same ID in the other proof. - proofKeyPath := merkledb.ToKey(lastReceivedKey) - - // If the received proof is an exclusion proof, the last node may be for a - // key that is after the [lastReceivedKey]. 
- // If the last received node's key is after the [lastReceivedKey], it can - // be removed to obtain a valid proof for a prefix of the [lastReceivedKey]. - if !proofKeyPath.HasPrefix(endProof[len(endProof)-1].Key) { - endProof = endProof[:len(endProof)-1] - // update the proofKeyPath to be for the prefix - proofKeyPath = endProof[len(endProof)-1].Key - } - - // get a proof for the same key as the received proof from the local db - localProofOfKey, err := m.config.DB.GetProof(ctx, proofKeyPath.Bytes()) - if err != nil { - return maybe.Nothing[[]byte](), err - } - localProofNodes := localProofOfKey.Path - - // The local proof may also be an exclusion proof with an extra node. - // Remove this extra node if it exists to get a proof of the same key as the received proof - if !proofKeyPath.HasPrefix(localProofNodes[len(localProofNodes)-1].Key) { - localProofNodes = localProofNodes[:len(localProofNodes)-1] - } - - nextKey := maybe.Nothing[[]byte]() - - // Add sentinel node back into the localProofNodes, if it is missing. - // Required to ensure that a common node exists in both proofs - if len(localProofNodes) > 0 && localProofNodes[0].Key.Length() != 0 { - sentinel := merkledb.ProofNode{ - Children: map[byte]ids.ID{ - localProofNodes[0].Key.Token(0, m.tokenSize): ids.Empty, - }, - } - localProofNodes = append([]merkledb.ProofNode{sentinel}, localProofNodes...) - } - - // Add sentinel node back into the endProof, if it is missing. - // Required to ensure that a common node exists in both proofs - if len(endProof) > 0 && endProof[0].Key.Length() != 0 { - sentinel := merkledb.ProofNode{ - Children: map[byte]ids.ID{ - endProof[0].Key.Token(0, m.tokenSize): ids.Empty, - }, - } - endProof = append([]merkledb.ProofNode{sentinel}, endProof...) - } - - localProofNodeIndex := len(localProofNodes) - 1 - receivedProofNodeIndex := len(endProof) - 1 - - // traverse the two proofs from the deepest nodes up to the sentinel node until a difference is found - for localProofNodeIndex >= 0 && receivedProofNodeIndex >= 0 && nextKey.IsNothing() { - localProofNode := localProofNodes[localProofNodeIndex] - receivedProofNode := endProof[receivedProofNodeIndex] - - // [deepestNode] is the proof node with the longest key (deepest in the trie) in the - // two proofs that hasn't been handled yet. - // [deepestNodeFromOtherProof] is the proof node from the other proof with - // the same key/depth if it exists, nil otherwise. 
- var deepestNode, deepestNodeFromOtherProof *merkledb.ProofNode - - // select the deepest proof node from the two proofs - switch { - case receivedProofNode.Key.Length() > localProofNode.Key.Length(): - // there was a branch node in the received proof that isn't in the local proof - // see if the received proof node has children not present in the local proof - deepestNode = &receivedProofNode - - // we have dealt with this received node, so move on to the next received node - receivedProofNodeIndex-- - - case localProofNode.Key.Length() > receivedProofNode.Key.Length(): - // there was a branch node in the local proof that isn't in the received proof - // see if the local proof node has children not present in the received proof - deepestNode = &localProofNode - - // we have dealt with this local node, so move on to the next local node - localProofNodeIndex-- - - default: - // the two nodes are at the same depth - // see if any of the children present in the local proof node are different - // from the children in the received proof node - deepestNode = &localProofNode - deepestNodeFromOtherProof = &receivedProofNode - - // we have dealt with this local node and received node, so move on to the next nodes - localProofNodeIndex-- - receivedProofNodeIndex-- - } - - // We only want to look at the children with keys greater than the proofKey. - // The proof key has the deepest node's key as a prefix, - // so only the next token of the proof key needs to be considered. - - // If the deepest node has the same key as [proofKeyPath], - // then all of its children have keys greater than the proof key, - // so we can start at the 0 token. - startingChildToken := 0 - - // If the deepest node has a key shorter than the key being proven, - // we can look at the next token index of the proof key to determine which of that - // node's children have keys larger than [proofKeyPath]. - // Any child with a token greater than the [proofKeyPath]'s token at that - // index will have a larger key. - if deepestNode.Key.Length() < proofKeyPath.Length() { - startingChildToken = int(proofKeyPath.Token(deepestNode.Key.Length(), m.tokenSize)) + 1 - } - - // determine if there are any differences in the children for the deepest unhandled node of the two proofs - if childIndex, hasDifference := findChildDifference(deepestNode, deepestNodeFromOtherProof, startingChildToken); hasDifference { - nextKey = maybe.Some(deepestNode.Key.Extend(merkledb.ToToken(childIndex, m.tokenSize)).Bytes()) - break + if nextKeyer, ok := m.config.DB.(nextKeyer); ok { + nextKey, err := nextKeyer.NextKey(lastReceivedKey, end) + if err != nil { + return maybe.Nothing[[]byte](), err } - } - - // If the nextKey is before or equal to the [lastReceivedKey] - // then we couldn't find a better answer than the [lastReceivedKey]. - // Set the nextKey to [lastReceivedKey] + 0, which is the first key in - // the open range (lastReceivedKey, rangeEnd). 
- if nextKey.HasValue() && bytes.Compare(nextKey.Value(), lastReceivedKey) <= 0 { - nextKeyVal := slices.Clone(lastReceivedKey) - nextKeyVal = append(nextKeyVal, 0) - nextKey = maybe.Some(nextKeyVal) - } - - // If the [nextKey] is larger than the end of the range, return Nothing to signal that there is no next key in range - if rangeEnd.HasValue() && bytes.Compare(nextKey.Value(), rangeEnd.Value()) >= 0 { - return maybe.Nothing[[]byte](), nil + return maybe.Some(nextKey), nil + } else { + panic("not implemented") } // the nextKey is within the open range (lastReceivedKey, rangeEnd), so return it - return nextKey, nil + nextKey := lastReceivedKey + nextKey = append(nextKey, 0) + return maybe.Some(nextKey), nil } func (m *Manager) Error() error { @@ -973,7 +882,7 @@ func (m *Manager) completeWorkItem(ctx context.Context, work *workItem, largestH } // completed the range [work.start, lastKey], log and record in the completed work heap - m.config.Log.Debug("completed range", + m.config.Log.Info("completed range", zap.Stringer("start", work.start), zap.Stringer("end", largestHandledKey), zap.Stringer("rootID", rootID), @@ -1001,6 +910,15 @@ func (m *Manager) enqueueWork(work *workItem) { // Split the remaining range into to 2. // Find the middle point. mid := midPoint(work.start, work.end) + if len(work.start.Value()) == 64 { + if len(mid.Value()) > 64 { + mid = maybe.Some(mid.Value()[:64]) + } else if len(mid.Value()) < 64 { + panic("unexpected short mid") + } + } else if len(mid.Value()) > 32 { + mid = maybe.Some(mid.Value()[:32]) + } if maybe.Equal(work.start, mid, bytes.Equal) || maybe.Equal(mid, work.end, bytes.Equal) { // The range is too small to split. diff --git a/x/sync/network_server_test.go b/x/sync/network_server_test.go index c78554cea59f..3b3f081c3b2c 100644 --- a/x/sync/network_server_test.go +++ b/x/sync/network_server_test.go @@ -18,6 +18,7 @@ import ( "github.com/ava-labs/avalanchego/network/p2p" "github.com/ava-labs/avalanchego/snow/engine/common" "github.com/ava-labs/avalanchego/utils/logging" + "github.com/ava-labs/avalanchego/x/ethsync" "github.com/ava-labs/avalanchego/x/merkledb" pb "github.com/ava-labs/avalanchego/proto/pb/sync" @@ -28,7 +29,14 @@ func Test_Server_GetRangeProof(t *testing.T) { t.Logf("seed: %d", now) r := rand.New(rand.NewSource(now)) // #nosec G404 - smallTrieDB, err := generateTrieWithMinKeyLen(t, r, defaultRequestKeyLimit, 1) + smallTrieDB, err := ethsync.New( + context.Background(), + memdb.New(), + newDefaultDBConfig(), + ) + require.NoError(t, err) + batch := smallTrieDB.NewBatch() + err = generateWithKeyLenAndTrie(t, batch, r, defaultRequestKeyLimit, 32, 32) require.NoError(t, err) smallTrieRoot, err := smallTrieDB.GetMerkleRoot(context.Background()) require.NoError(t, err) @@ -156,7 +164,7 @@ func Test_Server_GetChangeProof(t *testing.T) { t.Logf("seed: %d", now) r := rand.New(rand.NewSource(now)) // #nosec G404 - serverDB, err := merkledb.New( + serverDB, err := ethsync.New( context.Background(), memdb.New(), newDefaultDBConfig(), @@ -170,7 +178,7 @@ func Test_Server_GetChangeProof(t *testing.T) { ops := make([]database.BatchOp, 0, 11) // add some key/values for i := 0; i < 10; i++ { - key := make([]byte, r.Intn(100)) + key := make([]byte, 32) // XXX: fix keylen _, err = r.Read(key) require.NoError(t, err) @@ -186,19 +194,26 @@ func Test_Server_GetChangeProof(t *testing.T) { _, err = r.Read(deleteKeyStart) require.NoError(t, err) - it := serverDB.NewIteratorWithStart(deleteKeyStart) - if it.Next() { - ops = append(ops, database.BatchOp{Key: 
it.Key(), Delete: true}) + deleteKey, ok := serverDB.IterateOneKey(deleteKeyStart) // XXX + if ok { + ops = append(ops, database.BatchOp{Key: deleteKey, Delete: true}) } - require.NoError(t, it.Error()) - it.Release() - view, err := serverDB.NewView( - context.Background(), - merkledb.ViewChanges{BatchOps: ops}, - ) - require.NoError(t, err) - require.NoError(t, view.CommitToDB(context.Background())) + batch := serverDB.NewBatch() + for _, op := range ops { + if op.Delete { + require.NoError(t, batch.Put(op.Key, nil)) + } else { + require.NoError(t, batch.Put(op.Key, op.Value)) + } + } + require.NoError(t, batch.Write()) + //view, err := serverDB.NewView( + // context.Background(), + // merkledb.ViewChanges{BatchOps: ops}, + //) + //require.NoError(t, err) + //require.NoError(t, view.CommitToDB(context.Background())) } endRoot, err := serverDB.GetMerkleRoot(context.Background()) diff --git a/x/sync/sync_test.go b/x/sync/sync_test.go index 8f065d838e5f..2638de65217b 100644 --- a/x/sync/sync_test.go +++ b/x/sync/sync_test.go @@ -14,13 +14,13 @@ import ( "github.com/prometheus/client_golang/prometheus" "github.com/stretchr/testify/require" - "github.com/ava-labs/avalanchego/database" "github.com/ava-labs/avalanchego/database/memdb" "github.com/ava-labs/avalanchego/ids" "github.com/ava-labs/avalanchego/network/p2p" "github.com/ava-labs/avalanchego/network/p2p/p2ptest" "github.com/ava-labs/avalanchego/utils/logging" "github.com/ava-labs/avalanchego/utils/maybe" + "github.com/ava-labs/avalanchego/x/ethsync" "github.com/ava-labs/avalanchego/x/merkledb" ) @@ -29,7 +29,7 @@ var _ p2p.Handler = (*waitingHandler)(nil) func Test_Creation(t *testing.T) { require := require.New(t) - db, err := merkledb.New( + db, err := ethsync.New( context.Background(), memdb.New(), newDefaultDBConfig(), @@ -52,7 +52,7 @@ func Test_Creation(t *testing.T) { func Test_Completion(t *testing.T) { require := require.New(t) - emptyDB, err := merkledb.New( + emptyDB, err := ethsync.New( context.Background(), memdb.New(), newDefaultDBConfig(), @@ -62,7 +62,7 @@ func Test_Completion(t *testing.T) { emptyRoot, err := emptyDB.GetMerkleRoot(context.Background()) require.NoError(err) - db, err := merkledb.New( + db, err := ethsync.New( context.Background(), memdb.New(), newDefaultDBConfig(), @@ -156,279 +156,13 @@ func Test_Midpoint(t *testing.T) { } } -func Test_Sync_FindNextKey_InSync(t *testing.T) { - require := require.New(t) - - now := time.Now().UnixNano() - t.Logf("seed: %d", now) - r := rand.New(rand.NewSource(now)) // #nosec G404 - dbToSync, err := generateTrie(t, r, 1000) - require.NoError(err) - syncRoot, err := dbToSync.GetMerkleRoot(context.Background()) - require.NoError(err) - - db, err := merkledb.New( - context.Background(), - memdb.New(), - newDefaultDBConfig(), - ) - require.NoError(err) - - ctx := context.Background() - syncer, err := NewManager(ManagerConfig{ - DB: db, - RangeProofClient: p2ptest.NewClient(t, ctx, NewGetRangeProofHandler(logging.NoLog{}, dbToSync), ids.GenerateTestNodeID(), ids.GenerateTestNodeID()), - ChangeProofClient: p2ptest.NewClient(t, ctx, NewGetChangeProofHandler(logging.NoLog{}, dbToSync), ids.GenerateTestNodeID(), ids.GenerateTestNodeID()), - TargetRoot: syncRoot, - SimultaneousWorkLimit: 5, - Log: logging.NoLog{}, - BranchFactor: merkledb.BranchFactor16, - }, prometheus.NewRegistry()) - require.NoError(err) - require.NotNil(syncer) - - require.NoError(syncer.Start(context.Background())) - require.NoError(syncer.Wait(context.Background())) - - proof, err := 
dbToSync.GetRangeProof(context.Background(), maybe.Nothing[[]byte](), maybe.Nothing[[]byte](), 500) - require.NoError(err) - - // the two dbs should be in sync, so next key should be nil - lastKey := proof.KeyValues[len(proof.KeyValues)-1].Key - nextKey, err := syncer.findNextKey(context.Background(), lastKey, maybe.Nothing[[]byte](), proof.EndProof) - require.NoError(err) - require.True(nextKey.IsNothing()) - - // add an extra value to sync db past the last key returned - newKey := midPoint(maybe.Some(lastKey), maybe.Nothing[[]byte]()) - newKeyVal := newKey.Value() - require.NoError(db.Put(newKeyVal, []byte{1})) - - // create a range endpoint that is before the newly added key, but after the last key - endPointBeforeNewKey := make([]byte, 0, 2) - for i := 0; i < len(newKeyVal); i++ { - endPointBeforeNewKey = append(endPointBeforeNewKey, newKeyVal[i]) - - // we need the new key to be after the last key - // don't subtract anything from the current byte if newkey and lastkey are equal - if lastKey[i] == newKeyVal[i] { - continue - } - - // if the first nibble is > 0, subtract "1" from it - if endPointBeforeNewKey[i] >= 16 { - endPointBeforeNewKey[i] -= 16 - break - } - // if the second nibble > 0, subtract 1 from it - if endPointBeforeNewKey[i] > 0 { - endPointBeforeNewKey[i] -= 1 - break - } - // both nibbles were 0, so move onto the next byte - } - - nextKey, err = syncer.findNextKey(context.Background(), lastKey, maybe.Some(endPointBeforeNewKey), proof.EndProof) - require.NoError(err) - - // next key would be after the end of the range, so it returns Nothing instead - require.True(nextKey.IsNothing()) -} - -func Test_Sync_FindNextKey_Deleted(t *testing.T) { - require := require.New(t) - - db, err := merkledb.New( - context.Background(), - memdb.New(), - newDefaultDBConfig(), - ) - require.NoError(err) - require.NoError(db.Put([]byte{0x10}, []byte{1})) - require.NoError(db.Put([]byte{0x11, 0x11}, []byte{2})) - - syncRoot, err := db.GetMerkleRoot(context.Background()) - require.NoError(err) - - ctx := context.Background() - syncer, err := NewManager(ManagerConfig{ - DB: db, - RangeProofClient: p2ptest.NewClient(t, ctx, NewGetRangeProofHandler(logging.NoLog{}, db), ids.GenerateTestNodeID(), ids.GenerateTestNodeID()), - ChangeProofClient: p2ptest.NewClient(t, ctx, NewGetChangeProofHandler(logging.NoLog{}, db), ids.GenerateTestNodeID(), ids.GenerateTestNodeID()), - TargetRoot: syncRoot, - SimultaneousWorkLimit: 5, - Log: logging.NoLog{}, - BranchFactor: merkledb.BranchFactor16, - }, prometheus.NewRegistry()) - require.NoError(err) - - // 0x12 was "deleted" and there should be no extra node in the proof since there was nothing with a common prefix - noExtraNodeProof, err := db.GetProof(context.Background(), []byte{0x12}) - require.NoError(err) - - // 0x11 was "deleted" and 0x11.0x11 should be in the exclusion proof - extraNodeProof, err := db.GetProof(context.Background(), []byte{0x11}) - require.NoError(err) - - // there is now another value in the range that needs to be sync'ed - require.NoError(db.Put([]byte{0x13}, []byte{3})) - - nextKey, err := syncer.findNextKey(context.Background(), []byte{0x12}, maybe.Some([]byte{0x20}), noExtraNodeProof.Path) - require.NoError(err) - require.Equal(maybe.Some([]byte{0x13}), nextKey) - - nextKey, err = syncer.findNextKey(context.Background(), []byte{0x11}, maybe.Some([]byte{0x20}), extraNodeProof.Path) - require.NoError(err) - require.Equal(maybe.Some([]byte{0x13}), nextKey) -} - -func Test_Sync_FindNextKey_BranchInLocal(t *testing.T) { - require := 
require.New(t) - - db, err := merkledb.New( - context.Background(), - memdb.New(), - newDefaultDBConfig(), - ) - require.NoError(err) - require.NoError(db.Put([]byte{0x11}, []byte{1})) - require.NoError(db.Put([]byte{0x11, 0x11}, []byte{2})) - - targetRoot, err := db.GetMerkleRoot(context.Background()) - require.NoError(err) - - proof, err := db.GetProof(context.Background(), []byte{0x11, 0x11}) - require.NoError(err) - - ctx := context.Background() - syncer, err := NewManager(ManagerConfig{ - DB: db, - RangeProofClient: p2ptest.NewClient(t, ctx, NewGetRangeProofHandler(logging.NoLog{}, db), ids.GenerateTestNodeID(), ids.GenerateTestNodeID()), - ChangeProofClient: p2ptest.NewClient(t, ctx, NewGetChangeProofHandler(logging.NoLog{}, db), ids.GenerateTestNodeID(), ids.GenerateTestNodeID()), - TargetRoot: targetRoot, - SimultaneousWorkLimit: 5, - Log: logging.NoLog{}, - BranchFactor: merkledb.BranchFactor16, - }, prometheus.NewRegistry()) - require.NoError(err) - require.NoError(db.Put([]byte{0x11, 0x15}, []byte{4})) - - nextKey, err := syncer.findNextKey(context.Background(), []byte{0x11, 0x11}, maybe.Some([]byte{0x20}), proof.Path) - require.NoError(err) - require.Equal(maybe.Some([]byte{0x11, 0x15}), nextKey) -} - -func Test_Sync_FindNextKey_BranchInReceived(t *testing.T) { - require := require.New(t) - - db, err := merkledb.New( - context.Background(), - memdb.New(), - newDefaultDBConfig(), - ) - require.NoError(err) - require.NoError(db.Put([]byte{0x11}, []byte{1})) - require.NoError(db.Put([]byte{0x12}, []byte{2})) - require.NoError(db.Put([]byte{0x12, 0xA0}, []byte{4})) - - targetRoot, err := db.GetMerkleRoot(context.Background()) - require.NoError(err) - - proof, err := db.GetProof(context.Background(), []byte{0x12}) - require.NoError(err) - - ctx := context.Background() - syncer, err := NewManager(ManagerConfig{ - DB: db, - RangeProofClient: p2ptest.NewClient(t, ctx, NewGetRangeProofHandler(logging.NoLog{}, db), ids.GenerateTestNodeID(), ids.GenerateTestNodeID()), - ChangeProofClient: p2ptest.NewClient(t, ctx, NewGetChangeProofHandler(logging.NoLog{}, db), ids.GenerateTestNodeID(), ids.GenerateTestNodeID()), - TargetRoot: targetRoot, - SimultaneousWorkLimit: 5, - Log: logging.NoLog{}, - BranchFactor: merkledb.BranchFactor16, - }, prometheus.NewRegistry()) - require.NoError(err) - require.NoError(db.Delete([]byte{0x12, 0xA0})) - - nextKey, err := syncer.findNextKey(context.Background(), []byte{0x12}, maybe.Some([]byte{0x20}), proof.Path) - require.NoError(err) - require.Equal(maybe.Some([]byte{0x12, 0xA0}), nextKey) -} - -func Test_Sync_FindNextKey_ExtraValues(t *testing.T) { - require := require.New(t) - - now := time.Now().UnixNano() - t.Logf("seed: %d", now) - r := rand.New(rand.NewSource(now)) // #nosec G404 - dbToSync, err := generateTrie(t, r, 1000) - require.NoError(err) - syncRoot, err := dbToSync.GetMerkleRoot(context.Background()) - require.NoError(err) - - db, err := merkledb.New( - context.Background(), - memdb.New(), - newDefaultDBConfig(), - ) - require.NoError(err) - - ctx := context.Background() - syncer, err := NewManager(ManagerConfig{ - DB: db, - RangeProofClient: p2ptest.NewClient(t, ctx, NewGetRangeProofHandler(logging.NoLog{}, dbToSync), ids.GenerateTestNodeID(), ids.GenerateTestNodeID()), - ChangeProofClient: p2ptest.NewClient(t, ctx, NewGetChangeProofHandler(logging.NoLog{}, dbToSync), ids.GenerateTestNodeID(), ids.GenerateTestNodeID()), - TargetRoot: syncRoot, - SimultaneousWorkLimit: 5, - Log: logging.NoLog{}, - BranchFactor: merkledb.BranchFactor16, - }, 
prometheus.NewRegistry()) - require.NoError(err) - require.NotNil(syncer) - - require.NoError(syncer.Start(context.Background())) - require.NoError(syncer.Wait(context.Background())) - - proof, err := dbToSync.GetRangeProof(context.Background(), maybe.Nothing[[]byte](), maybe.Nothing[[]byte](), 500) - require.NoError(err) - - // add an extra value to local db - lastKey := proof.KeyValues[len(proof.KeyValues)-1].Key - midpoint := midPoint(maybe.Some(lastKey), maybe.Nothing[[]byte]()) - midPointVal := midpoint.Value() - - require.NoError(db.Put(midPointVal, []byte{1})) - - // next key at prefix of newly added point - nextKey, err := syncer.findNextKey(context.Background(), lastKey, maybe.Nothing[[]byte](), proof.EndProof) - require.NoError(err) - require.True(nextKey.HasValue()) - - require.True(isPrefix(midPointVal, nextKey.Value())) - - require.NoError(db.Delete(midPointVal)) - - require.NoError(dbToSync.Put(midPointVal, []byte{1})) - - proof, err = dbToSync.GetRangeProof(context.Background(), maybe.Nothing[[]byte](), maybe.Some(lastKey), 500) - require.NoError(err) - - // next key at prefix of newly added point - nextKey, err = syncer.findNextKey(context.Background(), lastKey, maybe.Nothing[[]byte](), proof.EndProof) - require.NoError(err) - require.True(nextKey.HasValue()) - - // deal with odd length key - require.True(isPrefix(midPointVal, nextKey.Value())) -} - func TestFindNextKeyEmptyEndProof(t *testing.T) { require := require.New(t) now := time.Now().UnixNano() t.Logf("seed: %d", now) r := rand.New(rand.NewSource(now)) // #nosec G404 - db, err := merkledb.New( + db, err := ethsync.New( context.Background(), memdb.New(), newDefaultDBConfig(), @@ -449,11 +183,11 @@ func TestFindNextKeyEmptyEndProof(t *testing.T) { require.NotNil(syncer) for i := 0; i < 100; i++ { - lastReceivedKeyLen := r.Intn(16) + lastReceivedKeyLen := 32 // XXX: fix keylen lastReceivedKey := make([]byte, lastReceivedKeyLen) _, _ = r.Read(lastReceivedKey) // #nosec G404 - rangeEndLen := r.Intn(16) + rangeEndLen := 32 // XXX: fix keylen rangeEndBytes := make([]byte, rangeEndLen) _, _ = r.Read(rangeEndBytes) // #nosec G404 @@ -469,7 +203,7 @@ func TestFindNextKeyEmptyEndProof(t *testing.T) { nil, /* endProof */ ) require.NoError(err) - require.Equal(maybe.Some(append(lastReceivedKey, 0)), nextKey) + require.Less(bytes.Compare(lastReceivedKey, nextKey.Value()), 0) } } @@ -486,277 +220,6 @@ func isPrefix(data []byte, prefix []byte) bool { return bytes.HasPrefix(data, prefix) } -func Test_Sync_FindNextKey_DifferentChild(t *testing.T) { - require := require.New(t) - - now := time.Now().UnixNano() - t.Logf("seed: %d", now) - r := rand.New(rand.NewSource(now)) // #nosec G404 - dbToSync, err := generateTrie(t, r, 500) - require.NoError(err) - syncRoot, err := dbToSync.GetMerkleRoot(context.Background()) - require.NoError(err) - - db, err := merkledb.New( - context.Background(), - memdb.New(), - newDefaultDBConfig(), - ) - require.NoError(err) - - ctx := context.Background() - syncer, err := NewManager(ManagerConfig{ - DB: db, - RangeProofClient: p2ptest.NewClient(t, ctx, NewGetRangeProofHandler(logging.NoLog{}, dbToSync), ids.GenerateTestNodeID(), ids.GenerateTestNodeID()), - ChangeProofClient: p2ptest.NewClient(t, ctx, NewGetChangeProofHandler(logging.NoLog{}, dbToSync), ids.GenerateTestNodeID(), ids.GenerateTestNodeID()), - TargetRoot: syncRoot, - SimultaneousWorkLimit: 5, - Log: logging.NoLog{}, - BranchFactor: merkledb.BranchFactor16, - }, prometheus.NewRegistry()) - require.NoError(err) - require.NotNil(syncer) - 
require.NoError(syncer.Start(context.Background())) - require.NoError(syncer.Wait(context.Background())) - - proof, err := dbToSync.GetRangeProof(context.Background(), maybe.Nothing[[]byte](), maybe.Nothing[[]byte](), 100) - require.NoError(err) - lastKey := proof.KeyValues[len(proof.KeyValues)-1].Key - - // local db has a different child than remote db - lastKey = append(lastKey, 16) - require.NoError(db.Put(lastKey, []byte{1})) - - require.NoError(dbToSync.Put(lastKey, []byte{2})) - - proof, err = dbToSync.GetRangeProof(context.Background(), maybe.Nothing[[]byte](), maybe.Some(proof.KeyValues[len(proof.KeyValues)-1].Key), 100) - require.NoError(err) - - nextKey, err := syncer.findNextKey(context.Background(), proof.KeyValues[len(proof.KeyValues)-1].Key, maybe.Nothing[[]byte](), proof.EndProof) - require.NoError(err) - require.True(nextKey.HasValue()) - require.Equal(lastKey, nextKey.Value()) -} - -// Test findNextKey by computing the expected result in a naive, inefficient -// way and comparing it to the actual result -func TestFindNextKeyRandom(t *testing.T) { - now := time.Now().UnixNano() - t.Logf("seed: %d", now) - rand := rand.New(rand.NewSource(now)) // #nosec G404 - require := require.New(t) - - // Create a "remote" database and "local" database - remoteDB, err := merkledb.New( - context.Background(), - memdb.New(), - newDefaultDBConfig(), - ) - require.NoError(err) - - config := newDefaultDBConfig() - localDB, err := merkledb.New( - context.Background(), - memdb.New(), - config, - ) - require.NoError(err) - - var ( - numProofsToTest = 250 - numKeyValues = 250 - maxKeyLen = 256 - maxValLen = 256 - maxRangeStartLen = 8 - maxRangeEndLen = 8 - maxProofLen = 128 - ) - - // Put random keys into the databases - for _, db := range []database.Database{remoteDB, localDB} { - for i := 0; i < numKeyValues; i++ { - key := make([]byte, rand.Intn(maxKeyLen)) - _, _ = rand.Read(key) - val := make([]byte, rand.Intn(maxValLen)) - _, _ = rand.Read(val) - require.NoError(db.Put(key, val)) - } - } - - // Repeatedly generate end proofs from the remote database and compare - // the result of findNextKey to the expected result. - for proofIndex := 0; proofIndex < numProofsToTest; proofIndex++ { - // Generate a proof for a random key - var ( - rangeStart []byte - rangeEnd []byte - ) - // Generate a valid range start and end - for rangeStart == nil || bytes.Compare(rangeStart, rangeEnd) == 1 { - rangeStart = make([]byte, rand.Intn(maxRangeStartLen)+1) - _, _ = rand.Read(rangeStart) - rangeEnd = make([]byte, rand.Intn(maxRangeEndLen)+1) - _, _ = rand.Read(rangeEnd) - } - - startKey := maybe.Nothing[[]byte]() - if len(rangeStart) > 0 { - startKey = maybe.Some(rangeStart) - } - endKey := maybe.Nothing[[]byte]() - if len(rangeEnd) > 0 { - endKey = maybe.Some(rangeEnd) - } - - remoteProof, err := remoteDB.GetRangeProof( - context.Background(), - startKey, - endKey, - rand.Intn(maxProofLen)+1, - ) - require.NoError(err) - - if len(remoteProof.KeyValues) == 0 { - continue - } - lastReceivedKey := remoteProof.KeyValues[len(remoteProof.KeyValues)-1].Key - - // Commit the proof to the local database as we do - // in the actual syncer. - require.NoError(localDB.CommitRangeProof( - context.Background(), - startKey, - endKey, - remoteProof, - )) - - localProof, err := localDB.GetProof( - context.Background(), - lastReceivedKey, - ) - require.NoError(err) - - type keyAndID struct { - key merkledb.Key - id ids.ID - } - - // Set of key prefix/ID pairs proven by the remote database's end proof. 
- remoteKeyIDs := []keyAndID{} - for _, node := range remoteProof.EndProof { - for childIdx, childID := range node.Children { - remoteKeyIDs = append(remoteKeyIDs, keyAndID{ - key: node.Key.Extend(merkledb.ToToken(childIdx, merkledb.BranchFactorToTokenSize[config.BranchFactor])), - id: childID, - }) - } - } - - // Set of key prefix/ID pairs proven by the local database's proof. - localKeyIDs := []keyAndID{} - for _, node := range localProof.Path { - for childIdx, childID := range node.Children { - localKeyIDs = append(localKeyIDs, keyAndID{ - key: node.Key.Extend(merkledb.ToToken(childIdx, merkledb.BranchFactorToTokenSize[config.BranchFactor])), - id: childID, - }) - } - } - - // Sort in ascending order by key prefix. - serializedPathCompare := func(i, j keyAndID) int { - return i.key.Compare(j.key) - } - slices.SortFunc(remoteKeyIDs, serializedPathCompare) - slices.SortFunc(localKeyIDs, serializedPathCompare) - - // Filter out keys that are before the last received key - findBounds := func(keyIDs []keyAndID) (int, int) { - var ( - firstIdxInRange = len(keyIDs) - firstIdxInRangeFound = false - firstIdxOutOfRange = len(keyIDs) - ) - for i, keyID := range keyIDs { - if !firstIdxInRangeFound && bytes.Compare(keyID.key.Bytes(), lastReceivedKey) > 0 { - firstIdxInRange = i - firstIdxInRangeFound = true - continue - } - if bytes.Compare(keyID.key.Bytes(), rangeEnd) > 0 { - firstIdxOutOfRange = i - break - } - } - return firstIdxInRange, firstIdxOutOfRange - } - - remoteFirstIdxAfterLastReceived, remoteFirstIdxAfterEnd := findBounds(remoteKeyIDs) - remoteKeyIDs = remoteKeyIDs[remoteFirstIdxAfterLastReceived:remoteFirstIdxAfterEnd] - - localFirstIdxAfterLastReceived, localFirstIdxAfterEnd := findBounds(localKeyIDs) - localKeyIDs = localKeyIDs[localFirstIdxAfterLastReceived:localFirstIdxAfterEnd] - - // Find smallest difference between the set of key/ID pairs proven by - // the remote/local proofs for key/ID pairs after the last received key. - var ( - smallestDiffKey merkledb.Key - foundDiff bool - ) - for i := 0; i < len(remoteKeyIDs) && i < len(localKeyIDs); i++ { - // See if the keys are different. - smaller, bigger := remoteKeyIDs[i], localKeyIDs[i] - if serializedPathCompare(localKeyIDs[i], remoteKeyIDs[i]) == -1 { - smaller, bigger = localKeyIDs[i], remoteKeyIDs[i] - } - - if smaller.key != bigger.key || smaller.id != bigger.id { - smallestDiffKey = smaller.key - foundDiff = true - break - } - } - if !foundDiff { - // All the keys were equal. The smallest diff is the next key - // in the longer of the lists (if they're not same length.) 
- if len(remoteKeyIDs) < len(localKeyIDs) { - smallestDiffKey = localKeyIDs[len(remoteKeyIDs)].key - } else if len(remoteKeyIDs) > len(localKeyIDs) { - smallestDiffKey = remoteKeyIDs[len(localKeyIDs)].key - } - } - - // Get the actual value from the syncer - ctx := context.Background() - syncer, err := NewManager(ManagerConfig{ - DB: localDB, - RangeProofClient: p2ptest.NewClient(t, ctx, NewGetRangeProofHandler(logging.NoLog{}, remoteDB), ids.GenerateTestNodeID(), ids.GenerateTestNodeID()), - ChangeProofClient: p2ptest.NewClient(t, ctx, NewGetChangeProofHandler(logging.NoLog{}, remoteDB), ids.GenerateTestNodeID(), ids.GenerateTestNodeID()), - TargetRoot: ids.GenerateTestID(), - SimultaneousWorkLimit: 5, - Log: logging.NoLog{}, - BranchFactor: merkledb.BranchFactor16, - }, prometheus.NewRegistry()) - require.NoError(err) - - gotFirstDiff, err := syncer.findNextKey( - context.Background(), - lastReceivedKey, - endKey, - remoteProof.EndProof, - ) - require.NoError(err) - - if bytes.Compare(smallestDiffKey.Bytes(), rangeEnd) >= 0 { - // The smallest key which differs is after the range end so the - // next key to get should be nil because we're done fetching the range. - require.True(gotFirstDiff.IsNothing()) - } else { - require.Equal(smallestDiffKey.Bytes(), gotFirstDiff.Value()) - } - } -} - // Tests that we are able to sync to the correct root while the server is // updating func Test_Sync_Result_Correct_Root(t *testing.T) { @@ -766,13 +229,12 @@ func Test_Sync_Result_Correct_Root(t *testing.T) { tests := []struct { name string - db merkledb.MerkleDB - rangeProofClient func(db merkledb.MerkleDB) *p2p.Client - changeProofClient func(db merkledb.MerkleDB) *p2p.Client + rangeProofClient func(db DB) *p2p.Client + changeProofClient func(db DB) *p2p.Client }{ { name: "range proof bad response - too many leaves in response", - rangeProofClient: func(db merkledb.MerkleDB) *p2p.Client { + rangeProofClient: func(db DB) *p2p.Client { handler := newFlakyRangeProofHandler(t, db, func(response *merkledb.RangeProof) { response.KeyValues = append(response.KeyValues, merkledb.KeyValue{}) }) @@ -782,7 +244,7 @@ func Test_Sync_Result_Correct_Root(t *testing.T) { }, { name: "range proof bad response - removed first key in response", - rangeProofClient: func(db merkledb.MerkleDB) *p2p.Client { + rangeProofClient: func(db DB) *p2p.Client { handler := newFlakyRangeProofHandler(t, db, func(response *merkledb.RangeProof) { response.KeyValues = response.KeyValues[min(1, len(response.KeyValues)):] }) @@ -792,12 +254,12 @@ func Test_Sync_Result_Correct_Root(t *testing.T) { }, { name: "range proof bad response - removed first key in response and replaced proof", - rangeProofClient: func(db merkledb.MerkleDB) *p2p.Client { + rangeProofClient: func(db DB) *p2p.Client { handler := newFlakyRangeProofHandler(t, db, func(response *merkledb.RangeProof) { response.KeyValues = response.KeyValues[min(1, len(response.KeyValues)):] response.KeyValues = []merkledb.KeyValue{ { - Key: []byte("foo"), + Key: []byte("01234567890123456789012345678901"), Value: []byte("bar"), }, } @@ -818,7 +280,7 @@ func Test_Sync_Result_Correct_Root(t *testing.T) { }, { name: "range proof bad response - removed key from middle of response", - rangeProofClient: func(db merkledb.MerkleDB) *p2p.Client { + rangeProofClient: func(db DB) *p2p.Client { handler := newFlakyRangeProofHandler(t, db, func(response *merkledb.RangeProof) { i := rand.Intn(max(1, len(response.KeyValues)-1)) // #nosec G404 _ = slices.Delete(response.KeyValues, i, 
min(len(response.KeyValues), i+1)) @@ -829,7 +291,7 @@ func Test_Sync_Result_Correct_Root(t *testing.T) { }, { name: "range proof bad response - start and end proof nodes removed", - rangeProofClient: func(db merkledb.MerkleDB) *p2p.Client { + rangeProofClient: func(db DB) *p2p.Client { handler := newFlakyRangeProofHandler(t, db, func(response *merkledb.RangeProof) { response.StartProof = nil response.EndProof = nil @@ -840,7 +302,7 @@ func Test_Sync_Result_Correct_Root(t *testing.T) { }, { name: "range proof bad response - end proof removed", - rangeProofClient: func(db merkledb.MerkleDB) *p2p.Client { + rangeProofClient: func(db DB) *p2p.Client { handler := newFlakyRangeProofHandler(t, db, func(response *merkledb.RangeProof) { response.EndProof = nil }) @@ -850,7 +312,7 @@ func Test_Sync_Result_Correct_Root(t *testing.T) { }, { name: "range proof bad response - empty proof", - rangeProofClient: func(db merkledb.MerkleDB) *p2p.Client { + rangeProofClient: func(db DB) *p2p.Client { handler := newFlakyRangeProofHandler(t, db, func(response *merkledb.RangeProof) { response.StartProof = nil response.EndProof = nil @@ -862,7 +324,7 @@ func Test_Sync_Result_Correct_Root(t *testing.T) { }, { name: "range proof server flake", - rangeProofClient: func(db merkledb.MerkleDB) *p2p.Client { + rangeProofClient: func(db DB) *p2p.Client { return p2ptest.NewClient(t, context.Background(), &flakyHandler{ Handler: NewGetRangeProofHandler(logging.NoLog{}, db), c: &counter{m: 2}, @@ -871,7 +333,7 @@ func Test_Sync_Result_Correct_Root(t *testing.T) { }, { name: "change proof bad response - too many keys in response", - changeProofClient: func(db merkledb.MerkleDB) *p2p.Client { + changeProofClient: func(db DB) *p2p.Client { handler := newFlakyChangeProofHandler(t, db, func(response *merkledb.ChangeProof) { response.KeyChanges = append(response.KeyChanges, make([]merkledb.KeyChange, defaultRequestKeyLimit)...) 
 				})
@@ -881,7 +343,7 @@ func Test_Sync_Result_Correct_Root(t *testing.T) {
 		},
 		{
 			name: "change proof bad response - removed first key in response",
-			changeProofClient: func(db merkledb.MerkleDB) *p2p.Client {
+			changeProofClient: func(db DB) *p2p.Client {
 				handler := newFlakyChangeProofHandler(t, db, func(response *merkledb.ChangeProof) {
 					response.KeyChanges = response.KeyChanges[min(1, len(response.KeyChanges)):]
 				})
@@ -891,7 +353,7 @@ func Test_Sync_Result_Correct_Root(t *testing.T) {
 		},
 		{
 			name: "change proof bad response - removed key from middle of response",
-			changeProofClient: func(db merkledb.MerkleDB) *p2p.Client {
+			changeProofClient: func(db DB) *p2p.Client {
 				handler := newFlakyChangeProofHandler(t, db, func(response *merkledb.ChangeProof) {
 					i := rand.Intn(max(1, len(response.KeyChanges)-1)) // #nosec G404
 					_ = slices.Delete(response.KeyChanges, i, min(len(response.KeyChanges), i+1))
@@ -902,7 +364,7 @@ func Test_Sync_Result_Correct_Root(t *testing.T) {
 		},
 		{
 			name: "change proof bad response - all proof keys removed from response",
-			changeProofClient: func(db merkledb.MerkleDB) *p2p.Client {
+			changeProofClient: func(db DB) *p2p.Client {
 				handler := newFlakyChangeProofHandler(t, db, func(response *merkledb.ChangeProof) {
 					response.StartProof = nil
 					response.EndProof = nil
@@ -913,7 +375,7 @@ func Test_Sync_Result_Correct_Root(t *testing.T) {
 		},
 		{
 			name: "change proof flaky server",
-			changeProofClient: func(db merkledb.MerkleDB) *p2p.Client {
+			changeProofClient: func(db DB) *p2p.Client {
 				return p2ptest.NewClient(t, context.Background(), &flakyHandler{
 					Handler: NewGetChangeProofHandler(logging.NoLog{}, db),
 					c:       &counter{m: 2},
@@ -927,13 +389,20 @@ func Test_Sync_Result_Correct_Root(t *testing.T) {
 			require := require.New(t)
 			ctx := context.Background()
 
-			dbToSync, err := generateTrie(t, r, 3*maxKeyValuesLimit)
+			dbToSync, err := ethsync.New(
+				context.Background(),
+				memdb.New(),
+				newDefaultDBConfig(),
+			)
+			require.NoError(err)
+			batch := dbToSync.NewBatch()
+			err = generateWithKeyLenAndTrie(t, batch, r, 3*maxKeyValuesLimit, 32, 32)
 			require.NoError(err)
 
 			syncRoot, err := dbToSync.GetMerkleRoot(ctx)
 			require.NoError(err)
 
-			db, err := merkledb.New(
+			db, err := ethsync.New(
 				ctx,
 				memdb.New(),
 				newDefaultDBConfig(),
@@ -980,7 +449,7 @@ func Test_Sync_Result_Correct_Root(t *testing.T) {
 			// error because it has already reached the sync target before it
 			// is called.
 			for i := 0; i < 50; i++ {
-				addkey := make([]byte, r.Intn(50))
+				addkey := make([]byte, 32) // XXX: fix keylen
 				_, err = r.Read(addkey)
 				require.NoError(err)
 				val := make([]byte, r.Intn(50))
@@ -1016,12 +485,19 @@ func Test_Sync_Result_Correct_Root_With_Sync_Restart(t *testing.T) {
 	now := time.Now().UnixNano()
 	t.Logf("seed: %d", now)
 	r := rand.New(rand.NewSource(now)) // #nosec G404
-	dbToSync, err := generateTrie(t, r, 3*maxKeyValuesLimit)
+	dbToSync, err := ethsync.New(
+		context.Background(),
+		memdb.New(),
+		newDefaultDBConfig(),
+	)
+	require.NoError(err)
+	batch := dbToSync.NewBatch()
+	err = generateWithKeyLenAndTrie(t, batch, r, 3*maxKeyValuesLimit, 32, 32)
 	require.NoError(err)
 
 	syncRoot, err := dbToSync.GetMerkleRoot(context.Background())
 	require.NoError(err)
 
-	db, err := merkledb.New(
+	db, err := ethsync.New(
 		context.Background(),
 		memdb.New(),
 		newDefaultDBConfig(),
@@ -1078,22 +554,27 @@ func Test_Sync_Result_Correct_Root_With_Sync_Restart(t *testing.T) {
 }
 
 func Test_Sync_Result_Correct_Root_Update_Root_During(t *testing.T) {
-	t.Skip("FLAKY")
-
 	require := require.New(t)
 	now := time.Now().UnixNano()
 	t.Logf("seed: %d", now)
 	r := rand.New(rand.NewSource(now)) // #nosec G404
 
-	dbToSync, err := generateTrie(t, r, 3*maxKeyValuesLimit)
+	dbToSync, err := ethsync.New(
+		context.Background(),
+		memdb.New(),
+		newDefaultDBConfig(),
+	)
+	require.NoError(err)
+	batch := dbToSync.NewBatch()
+	err = generateWithKeyLenAndTrie(t, batch, r, 3*maxKeyValuesLimit, 32, 32)
 	require.NoError(err)
 
 	firstSyncRoot, err := dbToSync.GetMerkleRoot(context.Background())
 	require.NoError(err)
 
 	for x := 0; x < 100; x++ {
-		key := make([]byte, r.Intn(50))
+		key := make([]byte, 32) // XXX: fix keylen
 		_, err = r.Read(key)
 		require.NoError(err)
 
@@ -1103,22 +584,20 @@ func Test_Sync_Result_Correct_Root_Update_Root_During(t *testing.T) {
 
 		require.NoError(dbToSync.Put(key, val))
 
-		deleteKeyStart := make([]byte, r.Intn(50))
+		deleteKeyStart := make([]byte, 32) // XXX: fix keylen
 		_, err = r.Read(deleteKeyStart)
 		require.NoError(err)
 
-		it := dbToSync.NewIteratorWithStart(deleteKeyStart)
-		if it.Next() {
-			require.NoError(dbToSync.Delete(it.Key()))
+		nextKey, ok := dbToSync.IterateOneKey(deleteKeyStart) // XXX
+		if ok {
+			require.NoError(dbToSync.Put(nextKey, nil))
 		}
-		require.NoError(it.Error())
-		it.Release()
 	}
 
 	secondSyncRoot, err := dbToSync.GetMerkleRoot(context.Background())
 	require.NoError(err)
 
-	db, err := merkledb.New(
+	db, err := ethsync.New(
 		context.Background(),
 		memdb.New(),
 		newDefaultDBConfig(),
@@ -1180,7 +659,7 @@ func Test_Sync_Result_Correct_Root_Update_Root_During(t *testing.T) {
 func Test_Sync_UpdateSyncTarget(t *testing.T) {
 	require := require.New(t)
 
-	db, err := merkledb.New(
+	db, err := ethsync.New(
 		context.Background(),
 		memdb.New(),
 		newDefaultDBConfig(),
@@ -1235,8 +714,6 @@ func generateTrie(t *testing.T, r *rand.Rand, count int) (merkledb.MerkleDB, err
 }
 
 func generateTrieWithMinKeyLen(t *testing.T, r *rand.Rand, count int, minKeyLen int) (merkledb.MerkleDB, error) {
-	require := require.New(t)
-
 	db, err := merkledb.New(
 		context.Background(),
 		memdb.New(),
@@ -1245,14 +722,25 @@ func generateTrieWithMinKeyLen(t *testing.T, r *rand.Rand, count int, minKeyLen
 	if err != nil {
 		return nil, err
 	}
+	defaultMaxKeyLen := 50
+	return db, generateWithKeyLenAndTrie(t, db.NewBatch(), r, count, minKeyLen, defaultMaxKeyLen)
+}
+
+type batch interface {
+	Put(key, value []byte) error
+	Write() error
+}
+
+func generateWithKeyLenAndTrie(t *testing.T, batch batch, r *rand.Rand, count, minKeyLen, maxKeyLen int) error {
+	require := require.New(t)
+
 	var (
 		allKeys  [][]byte
 		seenKeys = make(map[string]struct{})
-		batch    = db.NewBatch()
 	)
 	genKey := func() []byte {
 		// new prefixed key
-		if len(allKeys) > 2 && r.Intn(25) < 10 {
+		if len(allKeys) > 2 && r.Intn(25) < 10 && false { // XXX: Disabled for now
 			prefix := allKeys[r.Intn(len(allKeys))]
 			key := make([]byte, r.Intn(50)+len(prefix))
 			copy(key, prefix)
@@ -1262,18 +750,19 @@ func generateTrieWithMinKeyLen(t *testing.T, r *rand.Rand, count int, minKeyLen
 		}
 
 		// new key
-		key := make([]byte, r.Intn(50)+minKeyLen)
-		_, err = r.Read(key)
+		keyLenRange := maxKeyLen - minKeyLen
+		key := make([]byte, r.Intn(keyLenRange+1)+minKeyLen)
+		_, err := r.Read(key)
 		require.NoError(err)
 		return key
 	}
 
 	for i := 0; i < count; {
-		value := make([]byte, r.Intn(51))
+		value := make([]byte, r.Intn(51)+1)
 		if len(value) == 0 {
 			value = nil
 		} else {
-			_, err = r.Read(value)
+			_, err := r.Read(value)
 			require.NoError(err)
 		}
 		key := genKey()
@@ -1282,10 +771,10 @@ func generateTrieWithMinKeyLen(t *testing.T, r *rand.Rand, count int, minKeyLen
 		}
 		allKeys = append(allKeys, key)
 		seenKeys[string(key)] = struct{}{}
-		if err = batch.Put(key, value); err != nil {
-			return db, err
+		if err := batch.Put(key, value); err != nil {
+			return err
 		}
 		i++
 	}
-	return db, batch.Write()
+	return batch.Write()
 }