diff --git a/.github/workflows/golangci-lint.yml b/.github/workflows/golangci-lint.yml index ac7d9c40..c5142d4c 100644 --- a/.github/workflows/golangci-lint.yml +++ b/.github/workflows/golangci-lint.yml @@ -3,7 +3,7 @@ on: push: branches: [ master ] pull_request: - branches: [ master, development, feat/*, rc/* ] + branches: [ master, feat/*, rc/* ] permissions: contents: read @@ -15,13 +15,14 @@ jobs: steps: - uses: actions/setup-go@v3 with: - go-version: 1.17.6 + go-version: 1.20.5 + cache: false - uses: actions/checkout@v3 - name: golangci-lint uses: golangci/golangci-lint-action@v3 with: # Required: the version of golangci-lint is required and must be specified without patch version: we always use the latest patch version. - version: v1.45.2 + version: v1.54.2 # Optional: working directory, useful for monorepos # working-directory: somedir diff --git a/.github/workflows/pr-build.yml b/.github/workflows/pr-build.yml index d022d309..42dedd2b 100644 --- a/.github/workflows/pr-build.yml +++ b/.github/workflows/pr-build.yml @@ -3,9 +3,9 @@ name: Go build on: push: - branches: [ master, development, feat/*, rc/* ] + branches: [ master, feat/*, rc/* ] pull_request: - branches: [ master, development, feat/*, rc/* ] + branches: [ master, feat/*, rc/* ] jobs: build: diff --git a/.github/workflows/pr-integration-tests.yml b/.github/workflows/pr-integration-tests.yml index 54e20874..93500145 100644 --- a/.github/workflows/pr-integration-tests.yml +++ b/.github/workflows/pr-integration-tests.yml @@ -2,9 +2,9 @@ name: Integration tests on: push: - branches: [ master, development, feat/*, rc/* ] + branches: [ master, feat/*, rc/* ] pull_request: - branches: [ master, development, feat/*, rc/* ] + branches: [ master, feat/*, rc/* ] jobs: test-0: diff --git a/.github/workflows/pr-tests.yml b/.github/workflows/pr-tests.yml index 67f814e3..cb27be4b 100644 --- a/.github/workflows/pr-tests.yml +++ b/.github/workflows/pr-tests.yml @@ -2,9 +2,9 @@ name: Tests on: push: - branches: [ 
master, development, feat/*, rc/* ] + branches: [ master, feat/*, rc/* ] pull_request: - branches: [ master, development, feat/*, rc/* ] + branches: [ master, feat/*, rc/* ] jobs: test: diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..d4436ca2 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,26 @@ +FROM golang:1.20.5 as builder + +RUN apt-get update && apt-get install -y + +WORKDIR /multiversx +COPY . . + +WORKDIR /multiversx/cmd/elasticindexer + +RUN go build -o elasticindexer + +# ===== SECOND STAGE ====== +FROM ubuntu:22.04 +RUN apt-get update && apt-get install -y + +RUN useradd -m -u 1000 appuser +USER appuser + +COPY --from=builder /multiversx/cmd/elasticindexer /multiversx + +EXPOSE 22111 + +WORKDIR /multiversx + +ENTRYPOINT ["./elasticindexer"] +CMD ["--log-level", "*:DEBUG"] diff --git a/Makefile b/Makefile index 1dd35db8..a000f850 100644 --- a/Makefile +++ b/Makefile @@ -32,3 +32,13 @@ integration-tests-open-search: go test -v ./integrationtests -tags integrationtests cd scripts && /bin/bash script.sh delete cd scripts && /bin/bash script.sh stop_open_search + +INDEXER_IMAGE_NAME="elasticindexer" +INDEXER_IMAGE_TAG="latest" +DOCKER_FILE=Dockerfile + +docker-build: + docker build \ + -t ${INDEXER_IMAGE_NAME}:${INDEXER_IMAGE_TAG} \ + -f ${DOCKER_FILE} \ + . diff --git a/README.md b/README.md index c9e05ee8..68dfade9 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,148 @@ -#### mx-chain-es-indexer-go +## mx-chain-es-indexer-go -Is a go module that is used in mx-chain-go repository. +Is a versatile component designed to enhance the data management capabilities of the `mx-chain-go` repository. -This module is responsible for the interaction with the Elasticsearch database. +### Overview -mx-chain-es-indexer-go module will prepare all the information in a specific format for the external database. +This module's flexibility enables it to be used in two distinct modes: as a Go module or as a separate microservice. 
+ +### Features + +- **Data Transformation**: `mx-chain-es-indexer-go` excels at transforming raw data generated by the `mx-chain-go` repository into a structured format suitable for indexing and storage. + +- **Indexing**: The module indexes the transformed data within Elasticsearch, enhancing query performance and data retrieval. + +- **Microservice Mode**: In microservice mode, mx-chain-es-indexer-go operates as a standalone microservice that communicates with mx-chain-go via WebSocket connections. This architecture promotes modularity and scalability. + +- **Go Module Mode**: The `mx-chain-go` repository can include the module directly. This makes it easy to manage data without requiring a separate microservice. + +### Running as a Separate Microservice + +Running the `mx-chain-es-indexer-go` module as a **microservice** allows you to efficiently manage data indexing and storage while maintaining +modularity and scalability. This guide outlines the steps to deploy the module as a separate microservice that communicates with +the `mx-chain-go` repository over `WebSocket` connections. + +### Monitoring Endpoints + +The `mx-chain-es-indexer-go` microservice provides monitoring endpoints that allow you to keep track of its health, performance, and other vital statistics. +These endpoints are essential for maintaining the stability and efficiency of the microservice. + +#### Monitoring Endpoints + +`/status/metrics` + +This endpoint exposes various metrics about the microservice's internal operations. These metrics are formatted in a way that is suitable for consumption +by monitoring and alerting systems. + +HTTP Method: **GET** + +Response: Metrics are presented in JSON format for easy integration with monitoring and alerting systems. + +`/status/prometheus-metrics` + +This endpoint provides Prometheus-compatible metrics in a specific format for easy integration with +Prometheus monitoring systems. 
+ +HTTP Method: **GET** + +Response: Metrics are formatted in a way that Prometheus can scrape and ingest for monitoring and alerting purposes. + + + +### Prerequisites +Before proceeding, ensure you have the following prerequisites: +- Go programming environment set up. +- Access to an Elasticsearch database instance. +- One has to setup one or multiple observers. For running an observing squad, these [docs](https://docs.multiversx.com/integrators/observing-squad/) cover the whole process. +The required configs for launching an observer/s with a driver attached, can be found [here](https://github.com/multiversx/mx-chain-go/blob/master/cmd/node/config/external.toml). + +The corresponding config section for enabling the driver: + +```toml +[[HostDriversConfig]] + # This flag shall only be used for observer nodes + Enabled = true + # This flag will start the WebSocket connector as server or client (can be "client" or "server") + Mode = "client" + # URL for the WebSocket client/server connection + # This value represents the IP address and port number that the WebSocket client or server will use to establish a connection. + URL = "127.0.0.1:22111" + # After a message will be sent it will wait for an ack message if this flag is enabled + WithAcknowledge = true + # The duration in seconds to wait for an acknowledgment message, after this time passes an error will be returned + AcknowledgeTimeoutInSec = 60 + # Possible values: json, gogo protobuf. Should be compatible with mx-chain-es-indexer-go config + MarshallerType = "json" + # The number of seconds when the client will try again to send the data + RetryDurationInSec = 5 + # Sets if, in case of data payload processing error, we should block or not the advancement to the next processing event. Set this to true if you wish the node to stop processing blocks if the client/server encounters errors while processing requests. 
+ BlockingAckOnError = true + # Set to true to drop messages if there is no active WebSocket connection to send to. + DropMessagesIfNoConnection = false +``` + + +#### Install +Using the `cmd/elasticindexer` package as root, execute the following commands: +- install go dependencies: `go install` +- build executable: `go build -o elasticindexer` + +#### Launching the elasticindexer + +CLI: run `--help` to get the command line parameters +``` +./elasticindexer --help +``` + +Before launching the `elasticindexer` service, it has to be configured so that it runs with the correct configuration. + +The **_[prefs.toml](./cmd/elasticindexer/config/prefs.toml)_** file: + +```toml +[config] + disabled-indices = [] + [config.web-socket] + # URL for the WebSocket client/server connection + # This value represents the IP address and port number that the WebSocket client or server will use to establish a connection. + url = "localhost:22111" + # This flag describes the mode to start the WebSocket connector. Can be "client" or "server" + mode = "server" + # Possible values: json, gogo protobuf. 
Should be compatible with mx-chain-node outport driver config + data-marshaller-type = "json" + # Retry duration (receive/send ack signal) in seconds + retry-duration-in-seconds = 5 + # Signals if in case of data payload processing error, we should send the ack signal or not + blocking-ack-on-error = true + # After a message will be sent it will wait for an ack message if this flag is enabled + with-acknowledge = true + # The duration in seconds to wait for an acknowledgment message, after this time passes an error will be returned + acknowledge-timeout-in-seconds = 50 + + [config.elastic-cluster] + use-kibana = false + url = "http://localhost:9200" + username = "" + password = "" + bulk-request-max-size-in-bytes = 4194304 # 4MB +``` + +The _**[api.toml](./cmd/elasticindexer/config/api.toml)**_ file: +```toml +rest-api-interface = ":8080" + +[api-packages] + +[api-packages.status] + routes = [ + { name = "/metrics", open = true }, + { name = "/prometheus-metrics", open = true } + ] +``` + +After the configuration file is set up, the `elasticindexer` instance can be launched. + +### Contribution + +Contributions to the `mx-chain-es-indexer-go` module are welcomed. Whether you're interested in improving its features, +extending its capabilities, or addressing issues, your contributions can help the community make the module even more robust. 
diff --git a/api/gin/httpServer.go b/api/gin/httpServer.go new file mode 100644 index 00000000..1f7d935f --- /dev/null +++ b/api/gin/httpServer.go @@ -0,0 +1,59 @@ +package gin + +import ( + "context" + "errors" + "net/http" + "time" + + logger "github.com/multiversx/mx-chain-logger-go" +) + +var log = logger.GetOrCreate("api/gin") + +// ErrNilHttpServer signals that a nil http server has been provided +var ErrNilHttpServer = errors.New("nil http server") + +type httpServer struct { + server server +} + +// NewHttpServer returns a new instance of httpServer +func NewHttpServer(server server) (*httpServer, error) { + if server == nil { + return nil, ErrNilHttpServer + } + + return &httpServer{ + server: server, + }, nil +} + +// Start will handle the starting of the gin web server. This call is blocking, and it should be +// called on a go routine (different from the main one) +func (h *httpServer) Start() { + err := h.server.ListenAndServe() + if err == nil { + return + } + + if err == http.ErrServerClosed { + log.Debug("ListenAndServe - webserver closed") + return + } + + log.Error("could not start webserver", "error", err.Error()) +} + +// Close will handle the stopping of the gin web server +func (h *httpServer) Close() error { + ctx, cancel := context.WithTimeout(context.Background(), time.Second) + defer cancel() + + return h.server.Shutdown(ctx) +} + +// IsInterfaceNil returns true if there is no value under the interface +func (h *httpServer) IsInterfaceNil() bool { + return h == nil +} diff --git a/api/gin/interface.go b/api/gin/interface.go new file mode 100644 index 00000000..6773fe65 --- /dev/null +++ b/api/gin/interface.go @@ -0,0 +1,8 @@ +package gin + +import "context" + +type server interface { + ListenAndServe() error + Shutdown(ctx context.Context) error +} diff --git a/api/gin/webServer.go b/api/gin/webServer.go new file mode 100644 index 00000000..048442e6 --- /dev/null +++ b/api/gin/webServer.go @@ -0,0 +1,126 @@ +package gin + +import ( + "fmt" 
+ "net/http" + "sync" + + "github.com/gin-contrib/cors" + "github.com/gin-gonic/gin" + "github.com/multiversx/mx-chain-core-go/core/check" + "github.com/multiversx/mx-chain-es-indexer-go/api/groups" + "github.com/multiversx/mx-chain-es-indexer-go/api/shared" + "github.com/multiversx/mx-chain-es-indexer-go/config" +) + +const ( + webServerOffString = "off" +) + +// ArgsWebServer holds the arguments needed for a webServer +type ArgsWebServer struct { + Facade shared.FacadeHandler + ApiConfig config.ApiRoutesConfig +} + +type webServer struct { + sync.RWMutex + facade shared.FacadeHandler + apiConfig config.ApiRoutesConfig + groups map[string]shared.GroupHandler + httpServer shared.HttpServerCloser +} + +// NewWebServer will create a new instance of the webServer +func NewWebServer(args ArgsWebServer) (*webServer, error) { + return &webServer{ + facade: args.Facade, + apiConfig: args.ApiConfig, + }, nil +} + +// StartHttpServer will start the http server +func (ws *webServer) StartHttpServer() error { + ws.Lock() + defer ws.Unlock() + + apiInterface := ws.apiConfig.RestApiInterface + if apiInterface == webServerOffString { + log.Debug("web server is turned off") + return nil + } + + var engine *gin.Engine + gin.DefaultWriter = &ginWriter{} + gin.DefaultErrorWriter = &ginErrorWriter{} + gin.DisableConsoleColor() + gin.SetMode(gin.ReleaseMode) + + engine = gin.Default() + cfg := cors.DefaultConfig() + cfg.AllowAllOrigins = true + cfg.AddAllowHeaders("Authorization") + engine.Use(cors.New(cfg)) + + err := ws.createGroups() + if err != nil { + return err + } + + ws.registerRoutes(engine) + + s := &http.Server{Addr: apiInterface, Handler: engine} + log.Debug("creating gin web sever", "interface", apiInterface) + ws.httpServer, err = NewHttpServer(s) + if err != nil { + return err + } + + log.Debug("starting web server") + go ws.httpServer.Start() + + return nil +} + +func (ws *webServer) createGroups() error { + groupsMap := make(map[string]shared.GroupHandler) + + 
statusGroup, err := groups.NewStatusGroup(ws.facade) + if err != nil { + return err + } + groupsMap["status"] = statusGroup + + ws.groups = groupsMap + + return nil +} + +func (ws *webServer) registerRoutes(ginRouter *gin.Engine) { + for groupName, groupHandler := range ws.groups { + log.Debug("registering gin API group", "group name", groupName) + ginGroup := ginRouter.Group(fmt.Sprintf("/%s", groupName)) + groupHandler.RegisterRoutes(ginGroup, ws.apiConfig) + } +} + +// Close will handle the closing of inner components +func (ws *webServer) Close() error { + var err error + ws.Lock() + if !check.IfNil(ws.httpServer) { + err = ws.httpServer.Close() + } + ws.Unlock() + + if err != nil { + return fmt.Errorf("%w while closing the http server in gin/webServer", err) + } + + return nil +} + +// IsInterfaceNil returns true if there is no value under the interface +func (ws *webServer) IsInterfaceNil() bool { + return ws == nil +} diff --git a/api/gin/writters.go b/api/gin/writters.go new file mode 100644 index 00000000..ea73cceb --- /dev/null +++ b/api/gin/writters.go @@ -0,0 +1,25 @@ +package gin + +import "bytes" + +type ginWriter struct { +} + +// Write will output the message using mx-chain-logger-go +func (gv *ginWriter) Write(p []byte) (n int, err error) { + trimmed := bytes.TrimSpace(p) + log.Trace("gin server", "message", string(trimmed)) + + return len(p), nil +} + +type ginErrorWriter struct { +} + +// Write will output the error using mx-chain-logger-go +func (gev *ginErrorWriter) Write(p []byte) (n int, err error) { + trimmed := bytes.TrimSpace(p) + log.Trace("gin server", "error", string(trimmed)) + + return len(p), nil +} diff --git a/api/groups/baseGroup.go b/api/groups/baseGroup.go new file mode 100644 index 00000000..a5b03180 --- /dev/null +++ b/api/groups/baseGroup.go @@ -0,0 +1,69 @@ +package groups + +import ( + "strings" + + "github.com/gin-gonic/gin" + "github.com/multiversx/mx-chain-es-indexer-go/api/shared" + 
"github.com/multiversx/mx-chain-es-indexer-go/config" + logger "github.com/multiversx/mx-chain-logger-go" +) + +var log = logger.GetOrCreate("api/groups") + +type endpointProperties struct { + isOpen bool +} + +type baseGroup struct { + endpoints []*shared.EndpointHandlerData +} + +// GetEndpoints returns all the providers specific to the group +func (bg *baseGroup) GetEndpoints() []*shared.EndpointHandlerData { + return bg.endpoints +} + +// RegisterRoutes will register all the providers to the given web server +func (bg *baseGroup) RegisterRoutes( + ws *gin.RouterGroup, + apiConfig config.ApiRoutesConfig, +) { + for _, handlerData := range bg.endpoints { + properties := getEndpointProperties(ws, handlerData.Path, apiConfig) + + if !properties.isOpen { + log.Debug("endpoint is closed", "path", handlerData.Path) + continue + } + + ws.Handle(handlerData.Method, handlerData.Path, handlerData.Handler) + } +} + +func getEndpointProperties(ws *gin.RouterGroup, path string, apiConfig config.ApiRoutesConfig) endpointProperties { + basePath := ws.BasePath() + + // ws.BasePath will return paths like /group or /v1.0/group, so we need the last token after splitting by / + splitPath := strings.Split(basePath, "/") + basePath = splitPath[len(splitPath)-1] + + group, ok := apiConfig.APIPackages[basePath] + if !ok { + return endpointProperties{ + isOpen: false, + } + } + + for _, route := range group.Routes { + if route.Name == path { + return endpointProperties{ + isOpen: route.Open, + } + } + } + + return endpointProperties{ + isOpen: false, + } +} diff --git a/api/groups/statusGroup.go b/api/groups/statusGroup.go new file mode 100644 index 00000000..4f9e15f0 --- /dev/null +++ b/api/groups/statusGroup.go @@ -0,0 +1,79 @@ +package groups + +import ( + "fmt" + "net/http" + + "github.com/gin-gonic/gin" + "github.com/multiversx/mx-chain-core-go/core/check" + "github.com/multiversx/mx-chain-es-indexer-go/api/shared" + "github.com/multiversx/mx-chain-es-indexer-go/core" +) + +const ( 
+ metricsPath = "/metrics" + prometheusMetricsPath = "/prometheus-metrics" +) + +type statusGroup struct { + *baseGroup + facade shared.FacadeHandler +} + +// NewStatusGroup returns a new instance of status group +func NewStatusGroup(facade shared.FacadeHandler) (*statusGroup, error) { + if check.IfNil(facade) { + return nil, fmt.Errorf("%w for status group", core.ErrNilFacadeHandler) + } + + sg := &statusGroup{ + facade: facade, + baseGroup: &baseGroup{}, + } + + endpoints := []*shared.EndpointHandlerData{ + { + Path: metricsPath, + Handler: sg.getMetrics, + Method: http.MethodGet, + }, + { + Path: prometheusMetricsPath, + Handler: sg.getPrometheusMetrics, + Method: http.MethodGet, + }, + } + sg.endpoints = endpoints + + return sg, nil +} + +// getMetrics will expose endpoints statistics in json format +func (sg *statusGroup) getMetrics(c *gin.Context) { + metricsResults := sg.facade.GetMetrics() + + returnStatus(c, gin.H{"metrics": metricsResults}, http.StatusOK, "", "successful") +} + +// getPrometheusMetrics will expose proxy metrics in prometheus format +func (sg *statusGroup) getPrometheusMetrics(c *gin.Context) { + metricsResults := sg.facade.GetMetricsForPrometheus() + + c.String(http.StatusOK, metricsResults) +} + +// IsInterfaceNil returns true if there is no value under the interface +func (sg *statusGroup) IsInterfaceNil() bool { + return sg == nil +} + +func returnStatus(c *gin.Context, data interface{}, httpStatus int, err string, code string) { + c.JSON( + httpStatus, + shared.GenericAPIResponse{ + Data: data, + Error: err, + Code: code, + }, + ) +} diff --git a/api/shared/interface.go b/api/shared/interface.go new file mode 100644 index 00000000..98aea754 --- /dev/null +++ b/api/shared/interface.go @@ -0,0 +1,30 @@ +package shared + +import ( + "github.com/gin-gonic/gin" + "github.com/multiversx/mx-chain-es-indexer-go/config" + "github.com/multiversx/mx-chain-es-indexer-go/core/request" +) + +// GroupHandler defines the actions needed to be 
performed by a gin API group +type GroupHandler interface { + RegisterRoutes( + ws *gin.RouterGroup, + apiConfig config.ApiRoutesConfig, + ) + IsInterfaceNil() bool +} + +// FacadeHandler defines all the methods that a facade should implement +type FacadeHandler interface { + GetMetrics() map[string]*request.MetricsResponse + GetMetricsForPrometheus() string + IsInterfaceNil() bool +} + +// HttpServerCloser defines the basic actions of starting and closing that a web server should be able to do +type HttpServerCloser interface { + Start() + Close() error + IsInterfaceNil() bool +} diff --git a/api/shared/shared.go b/api/shared/shared.go new file mode 100644 index 00000000..e8cf9356 --- /dev/null +++ b/api/shared/shared.go @@ -0,0 +1,35 @@ +package shared + +import "github.com/gin-gonic/gin" + +// MiddlewarePosition is the type that specifies the position of a middleware relative to the base endpoint handler +type MiddlewarePosition bool + +const ( + // Before indicates that the middleware should be used before the base endpoint handler + Before MiddlewarePosition = true + + // After indicates that the middleware should be used after the base endpoint handler + After MiddlewarePosition = false +) + +// AdditionalMiddleware holds the data needed for adding a middleware to an API endpoint +type AdditionalMiddleware struct { + Middleware gin.HandlerFunc + Position MiddlewarePosition +} + +// EndpointHandlerData holds the items needed for creating a new gin HTTP endpoint +type EndpointHandlerData struct { + Path string + Method string + Handler gin.HandlerFunc + AdditionalMiddlewares []AdditionalMiddleware +} + +// GenericAPIResponse defines the structure of all responses on API endpoints +type GenericAPIResponse struct { + Data interface{} `json:"data"` + Error string `json:"error"` + Code string `json:"code"` +} diff --git a/client/elasticClient.go b/client/elasticClient.go index dc56be6e..3f0a66ed 100644 --- a/client/elasticClient.go +++ b/client/elasticClient.go @@ 
-93,18 +93,8 @@ func (ec *elasticClient) CheckAndCreateAlias(alias string, indexName string) err return ec.createAlias(alias, indexName) } -// DoRequest will do a request to elastic server -func (ec *elasticClient) DoRequest(req *esapi.IndexRequest) error { - res, err := req.Do(context.Background(), ec.client) - if err != nil { - return err - } - - return parseResponse(res, nil, elasticDefaultErrorResponseHandler) -} - // DoBulkRequest will do a bulk of request to elastic server -func (ec *elasticClient) DoBulkRequest(buff *bytes.Buffer, index string) error { +func (ec *elasticClient) DoBulkRequest(ctx context.Context, buff *bytes.Buffer, index string) error { reader := bytes.NewReader(buff.Bytes()) options := make([]func(*esapi.BulkRequest), 0) @@ -112,6 +102,8 @@ func (ec *elasticClient) DoBulkRequest(buff *bytes.Buffer, index string) error { options = append(options, ec.client.Bulk.WithIndex(index)) } + options = append(options, ec.client.Bulk.WithContext(ctx)) + res, err := ec.client.Bulk( reader, options..., @@ -126,7 +118,7 @@ func (ec *elasticClient) DoBulkRequest(buff *bytes.Buffer, index string) error { } // DoMultiGet wil do a multi get request to Elasticsearch server -func (ec *elasticClient) DoMultiGet(ids []string, index string, withSource bool, resBody interface{}) error { +func (ec *elasticClient) DoMultiGet(ctx context.Context, ids []string, index string, withSource bool, resBody interface{}) error { obj := getDocumentsByIDsQuery(ids, withSource) body, err := encode(obj) if err != nil { @@ -136,6 +128,7 @@ func (ec *elasticClient) DoMultiGet(ids []string, index string, withSource bool, res, err := ec.client.Mget( &body, ec.client.Mget.WithIndex(index), + ec.client.Mget.WithContext(ctx), ) if err != nil { log.Warn("elasticClient.DoMultiGet", @@ -154,9 +147,10 @@ func (ec *elasticClient) DoMultiGet(ids []string, index string, withSource bool, } // DoQueryRemove will do a query remove to elasticsearch server -func (ec *elasticClient) 
DoQueryRemove(index string, body *bytes.Buffer) error { - if err := ec.doRefresh(index); err != nil { - log.Warn("elasticClient.doRefresh", "cannot do refresh", err.Error()) +func (ec *elasticClient) DoQueryRemove(ctx context.Context, index string, body *bytes.Buffer) error { + err := ec.doRefresh(index) + if err != nil { + log.Warn("elasticClient.doRefresh", "cannot do refresh", err) } res, err := ec.client.DeleteByQuery( @@ -164,16 +158,17 @@ func (ec *elasticClient) DoQueryRemove(index string, body *bytes.Buffer) error { body, ec.client.DeleteByQuery.WithIgnoreUnavailable(true), ec.client.DeleteByQuery.WithConflicts(esConflictsPolicy), + ec.client.DeleteByQuery.WithContext(ctx), ) if err != nil { - log.Warn("elasticClient.DoQueryRemove", "cannot do query remove", err.Error()) + log.Warn("elasticClient.DoQueryRemove", "cannot do query remove", err) return err } err = parseResponse(res, nil, elasticDefaultErrorResponseHandler) if err != nil { - log.Warn("elasticClient.DoQueryRemove", "error parsing response", err.Error()) + log.Warn("elasticClient.DoQueryRemove", "error parsing response", err) return err } @@ -329,12 +324,12 @@ func (ec *elasticClient) createAlias(alias string, index string) error { } // UpdateByQuery will update all the documents that match the provided query from the provided index -func (ec *elasticClient) UpdateByQuery(index string, buff *bytes.Buffer) error { +func (ec *elasticClient) UpdateByQuery(ctx context.Context, index string, buff *bytes.Buffer) error { reader := bytes.NewReader(buff.Bytes()) - res, err := ec.client.UpdateByQuery( []string{index}, ec.client.UpdateByQuery.WithBody(reader), + ec.client.UpdateByQuery.WithContext(ctx), ) if err != nil { return err diff --git a/client/elasticClientScroll.go b/client/elasticClientScroll.go index 1bda8a88..1e1c5037 100644 --- a/client/elasticClientScroll.go +++ b/client/elasticClientScroll.go @@ -14,10 +14,11 @@ import ( ) // DoCountRequest will get the number of elements that correspond with 
the provided query -func (ec *elasticClient) DoCountRequest(index string, body []byte) (uint64, error) { +func (ec *elasticClient) DoCountRequest(ctx context.Context, index string, body []byte) (uint64, error) { res, err := ec.client.Count( ec.client.Count.WithIndex(index), ec.client.Count.WithBody(bytes.NewBuffer(body)), + ec.client.Count.WithContext(ctx), ) if err != nil { return 0, err @@ -38,6 +39,7 @@ func (ec *elasticClient) DoCountRequest(index string, body []byte) (uint64, erro // DoScrollRequest will perform a documents request using scroll api func (ec *elasticClient) DoScrollRequest( + ctx context.Context, index string, body []byte, withSource bool, @@ -51,6 +53,7 @@ func (ec *elasticClient) DoScrollRequest( ec.client.Search.WithIndex(index), ec.client.Search.WithBody(bytes.NewBuffer(body)), ec.client.Search.WithSource(strconv.FormatBool(withSource)), + ec.client.Search.WithContext(ctx), ) if err != nil { return err diff --git a/client/elasticClientScroll_test.go b/client/elasticClientScroll_test.go index 013ef83f..b0fe3d7b 100644 --- a/client/elasticClientScroll_test.go +++ b/client/elasticClientScroll_test.go @@ -1,6 +1,7 @@ package client import ( + "context" "io/ioutil" "net/http" "net/http/httptest" @@ -32,7 +33,7 @@ func TestElasticClient_DoCountRequest(t *testing.T) { Logger: &logging.CustomLogger{}, }) - count, err := esClient.DoCountRequest("tokens", []byte(`{}`)) + count, err := esClient.DoCountRequest(context.Background(), "tokens", []byte(`{}`)) require.Nil(t, err) require.Equal(t, uint64(112671), count) } diff --git a/client/elasticClient_test.go b/client/elasticClient_test.go index 67dbb494..96b552ec 100644 --- a/client/elasticClient_test.go +++ b/client/elasticClient_test.go @@ -1,6 +1,7 @@ package client import ( + "context" "io/ioutil" "net/http" "net/http/httptest" @@ -63,12 +64,12 @@ func TestElasticClient_DoMultiGet(t *testing.T) { ids := []string{"id"} res := &data.ResponseTokens{} - err := esClient.DoMultiGet(ids, "tokens", true, 
res) + err := esClient.DoMultiGet(context.Background(), ids, "tokens", true, res) require.Nil(t, err) require.Len(t, res.Docs, 3) resMap := make(objectsMap) - err = esClient.DoMultiGet(ids, "tokens", true, &resMap) + err = esClient.DoMultiGet(context.Background(), ids, "tokens", true, &resMap) require.Nil(t, err) _, ok := resMap["docs"] diff --git a/client/transport/transport.go b/client/transport/transport.go new file mode 100644 index 00000000..3b8542e8 --- /dev/null +++ b/client/transport/transport.go @@ -0,0 +1,67 @@ +package transport + +import ( + "errors" + "fmt" + "net/http" + "time" + + "github.com/multiversx/mx-chain-core-go/core/check" + "github.com/multiversx/mx-chain-es-indexer-go/core" + "github.com/multiversx/mx-chain-es-indexer-go/core/request" + "github.com/multiversx/mx-chain-es-indexer-go/metrics" +) + +var errNilRequest = errors.New("nil request") + +type metricsTransport struct { + statusMetrics core.StatusMetricsHandler + transport http.RoundTripper +} + +// NewMetricsTransport will create a new instance of metricsTransport +func NewMetricsTransport(statusMetrics core.StatusMetricsHandler) (*metricsTransport, error) { + if check.IfNil(statusMetrics) { + return nil, core.ErrNilMetricsHandler + } + + return &metricsTransport{ + statusMetrics: statusMetrics, + transport: http.DefaultTransport, + }, nil +} + +// RoundTrip implements the http.RoundTripper interface and is used as a wrapper around the underlying +// transport to collect and record metrics related to the HTTP request/response cycle. 
+func (m *metricsTransport) RoundTrip(req *http.Request) (*http.Response, error) { + if req == nil { + return nil, errNilRequest + } + + startTime := time.Now() + size := req.ContentLength + + var statusCode int + resp, err := m.transport.RoundTrip(req) + if err == nil { + statusCode = resp.StatusCode + } + + duration := time.Since(startTime) + + valueFromCtx := req.Context().Value(request.ContextKey) + if valueFromCtx == nil { + return resp, err + } + topic := fmt.Sprintf("%s", valueFromCtx) + + m.statusMetrics.AddIndexingData(metrics.ArgsAddIndexingData{ + StatusCode: statusCode, + GotError: err != nil, + MessageLen: uint64(size), + Topic: topic, + Duration: duration, + }) + + return resp, err +} diff --git a/client/transport/transport_test.go b/client/transport/transport_test.go new file mode 100644 index 00000000..8be7b196 --- /dev/null +++ b/client/transport/transport_test.go @@ -0,0 +1,105 @@ +package transport + +import ( + "bytes" + "context" + "errors" + "net/http" + "testing" + + "github.com/multiversx/mx-chain-es-indexer-go/core" + "github.com/multiversx/mx-chain-es-indexer-go/core/request" + "github.com/multiversx/mx-chain-es-indexer-go/metrics" + "github.com/multiversx/mx-chain-es-indexer-go/mock" + "github.com/stretchr/testify/require" +) + +func TestNewMetricsTransport(t *testing.T) { + t.Parallel() + + transportHandler, err := NewMetricsTransport(nil) + require.Nil(t, transportHandler) + require.Equal(t, core.ErrNilMetricsHandler, err) + + metricsHandler := metrics.NewStatusMetrics() + transportHandler, err = NewMetricsTransport(metricsHandler) + require.Nil(t, err) + require.NotNil(t, transportHandler) +} + +func TestMetricsTransport_NilRequest(t *testing.T) { + metricsHandler := metrics.NewStatusMetrics() + transportHandler, _ := NewMetricsTransport(metricsHandler) + + _, err := transportHandler.RoundTrip(nil) + require.Equal(t, errNilRequest, err) +} + +func TestMetricsTransport_RoundTripNilResponseShouldWork(t *testing.T) { + t.Parallel() + + 
metricsHandler := metrics.NewStatusMetrics() + transportHandler, _ := NewMetricsTransport(metricsHandler) + + testErr := errors.New("test") + transportHandler.transport = &mock.TransportMock{ + Response: nil, + Err: testErr, + } + + testTopic := "test" + contextWithValue := context.WithValue(context.Background(), request.ContextKey, testTopic) + req, _ := http.NewRequestWithContext(contextWithValue, http.MethodGet, "dummy", bytes.NewBuffer([]byte("test"))) + + _, _ = transportHandler.RoundTrip(req) + + metricsMap := metricsHandler.GetMetrics() + require.Equal(t, uint64(1), metricsMap[testTopic].OperationsCount) + require.Equal(t, uint64(1), metricsMap[testTopic].TotalErrorsCount) + require.Equal(t, uint64(4), metricsMap[testTopic].TotalData) +} + +func TestMetricsTransport_RoundTrip(t *testing.T) { + t.Parallel() + + metricsHandler := metrics.NewStatusMetrics() + transportHandler, _ := NewMetricsTransport(metricsHandler) + + transportHandler.transport = &mock.TransportMock{ + Response: &http.Response{ + StatusCode: http.StatusOK, + }, + Err: nil, + } + + testTopic := "test" + contextWithValue := context.WithValue(context.Background(), request.ContextKey, testTopic) + req, _ := http.NewRequestWithContext(contextWithValue, http.MethodGet, "dummy", bytes.NewBuffer([]byte("test"))) + + _, _ = transportHandler.RoundTrip(req) + + metricsMap := metricsHandler.GetMetrics() + require.Equal(t, uint64(1), metricsMap[testTopic].OperationsCount) + require.Equal(t, uint64(0), metricsMap[testTopic].TotalErrorsCount) + require.Equal(t, uint64(4), metricsMap[testTopic].TotalData) +} + +func TestMetricsTransport_RoundTripNoValueInContextShouldNotAddMetrics(t *testing.T) { + t.Parallel() + + metricsHandler := metrics.NewStatusMetrics() + transportHandler, _ := NewMetricsTransport(metricsHandler) + + transportHandler.transport = &mock.TransportMock{ + Response: &http.Response{ + StatusCode: http.StatusOK, + }, + Err: nil, + } + + req, _ := 
http.NewRequestWithContext(context.Background(), http.MethodGet, "dummy", bytes.NewBuffer([]byte("test"))) + _, _ = transportHandler.RoundTrip(req) + + metricsMap := metricsHandler.GetMetrics() + require.Len(t, metricsMap, 0) +} diff --git a/cmd/elasticindexer/config/api.toml b/cmd/elasticindexer/config/api.toml new file mode 100644 index 00000000..dd41e8f5 --- /dev/null +++ b/cmd/elasticindexer/config/api.toml @@ -0,0 +1,9 @@ +rest-api-interface = ":8080" + +[api-packages] + +[api-packages.status] + routes = [ + { name = "/metrics", open = true }, + { name = "/prometheus-metrics", open = true } + ] diff --git a/cmd/elasticindexer/config/config.toml b/cmd/elasticindexer/config/config.toml index 713f0e00..35fa0702 100644 --- a/cmd/elasticindexer/config/config.toml +++ b/cmd/elasticindexer/config/config.toml @@ -2,11 +2,12 @@ available-indices = [ "rating", "transactions", "blocks", "validators", "miniblocks", "rounds", "accounts", "accountshistory", "receipts", "scresults", "accountsesdt", "accountsesdthistory", "epochinfo", "scdeploys", "tokens", "tags", - "logs", "delegators", "operations" + "logs", "delegators", "operations", "esdts" ] [config.address-converter] length = 32 type = "bech32" + prefix = "erd" [config.validator-keys-converter] length = 96 type = "hex" diff --git a/cmd/elasticindexer/config/prefs.toml b/cmd/elasticindexer/config/prefs.toml index 3dda19cb..2685e4b8 100644 --- a/cmd/elasticindexer/config/prefs.toml +++ b/cmd/elasticindexer/config/prefs.toml @@ -1,8 +1,21 @@ [config] disabled-indices = [] [config.web-socket] - server-url = "localhost:22111" + # URL for the WebSocket client/server connection + # This value represents the IP address and port number that the WebSocket client or server will use to establish a connection. + url = "localhost:22111" + # This flag describes the mode to start the WebSocket connector. Can be "client" or "server" + mode = "server" + # Possible values: json, gogo protobuf. 
Should be compatible with mx-chain-node outport driver config data-marshaller-type = "json" + # Retry duration (receive/send ack signal) in seconds + retry-duration-in-seconds = 5 + # Signals if in case of data payload processing error, we should send the ack signal or not + blocking-ack-on-error = true + # After a message will be sent it will wait for an ack message if this flag is enabled + with-acknowledge = true + # The duration in seconds to wait for an acknowledgment message, after this time passes an error will be returned + acknowledge-timeout-in-seconds = 50 [config.elastic-cluster] use-kibana = false diff --git a/cmd/elasticindexer/flags.go b/cmd/elasticindexer/flags.go index 4a82dd5e..a8c6ed86 100644 --- a/cmd/elasticindexer/flags.go +++ b/cmd/elasticindexer/flags.go @@ -5,6 +5,10 @@ import ( "github.com/urfave/cli" ) +const ( + filePathPlaceholder = "[path]" +) + var ( configurationFile = cli.StringFlag{ Name: "config", @@ -14,10 +18,18 @@ var ( // configurationPreferencesFile defines a flag for the path to the preferences toml configuration file configurationPreferencesFile = cli.StringFlag{ Name: "config-preferences", - Usage: "The [path] for the preferences configuration file. This TOML file contains " + + Usage: "The `" + filePathPlaceholder + "` for the preferences configuration file. This TOML file contains " + "preferences configurations, such as the node display name or the shard to start in when starting as observer", Value: "./config/prefs.toml", } + // configurationApiFile defines a flag for the path to the api routes toml configuration file + configurationApiFile = cli.StringFlag{ + Name: "config-api", + Usage: "The `" + filePathPlaceholder + "` for the api configuration file. This TOML file contains " + + "all available routes for Rest API and options to enable or disable them.", + Value: "config/api.toml", + } + logLevel = cli.StringFlag{ Name: "log-level", Usage: "This flag specifies the logger `level(s)`. 
It can contain multiple comma-separated value. For example" + @@ -35,4 +47,10 @@ var ( Name: "disable-ansi-color", Usage: "Boolean option for disabling ANSI colors in the logging system.", } + importDB = cli.BoolFlag{ + Name: "import-db", + Usage: "This flag, when enabled, triggers the indexer to operate in import database mode. In this mode," + + " the indexer excludes the indexing of cross shard transactions received from the source shard. " + + "This flag must be enabled when the observers are in import database mode.", + } ) diff --git a/cmd/elasticindexer/main.go b/cmd/elasticindexer/main.go index 518644ea..cea8460b 100644 --- a/cmd/elasticindexer/main.go +++ b/cmd/elasticindexer/main.go @@ -10,8 +10,11 @@ import ( "github.com/multiversx/mx-chain-core-go/core" "github.com/multiversx/mx-chain-core-go/core/check" "github.com/multiversx/mx-chain-core-go/core/closing" + "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-es-indexer-go/config" "github.com/multiversx/mx-chain-es-indexer-go/factory" + "github.com/multiversx/mx-chain-es-indexer-go/metrics" + "github.com/multiversx/mx-chain-es-indexer-go/process/wsindexer" logger "github.com/multiversx/mx-chain-logger-go" "github.com/multiversx/mx-chain-logger-go/file" "github.com/urfave/cli" @@ -44,9 +47,11 @@ func main() { app.Flags = []cli.Flag{ configurationFile, configurationPreferencesFile, + configurationApiFile, logLevel, logSaveFile, disableAnsiColor, + importDB, } app.Authors = []cli.Author{ { @@ -67,32 +72,61 @@ func main() { func startIndexer(ctx *cli.Context) error { cfg, err := loadMainConfig(ctx.GlobalString(configurationFile.Name)) if err != nil { - return err + return fmt.Errorf("%w while loading the config file", err) } clusterCfg, err := loadClusterConfig(ctx.GlobalString(configurationPreferencesFile.Name)) if err != nil { - return err + return fmt.Errorf("%w while loading the preferences config file", err) } fileLogging, err := initializeLogger(ctx, cfg) if err != 
nil { - return err + return fmt.Errorf("%w while initializing the logger", err) + } + + importDBMode := ctx.GlobalBool(importDB.Name) + statusMetrics := metrics.NewStatusMetrics() + wsHost, err := factory.CreateWsIndexer(cfg, clusterCfg, importDBMode, statusMetrics) + if err != nil { + return fmt.Errorf("%w while creating the indexer", err) + } + + apiConfig, err := loadApiConfig(ctx.GlobalString(configurationApiFile.Name)) + if err != nil { + return fmt.Errorf("%w while loading the api config file", err) } - wsClient, err := factory.CreateWsIndexer(cfg, clusterCfg) + webServer, err := factory.CreateWebServer(apiConfig, statusMetrics) if err != nil { - log.Error("cannot create ws indexer", "error", err) + return fmt.Errorf("%w while creating the web server", err) + } + + err = webServer.StartHttpServer() + if err != nil { + return fmt.Errorf("%w while starting the web server", err) } interrupt := make(chan os.Signal, 1) signal.Notify(interrupt, syscall.SIGINT, syscall.SIGTERM) - go wsClient.Start() + retryDuration := time.Duration(clusterCfg.Config.WebSocket.RetryDurationInSec) * time.Second + closed := requestSettings(wsHost, retryDuration, interrupt) + if !closed { + <-interrupt + } - <-interrupt log.Info("closing app at user's signal") - wsClient.Close() + err = wsHost.Close() + if err != nil { + log.Error("cannot close ws indexer", "error", err) + } + + err = webServer.Close() + if err != nil { + log.Error("cannot close web server", "error", err) + } + if !check.IfNilReflect(fileLogging) { err = fileLogging.Close() log.LogIfError(err) @@ -100,6 +134,27 @@ func startIndexer(ctx *cli.Context) error { return nil } +func requestSettings(host wsindexer.WSClient, retryDuration time.Duration, close chan os.Signal) bool { + timer := time.NewTimer(0) + defer timer.Stop() + + emptyMessage := make([]byte, 0) + for { + select { + case <-timer.C: + err := host.Send(emptyMessage, outport.TopicSettings) + if err == nil { + return false + } + log.Debug("unable to request 
settings - will retry", "error", err) + + timer.Reset(retryDuration) + case <-close: + return true + } + } +} + func loadMainConfig(filepath string) (config.Config, error) { cfg := config.Config{} err := core.LoadTomlFile(&cfg, filepath) @@ -114,6 +169,17 @@ func loadClusterConfig(filepath string) (config.ClusterConfig, error) { return cfg, err } +// loadApiConfig returns a ApiRoutesConfig by reading the config file provided +func loadApiConfig(filepath string) (config.ApiRoutesConfig, error) { + cfg := config.ApiRoutesConfig{} + err := core.LoadTomlFile(&cfg, filepath) + if err != nil { + return config.ApiRoutesConfig{}, err + } + + return cfg, nil +} + func initializeLogger(ctx *cli.Context, cfg config.Config) (closing.Closer, error) { logLevelFlagValue := ctx.GlobalString(logLevel.Name) err := logger.SetLogLevel(logLevelFlagValue) diff --git a/config/config.go b/config/config.go index b726953f..dc1ab2cb 100644 --- a/config/config.go +++ b/config/config.go @@ -7,6 +7,7 @@ type Config struct { AddressConverter struct { Length int `toml:"length"` Type string `toml:"type"` + Prefix string `toml:"prefix"` } `toml:"address-converter"` ValidatorKeysConverter struct { Length int `toml:"length"` @@ -35,8 +36,13 @@ type ClusterConfig struct { Config struct { DisabledIndices []string `toml:"disabled-indices"` WebSocket struct { - ServerURL string `toml:"server-url"` + URL string `toml:"url"` + Mode string `toml:"mode"` DataMarshallerType string `toml:"data-marshaller-type"` + RetryDurationInSec uint32 `toml:"retry-duration-in-seconds"` + BlockingAckOnError bool `toml:"blocking-ack-on-error"` + WithAcknowledge bool `toml:"with-acknowledge"` + AckTimeoutInSec uint32 `toml:"acknowledge-timeout-in-seconds"` } `toml:"web-socket"` ElasticCluster struct { UseKibana bool `toml:"use-kibana"` @@ -47,3 +53,20 @@ type ClusterConfig struct { } `toml:"elastic-cluster"` } `toml:"config"` } + +// ApiRoutesConfig holds the configuration related to Rest API routes +type ApiRoutesConfig 
struct { + RestApiInterface string `toml:"rest-api-interface"` + APIPackages map[string]APIPackageConfig `toml:"api-packages"` +} + +// APIPackageConfig holds the configuration for the routes of each package +type APIPackageConfig struct { + Routes []RouteConfig `toml:"routes"` +} + +// RouteConfig holds the configuration for a single route +type RouteConfig struct { + Name string `toml:"name"` + Open bool `toml:"open"` +} diff --git a/core/errors.go b/core/errors.go new file mode 100644 index 00000000..45e554d7 --- /dev/null +++ b/core/errors.go @@ -0,0 +1,9 @@ +package core + +import "errors" + +// ErrNilMetricsHandler signals that a nil metrics handler has been provided +var ErrNilMetricsHandler = errors.New("nil metrics handler") + +// ErrNilFacadeHandler signal that a nil facade handler has been provided +var ErrNilFacadeHandler = errors.New("nil facade handler") diff --git a/core/interface.go b/core/interface.go new file mode 100644 index 00000000..b233db88 --- /dev/null +++ b/core/interface.go @@ -0,0 +1,20 @@ +package core + +import ( + "github.com/multiversx/mx-chain-es-indexer-go/core/request" + "github.com/multiversx/mx-chain-es-indexer-go/metrics" +) + +// StatusMetricsHandler defines the behavior of a component that handles status metrics +type StatusMetricsHandler interface { + AddIndexingData(args metrics.ArgsAddIndexingData) + GetMetrics() map[string]*request.MetricsResponse + GetMetricsForPrometheus() string + IsInterfaceNil() bool +} + +// WebServerHandler defines the behavior of a component that handles the web server +type WebServerHandler interface { + StartHttpServer() error + Close() error +} diff --git a/core/request/requests.go b/core/request/requests.go new file mode 100644 index 00000000..717bc66c --- /dev/null +++ b/core/request/requests.go @@ -0,0 +1,59 @@ +package request + +import ( + "fmt" + "strconv" + "strings" + "time" +) + +// StringKeyType defines the type for the context key +type StringKeyType string + +const ( + noShardID = 
"#" + // ContextKey the key for the value that will be added in the context + ContextKey StringKeyType = "key" + separator string = "_" + // RemoveTopic is the identifier for the remove requests metrics + RemoveTopic string = "req_remove" + // GetTopic is the identifier for the get requests metrics + GetTopic string = "req_get" + // BulkTopic is the identifier for the bulk requests metrics + BulkTopic string = "req_bulk" + // UpdateTopic is the identifier for the update requests metrics + UpdateTopic string = "req_update" + // ScrollTopic is the identifier for the scroll requests metrics + ScrollTopic string = "req_scroll" +) + +// MetricsResponse defines the response for status metrics endpoint +type MetricsResponse struct { + TotalData uint64 `json:"total_data"` + OperationsCount uint64 `json:"operations_count"` + TotalErrorsCount uint64 `json:"total_errors_count"` + ErrorsCount map[int]uint64 `json:"errors_count,omitempty"` + TotalIndexingTime time.Duration `json:"total_time"` +} + +// ExtendTopicWithShardID will concatenate topic with shardID +func ExtendTopicWithShardID(topic string, shardID uint32) string { + return topic + separator + fmt.Sprintf("%d", shardID) +} + +// SplitTopicAndShardID will extract shard id from the provided topic +func SplitTopicAndShardID(topicWithShardID string) (string, string) { + split := strings.Split(topicWithShardID, separator) + if len(split) < 2 { + return topicWithShardID, noShardID + } + + shardIDIndex := len(split) - 1 + shardIDStr := split[shardIDIndex] + _, err := strconv.ParseUint(shardIDStr, 10, 32) + if err != nil { + return topicWithShardID, noShardID + } + + return strings.Join(split[:shardIDIndex], separator), shardIDStr +} diff --git a/core/request/requests_test.go b/core/request/requests_test.go new file mode 100644 index 00000000..d824c1ae --- /dev/null +++ b/core/request/requests_test.go @@ -0,0 +1,37 @@ +package request + +import ( + "fmt" + "testing" + + "github.com/multiversx/mx-chain-core-go/core" + 
"github.com/stretchr/testify/require" +) + +func TestExtendTopicWithShardID(t *testing.T) { + t.Parallel() + + require.Equal(t, "req_update_2", ExtendTopicWithShardID(UpdateTopic, 2)) + require.Equal(t, "req_bulk_0", ExtendTopicWithShardID(BulkTopic, 0)) + require.Equal(t, "req_get_1", ExtendTopicWithShardID(GetTopic, 1)) + require.Equal(t, "req_scroll_4294967295", ExtendTopicWithShardID(ScrollTopic, core.MetachainShardId)) +} + +func TestSplitTopicAndShardID(t *testing.T) { + + topic, shardID := SplitTopicAndShardID("req_update_2") + require.Equal(t, UpdateTopic, topic) + require.Equal(t, "2", shardID) + + topic, shardID = SplitTopicAndShardID("req_scroll_4294967295") + require.Equal(t, ScrollTopic, topic) + require.Equal(t, fmt.Sprintf("%d", core.MetachainShardId), shardID) + + topic, shardID = SplitTopicAndShardID("req") + require.Equal(t, "req", topic) + require.Equal(t, noShardID, shardID) + + topic, shardID = SplitTopicAndShardID("req_aaaa") + require.Equal(t, "req_aaaa", topic) + require.Equal(t, noShardID, shardID) +} diff --git a/data/account.go b/data/account.go index 5175d6cd..f28ff087 100644 --- a/data/account.go +++ b/data/account.go @@ -3,34 +3,35 @@ package data import ( "time" - "github.com/multiversx/mx-chain-core-go/data/outport" + "github.com/multiversx/mx-chain-core-go/data/alteredAccount" ) // AccountInfo holds (serializable) data about an account type AccountInfo struct { - Address string `json:"address,omitempty"` - Nonce uint64 `json:"nonce,omitempty"` - Balance string `json:"balance"` - BalanceNum float64 `json:"balanceNum"` - TokenName string `json:"token,omitempty"` - TokenIdentifier string `json:"identifier,omitempty"` - TokenNonce uint64 `json:"tokenNonce,omitempty"` - Properties string `json:"properties,omitempty"` - Frozen bool `json:"frozen,omitempty"` - TotalBalanceWithStake string `json:"totalBalanceWithStake,omitempty"` - TotalBalanceWithStakeNum float64 `json:"totalBalanceWithStakeNum,omitempty"` - Owner string 
`json:"owner,omitempty"` - UserName string `json:"userName,omitempty"` - DeveloperRewards string `json:"developerRewards,omitempty"` - DeveloperRewardsNum float64 `json:"developerRewardsNum,omitempty"` - Data *TokenMetaData `json:"data,omitempty"` - Timestamp time.Duration `json:"timestamp,omitempty"` - Type string `json:"type,omitempty"` - CurrentOwner string `json:"currentOwner,omitempty"` - ShardID uint32 `json:"shardID"` - IsSender bool `json:"-"` - IsSmartContract bool `json:"-"` - IsNFTCreate bool `json:"-"` + Address string `json:"address,omitempty"` + Nonce uint64 `json:"nonce,omitempty"` + Balance string `json:"balance"` + BalanceNum float64 `json:"balanceNum"` + TokenName string `json:"token,omitempty"` + TokenIdentifier string `json:"identifier,omitempty"` + TokenNonce uint64 `json:"tokenNonce,omitempty"` + Properties string `json:"properties,omitempty"` + Frozen bool `json:"frozen,omitempty"` + Owner string `json:"owner,omitempty"` + UserName string `json:"userName,omitempty"` + DeveloperRewards string `json:"developerRewards,omitempty"` + DeveloperRewardsNum float64 `json:"developerRewardsNum,omitempty"` + Data *TokenMetaData `json:"data,omitempty"` + Timestamp time.Duration `json:"timestamp,omitempty"` + Type string `json:"type,omitempty"` + CurrentOwner string `json:"currentOwner,omitempty"` + ShardID uint32 `json:"shardID"` + RootHash []byte `json:"rootHash,omitempty"` + CodeHash []byte `json:"codeHash,omitempty"` + CodeMetadata []byte `json:"codeMetadata,omitempty"` + IsSender bool `json:"-"` + IsSmartContract bool `json:"-"` + IsNFTCreate bool `json:"-"` } // TokenMetaData holds data about a token metadata @@ -62,13 +63,13 @@ type AccountBalanceHistory struct { // Account is a structure that is needed for regular accounts type Account struct { - UserAccount *outport.AlteredAccount + UserAccount *alteredAccount.AlteredAccount IsSender bool } // AccountESDT is a structure that is needed for ESDT accounts type AccountESDT struct { - Account 
*outport.AlteredAccount + Account *alteredAccount.AlteredAccount TokenIdentifier string NFTNonce uint64 IsSender bool diff --git a/data/block.go b/data/block.go index b163a623..9c71af27 100644 --- a/data/block.go +++ b/data/block.go @@ -5,8 +5,9 @@ import ( ) // Block is a structure containing all the fields that need -// to be saved for a block. It has all the default fields -// plus some extra information for ease of search and filter +// +// to be saved for a block. It has all the default fields +// plus some extra information for ease of search and filter type Block struct { Nonce uint64 `json:"nonce"` Round uint64 `json:"round"` @@ -37,6 +38,14 @@ type Block struct { MaxGasLimit uint64 `json:"maxGasLimit"` ScheduledData *ScheduledData `json:"scheduledData,omitempty"` EpochStartShardsData []*EpochStartShardData `json:"epochStartShardsData,omitempty"` + RandSeed string `json:"randSeed,omitempty"` + PrevRandSeed string `json:"prevRandSeed,omitempty"` + Signature string `json:"signature,omitempty"` + LeaderSignature string `json:"leaderSignature,omitempty"` + ChainID string `json:"chainID,omitempty"` + SoftwareVersion string `json:"softwareVersion,omitempty"` + ReceiptsHash string `json:"receiptsHash,omitempty"` + Reserved []byte `json:"reserved,omitempty"` } // MiniBlocksDetails is a structure that hold information about mini-blocks execution details diff --git a/data/data.go b/data/data.go index b0f661c0..e17042e6 100644 --- a/data/data.go +++ b/data/data.go @@ -24,7 +24,7 @@ type ValidatorRatingInfo struct { // RoundInfo is a structure containing block signers and shard id type RoundInfo struct { - Index uint64 `json:"round"` + Round uint64 `json:"round"` SignersIndexes []uint64 `json:"signersIndexes"` BlockWasProposed bool `json:"blockWasProposed"` ShardId uint32 `json:"shardId"` diff --git a/data/logs.go b/data/logs.go index d8a62ec6..d27fd466 100644 --- a/data/logs.go +++ b/data/logs.go @@ -3,6 +3,7 @@ package data import ( "time" + 
"github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-es-indexer-go/process/elasticproc/tokeninfo" ) @@ -17,11 +18,12 @@ type Logs struct { // Event holds all the fields needed for an event structure type Event struct { - Address string `json:"address"` - Identifier string `json:"identifier"` - Topics [][]byte `json:"topics"` - Data []byte `json:"data"` - Order int `json:"order"` + Address string `json:"address"` + Identifier string `json:"identifier"` + Topics [][]byte `json:"topics"` + Data []byte `json:"data"` + AdditionalData [][]byte `json:"additionalData,omitempty"` + Order int `json:"order"` } // PreparedLogsResults is the DTO that holds all the results after processing @@ -29,7 +31,9 @@ type PreparedLogsResults struct { Tokens TokensHandler TokensSupply TokensHandler ScDeploys map[string]*ScDeployInfo + ChangeOwnerOperations map[string]*OwnerData Delegators map[string]*Delegator + TxHashStatusInfo map[string]*outport.StatusInfo TokensInfo []*TokenInfo NFTsDataUpdates []*NFTDataUpdate TokenRolesAndProperties *tokeninfo.TokenRolesAndProperties diff --git a/data/scDeploy.go b/data/scDeploy.go index 97c2800a..7a7bb69c 100644 --- a/data/scDeploy.go +++ b/data/scDeploy.go @@ -2,10 +2,13 @@ package data // ScDeployInfo is the DTO that holds information about a smart contract deployment type ScDeployInfo struct { - TxHash string `json:"deployTxHash"` - Creator string `json:"deployer"` - Timestamp uint64 `json:"timestamp"` - Upgrades []*Upgrade `json:"upgrades"` + TxHash string `json:"deployTxHash"` + Creator string `json:"deployer"` + CurrentOwner string `json:"currentOwner"` + CodeHash []byte `json:"initialCodeHash"` + Timestamp uint64 `json:"timestamp"` + Upgrades []*Upgrade `json:"upgrades"` + OwnersHistory []*OwnerData `json:"owners"` } // Upgrade is the DTO that holds information about a smart contract upgrade @@ -13,4 +16,5 @@ type Upgrade struct { TxHash string `json:"upgradeTxHash"` Upgrader string `json:"upgrader"` Timestamp 
uint64 `json:"timestamp"` + CodeHash []byte `json:"codeHash"` } diff --git a/data/tokens.go b/data/tokens.go index 5a6e5476..4018c6ea 100644 --- a/data/tokens.go +++ b/data/tokens.go @@ -70,6 +70,7 @@ type TokenProperties struct { // OwnerData is a structure that is needed to store information about an owner type OwnerData struct { + TxHash string `json:"txHash,omitempty"` Address string `json:"address"` Timestamp time.Duration `json:"timestamp"` } diff --git a/data/transaction.go b/data/transaction.go index fc00b972..04b7364f 100644 --- a/data/transaction.go +++ b/data/transaction.go @@ -46,6 +46,8 @@ type Transaction struct { Version uint32 `json:"version,omitempty"` GuardianAddress string `json:"guardian,omitempty"` GuardianSignature string `json:"guardianSignature,omitempty"` + ErrorEvent bool `json:"errorEvent,omitempty"` + CompletedEvent bool `json:"completedEvent,omitempty"` SmartContractResults []*ScResult `json:"-"` Hash string `json:"-"` BlockHash string `json:"-"` @@ -67,7 +69,6 @@ type PreparedResults struct { Transactions []*Transaction ScResults []*ScResult Receipts []*Receipt - TxHashStatus map[string]string TxHashFee map[string]*FeeData } diff --git a/facade/facade.go b/facade/facade.go new file mode 100644 index 00000000..b11a4331 --- /dev/null +++ b/facade/facade.go @@ -0,0 +1,37 @@ +package facade + +import ( + "github.com/multiversx/mx-chain-core-go/core/check" + "github.com/multiversx/mx-chain-es-indexer-go/core" + "github.com/multiversx/mx-chain-es-indexer-go/core/request" +) + +type metricsFacade struct { + statusMetrics core.StatusMetricsHandler +} + +// NewMetricsFacade will create a new instance of metricsFacade +func NewMetricsFacade(statusMetrics core.StatusMetricsHandler) (*metricsFacade, error) { + if check.IfNil(statusMetrics) { + return nil, core.ErrNilMetricsHandler + } + + return &metricsFacade{ + statusMetrics: statusMetrics, + }, nil +} + +// GetMetrics will return metrics in json format +func (mf *metricsFacade) GetMetrics() 
map[string]*request.MetricsResponse { + return mf.statusMetrics.GetMetrics() +} + +// GetMetricsForPrometheus will return metrics in prometheus format +func (mf *metricsFacade) GetMetricsForPrometheus() string { + return mf.statusMetrics.GetMetricsForPrometheus() +} + +// IsInterfaceNil returns true if there is no value under the interface +func (mf *metricsFacade) IsInterfaceNil() bool { + return mf == nil +} diff --git a/factory/webServerFactory.go b/factory/webServerFactory.go new file mode 100644 index 00000000..14830649 --- /dev/null +++ b/factory/webServerFactory.go @@ -0,0 +1,22 @@ +package factory + +import ( + "github.com/multiversx/mx-chain-es-indexer-go/api/gin" + "github.com/multiversx/mx-chain-es-indexer-go/config" + "github.com/multiversx/mx-chain-es-indexer-go/core" + "github.com/multiversx/mx-chain-es-indexer-go/facade" +) + +// CreateWebServer will create a new instance of core.WebServerHandler +func CreateWebServer(apiConfig config.ApiRoutesConfig, statusMetricsHandler core.StatusMetricsHandler) (core.WebServerHandler, error) { + metricsFacade, err := facade.NewMetricsFacade(statusMetricsHandler) + if err != nil { + return nil, err + } + + args := gin.ArgsWebServer{ + Facade: metricsFacade, + ApiConfig: apiConfig, + } + return gin.NewWebServer(args) +} diff --git a/factory/wsIndexerFactory.go b/factory/wsIndexerFactory.go index 5bedd89a..eaae1fdc 100644 --- a/factory/wsIndexerFactory.go +++ b/factory/wsIndexerFactory.go @@ -1,43 +1,63 @@ package factory import ( + "github.com/multiversx/mx-chain-communication-go/websocket/data" + factoryHost "github.com/multiversx/mx-chain-communication-go/websocket/factory" "github.com/multiversx/mx-chain-core-go/core/pubkeyConverter" factoryHasher "github.com/multiversx/mx-chain-core-go/hashing/factory" + "github.com/multiversx/mx-chain-core-go/marshal" factoryMarshaller "github.com/multiversx/mx-chain-core-go/marshal/factory" "github.com/multiversx/mx-chain-es-indexer-go/config" + 
"github.com/multiversx/mx-chain-es-indexer-go/core" "github.com/multiversx/mx-chain-es-indexer-go/process/factory" - "github.com/multiversx/mx-chain-es-indexer-go/process/wsclient" "github.com/multiversx/mx-chain-es-indexer-go/process/wsindexer" logger "github.com/multiversx/mx-chain-logger-go" ) -var log = logger.GetOrCreate("factory") - -const ( - indexerCacheSize = 1 -) +var log = logger.GetOrCreate("elasticindexer") // CreateWsIndexer will create a new instance of wsindexer.WSClient -func CreateWsIndexer(cfg config.Config, clusterCfg config.ClusterConfig) (wsindexer.WSClient, error) { - dataIndexer, err := createDataIndexer(cfg, clusterCfg) +func CreateWsIndexer(cfg config.Config, clusterCfg config.ClusterConfig, importDB bool, statusMetrics core.StatusMetricsHandler) (wsindexer.WSClient, error) { + wsMarshaller, err := factoryMarshaller.NewMarshalizer(clusterCfg.Config.WebSocket.DataMarshallerType) if err != nil { return nil, err } - wsMarshaller, err := factoryMarshaller.NewMarshalizer(clusterCfg.Config.WebSocket.DataMarshallerType) + dataIndexer, err := createDataIndexer(cfg, clusterCfg, wsMarshaller, importDB, statusMetrics) + if err != nil { + return nil, err + } + + args := wsindexer.ArgsIndexer{ + Marshaller: wsMarshaller, + DataIndexer: dataIndexer, + StatusMetrics: statusMetrics, + } + indexer, err := wsindexer.NewIndexer(args) + if err != nil { + return nil, err + } + + host, err := createWsHost(clusterCfg, wsMarshaller) if err != nil { return nil, err } - indexer, err := wsindexer.NewIndexer(wsMarshaller, dataIndexer) + err = host.SetPayloadHandler(indexer) if err != nil { return nil, err } - return wsclient.New(clusterCfg.Config.WebSocket.ServerURL, indexer) + return host, nil } -func createDataIndexer(cfg config.Config, clusterCfg config.ClusterConfig) (wsindexer.DataIndexer, error) { +func createDataIndexer( + cfg config.Config, + clusterCfg config.ClusterConfig, + wsMarshaller marshal.Marshalizer, + importDB bool, + statusMetrics 
core.StatusMetricsHandler, +) (wsindexer.DataIndexer, error) { marshaller, err := factoryMarshaller.NewMarshalizer(cfg.Config.Marshaller.Type) if err != nil { return nil, err @@ -46,7 +66,7 @@ func createDataIndexer(cfg config.Config, clusterCfg config.ClusterConfig) (wsin if err != nil { return nil, err } - addressPubkeyConverter, err := pubkeyConverter.NewBech32PubkeyConverter(cfg.Config.AddressConverter.Length, log) + addressPubkeyConverter, err := pubkeyConverter.NewBech32PubkeyConverter(cfg.Config.AddressConverter.Length, cfg.Config.AddressConverter.Prefix) if err != nil { return nil, err } @@ -57,7 +77,6 @@ func createDataIndexer(cfg config.Config, clusterCfg config.ClusterConfig) (wsin return factory.NewIndexer(factory.ArgsIndexerFactory{ UseKibana: clusterCfg.Config.ElasticCluster.UseKibana, - IndexerCacheSize: indexerCacheSize, Denomination: cfg.Config.Economics.Denomination, BulkRequestMaxSize: clusterCfg.Config.ElasticCluster.BulkRequestMaxSizeInBytes, Url: clusterCfg.Config.ElasticCluster.URL, @@ -68,6 +87,9 @@ func createDataIndexer(cfg config.Config, clusterCfg config.ClusterConfig) (wsin Hasher: hasher, AddressPubkeyConverter: addressPubkeyConverter, ValidatorPubkeyConverter: validatorPubkeyConverter, + HeaderMarshaller: wsMarshaller, + ImportDB: importDB, + StatusMetrics: statusMetrics, }) } @@ -89,3 +111,18 @@ func prepareIndices(availableIndices, disabledIndices []string) []string { return indices } + +func createWsHost(clusterCfg config.ClusterConfig, wsMarshaller marshal.Marshalizer) (factoryHost.FullDuplexHost, error) { + return factoryHost.CreateWebSocketHost(factoryHost.ArgsWebSocketHost{ + WebSocketConfig: data.WebSocketConfig{ + URL: clusterCfg.Config.WebSocket.URL, + WithAcknowledge: clusterCfg.Config.WebSocket.WithAcknowledge, + Mode: clusterCfg.Config.WebSocket.Mode, + RetryDurationInSec: int(clusterCfg.Config.WebSocket.RetryDurationInSec), + AcknowledgeTimeoutInSec: int(clusterCfg.Config.WebSocket.AckTimeoutInSec), + BlockingAckOnError: 
clusterCfg.Config.WebSocket.BlockingAckOnError, + }, + Marshaller: wsMarshaller, + Log: log, + }) +} diff --git a/go.mod b/go.mod index 8dfe2492..2dd2ccb9 100644 --- a/go.mod +++ b/go.mod @@ -1,34 +1,62 @@ module github.com/multiversx/mx-chain-es-indexer-go -go 1.17 +go 1.20 require ( github.com/elastic/go-elasticsearch/v7 v7.12.0 - github.com/gorilla/websocket v1.5.0 - github.com/multiversx/mx-chain-core-go v1.1.37 - github.com/multiversx/mx-chain-logger-go v1.0.11 - github.com/multiversx/mx-chain-vm-common-go v1.3.40 - github.com/stretchr/testify v1.7.0 + github.com/gin-contrib/cors v1.4.0 + github.com/gin-gonic/gin v1.9.1 + github.com/multiversx/mx-chain-communication-go v1.0.12 + github.com/multiversx/mx-chain-core-go v1.2.16 + github.com/multiversx/mx-chain-logger-go v1.0.13 + github.com/multiversx/mx-chain-vm-common-go v1.5.2 + github.com/prometheus/client_model v0.4.0 + github.com/prometheus/common v0.37.0 + github.com/stretchr/testify v1.8.4 github.com/tidwall/gjson v1.14.0 - github.com/urfave/cli v1.22.9 + github.com/urfave/cli v1.22.10 + google.golang.org/protobuf v1.30.0 ) require ( github.com/btcsuite/btcd/btcutil v1.1.3 // indirect - github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d // indirect + github.com/bytedance/sonic v1.9.1 // indirect + github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 // indirect + github.com/cpuguy83/go-md2man/v2 v2.0.0 // indirect github.com/davecgh/go-spew v1.1.1 // indirect github.com/denisbrodbeck/machineid v1.0.1 // indirect + github.com/gabriel-vasile/mimetype v1.4.2 // indirect + github.com/gin-contrib/sse v0.1.0 // indirect + github.com/go-playground/locales v0.14.1 // indirect + github.com/go-playground/universal-translator v0.18.1 // indirect + github.com/go-playground/validator/v10 v10.14.0 // indirect + github.com/goccy/go-json v0.10.2 // indirect github.com/gogo/protobuf v1.3.2 // indirect - github.com/golang/protobuf v1.5.2 // indirect + github.com/golang/protobuf v1.5.3 // 
indirect + github.com/google/go-cmp v0.5.8 // indirect + github.com/gorilla/mux v1.8.0 // indirect + github.com/gorilla/websocket v1.5.0 // indirect + github.com/json-iterator/go v1.1.12 // indirect + github.com/klauspost/cpuid/v2 v2.2.5 // indirect + github.com/leodido/go-urn v1.2.4 // indirect + github.com/mattn/go-isatty v0.0.19 // indirect + github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect + github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect + github.com/modern-go/reflect2 v1.0.2 // indirect github.com/mr-tron/base58 v1.2.0 // indirect github.com/pelletier/go-toml v1.9.3 // indirect + github.com/pelletier/go-toml/v2 v2.0.8 // indirect github.com/pmezard/go-difflib v1.0.0 // indirect github.com/russross/blackfriday/v2 v2.0.1 // indirect github.com/shurcooL/sanitized_anchor_name v1.0.0 // indirect github.com/tidwall/match v1.1.1 // indirect github.com/tidwall/pretty v1.2.0 // indirect - golang.org/x/crypto v0.3.0 // indirect - golang.org/x/sys v0.2.0 // indirect - google.golang.org/protobuf v1.26.0 // indirect - gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c // indirect + github.com/twitchyliquid64/golang-asm v0.15.1 // indirect + github.com/ugorji/go/codec v1.2.11 // indirect + golang.org/x/arch v0.3.0 // indirect + golang.org/x/crypto v0.9.0 // indirect + golang.org/x/net v0.10.0 // indirect + golang.org/x/sys v0.8.0 // indirect + golang.org/x/text v0.9.0 // indirect + gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index f0db6a13..87bf48e2 100644 --- a/go.sum +++ b/go.sum @@ -1,8 +1,49 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod 
h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= +cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= +cloud.google.com/go v0.52.0/go.mod h1:pXajvRH/6o3+F9jDHZWQ5PbGhn+o8w9qiu/CffaVdO4= +cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= +cloud.google.com/go v0.54.0/go.mod h1:1rq2OEkV3YMf6n/9ZvGWI3GWw0VoqH/1x2nd8Is/bPc= +cloud.google.com/go v0.56.0/go.mod h1:jr7tqZxxKOVYizybht9+26Z/gUq7tiRzu+ACVAMbKVk= +cloud.google.com/go v0.57.0/go.mod h1:oXiQ6Rzq3RAkkY7N6t3TcE6jE+CIBBbA36lwQ1JyzZs= +cloud.google.com/go v0.62.0/go.mod h1:jmCYTdRCQuc1PHIIJ/maLInMho30T/Y0M4hTdTShOYc= +cloud.google.com/go v0.65.0/go.mod h1:O5N8zS7uWy9vkA9vayVHs65eM1ubvY4h553ofrNHObY= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= +cloud.google.com/go/bigquery v1.4.0/go.mod h1:S8dzgnTigyfTmLBfrtrhyYhwRxG72rYxvftPBK2Dvzc= +cloud.google.com/go/bigquery v1.5.0/go.mod h1:snEHRnqQbz117VIFhE8bmtwIDY80NLUZUMb4Nv6dBIg= +cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4gLoIoXIAPc= +cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk= +cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I= +cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw= +cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIAii9o8iA= +cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU= 
+cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw= +cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos= +cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= +cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= +cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/aead/siphash v1.0.1/go.mod h1:Nywa3cDsYNNK3gaciGTWPwHt0wlpNV15vwmswBAUSII= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8= +github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/btcsuite/btcd v0.20.1-beta/go.mod h1:wVuoA8VJLEcwgqHBwHmzLRazpKxTv13Px/pDuV7OomQ= github.com/btcsuite/btcd v0.22.0-beta.0.20220111032746-97732e52810c/go.mod h1:tjmYdS6MLJ5/s0Fj4DbLgSbDHbEqLJrtnHecBFkdz5M= -github.com/btcsuite/btcd 
v0.23.0 h1:V2/ZgjfDFIygAX3ZapeigkVBoVUtOJKSwrhZdlpSvaA= github.com/btcsuite/btcd v0.23.0/go.mod h1:0QJIIN1wwIXF/3G/m87gIwGniDMDQqjVn4SZgnFpsYY= github.com/btcsuite/btcd/btcec/v2 v2.1.0/go.mod h1:2VzYrv4Gm4apmbVVsSq5bqf1Ec8v56E48Vt0Y/umPgA= github.com/btcsuite/btcd/btcec/v2 v2.1.3/go.mod h1:ctjw4H1kknNJmRN4iP1R7bTQ+v3GJkZBd6mui8ZsAZE= @@ -21,8 +62,24 @@ github.com/btcsuite/snappy-go v0.0.0-20151229074030-0bdef8d06723/go.mod h1:8woku github.com/btcsuite/snappy-go v1.0.0/go.mod h1:8woku9dyThutzjeg+3xrA5iCpBRH8XEEg3lh6TiUghc= github.com/btcsuite/websocket v0.0.0-20150119174127-31079b680792/go.mod h1:ghJtEyQwv5/p4Mg4C0fgbePVuGr935/5ddU9Z3TmDRY= github.com/btcsuite/winsvc v1.0.0/go.mod h1:jsenWakMcC0zFBFurPLEAyrnc/teJEM1O46fmI40EZs= -github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d h1:U+s90UTSYgptZMwQh2aRr3LuazLJIa+Pg3Kc1ylSYVY= +github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1O2AihPM= +github.com/bytedance/sonic v1.9.1 h1:6iJ6NqdoxCDr6mbY8h18oSO+cShGSMRGCEo7F2h0x8s= +github.com/bytedance/sonic v1.9.1/go.mod h1:i736AoUSYt75HyZLoJW9ERYxcy6eaN6h4BZXU064P/U= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/cespare/xxhash/v2 v2.1.2/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= +github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY= +github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311 h1:qSGYFH7+jGhDF8vLC+iwCD4WpbV1EBDSzWkJODFLams= +github.com/chenzhuoyu/base64x v0.0.0-20221115062448-fe3a3abad311/go.mod h1:b583jCggY9gE99b6G5LEC39OIiVsWj+R97kbl5odCEk= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= 
+github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/cpuguy83/go-md2man/v2 v2.0.0 h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM= +github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v0.0.0-20171005155431-ecdeabc65495/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= @@ -34,48 +91,173 @@ github.com/denisbrodbeck/machineid v1.0.1 h1:geKr9qtkB876mXguW2X6TU4ZynleN6ezuMS github.com/denisbrodbeck/machineid v1.0.1/go.mod h1:dJUwb7PTidGDeYyUBmXZ2GphQBbjJCrnectwCyxcUSI= github.com/elastic/go-elasticsearch/v7 v7.12.0 h1:j4tvcMrZJLp39L2NYvBb7f+lHKPqPHSL3nvB8+/DV+s= github.com/elastic/go-elasticsearch/v7 v7.12.0/go.mod h1:OJ4wdbtDNk5g503kvlHLyErCgQwwzmDtaFC4XyOxXA4= +github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.9/go.mod 
h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= +github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU= +github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA= +github.com/gin-contrib/cors v1.4.0 h1:oJ6gwtUl3lqV0WEIwM/LxPF1QZ5qe2lGWdY2+bz7y0g= +github.com/gin-contrib/cors v1.4.0/go.mod h1:bs9pNM0x/UsmHPBWT2xZz9ROh8xYjYkiURUfmBoMlcs= +github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= +github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= +github.com/gin-gonic/gin v1.8.1/go.mod h1:ji8BvRH1azfM+SYow9zQ6SZMvR8qOMZHmsCuWR9tTTk= +github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg= +github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU= +github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY= +github.com/go-kit/log v0.2.0/go.mod h1:NwTd00d/i8cPZ3xOwwiv2PO5MOcx78fFErGNcVmBjv0= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= +github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A= +github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs= +github.com/go-playground/assert/v2 v2.0.1/go.mod 
h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= +github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= +github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs= +github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= +github.com/go-playground/locales v0.14.1/go.mod h1:hxrqLVvrK65+Rwrd5Fc6F2O76J/NuW9t0sjnWqG1slY= +github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA= +github.com/go-playground/universal-translator v0.18.1 h1:Bcnm0ZwsGyWbCzImXv+pAJnYK9S473LQFuzCbDbfSFY= +github.com/go-playground/universal-translator v0.18.1/go.mod h1:xekY+UJKNuX9WP91TpwSH2VMlDf28Uj24BCp08ZFTUY= +github.com/go-playground/validator/v10 v10.10.0/go.mod h1:74x4gJWsvQexRdW8Pn3dXSGrTK4nAUsbPlLADvpJkos= +github.com/go-playground/validator/v10 v10.14.0 h1:vgvQWe3XCz3gIeFDm/HnTIbj6UGmg/+t63MyGU2n5js= +github.com/go-playground/validator/v10 v10.14.0/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/goccy/go-json v0.9.7/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= +github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU= +github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod 
h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/mock v1.4.0/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.1/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.3/go.mod h1:UOMv5ysSaYNkG+OFQykRIcU/QvvxJf3p21QfJ2Bt3cw= +github.com/golang/mock v1.4.4/go.mod h1:l3mdAwkq5BuhzHwde/uurv3sEJeZMXNpwsxVWU71h+4= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= +github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= github.com/golang/protobuf v1.4.2/go.mod 
h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= +github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= +github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= +github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= +github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg= +github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= +github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= 
+github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/martian/v3 v3.0.0/go.mod h1:y5Zk1BBys9G+gd6Jrk0W3cC1+ELVxBWuIGO+w/tUAp0= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= +github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/gorilla/mux v1.8.0 h1:i40aqfkR1h2SlN9hojwV5ZA91wcXFOvkdNIeFDP5koI= github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= -github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= github.com/gorilla/websocket v1.5.0 h1:PPwGk2jz7EePpoHN/+ClbZu8SPxiqlu12wZP/3sWmnc= github.com/gorilla/websocket v1.5.0/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= -github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= 
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= +github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/jessevdk/go-flags v0.0.0-20141203071132-1679536dcc89/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= +github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= github.com/jrick/logrotate v1.0.0/go.mod h1:LNinyqDIJnpAur+b8yyulnQw/wDuN1+BYKlTRt3OuAQ= +github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.11/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= +github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= +github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6CZQHDETBtE9HaSEkGmuNXF86RwHhHUvq4= -github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= +github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa0213Md/qVLRg= +github.com/klauspost/cpuid/v2 v2.2.5 
h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg= +github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0= +github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= +github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= +github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY= +github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q= +github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4= +github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= +github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= +github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo= +github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= 
+github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= +github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/mr-tron/base58 v1.2.0 h1:T/HDJBh4ZCPbU39/+c3rRvE0uKBQlU27+QI8LJ4t64o= github.com/mr-tron/base58 v1.2.0/go.mod h1:BinMc/sQntlIE1frQmRFPUoPA1Zkr8VRgBdjWI2mNwc= -github.com/multiversx/mx-chain-core-go v1.1.30/go.mod h1:8gGEQv6BWuuJwhd25qqhCOZbBSv9mk+hLeKvinSaSMk= -github.com/multiversx/mx-chain-core-go v1.1.37 h1:2EYoUWjr+8zUYEt3TBMnQ+0UUZwDb71HA+KBwqDUpVQ= -github.com/multiversx/mx-chain-core-go v1.1.37/go.mod h1:8gGEQv6BWuuJwhd25qqhCOZbBSv9mk+hLeKvinSaSMk= -github.com/multiversx/mx-chain-logger-go v1.0.11 h1:DFsHa+sc5fKwhDR50I8uBM99RTDTEW68ESyr5ALRDwE= -github.com/multiversx/mx-chain-logger-go v1.0.11/go.mod h1:1srDkP0DQucWQ+rYfaq0BX2qLnULsUdRPADpYUTM6dA= -github.com/multiversx/mx-chain-vm-common-go v1.3.40 h1:oGUIW0EoCWHHh8bAxGKbaQN7kUO0fMzuezbYMJfpsu8= -github.com/multiversx/mx-chain-vm-common-go v1.3.40/go.mod h1:r+aILrY07ue89PH+D+B+Pp0viO1U3kN98t1pXneSgkE= +github.com/multiversx/mx-chain-communication-go v1.0.12 h1:67WOaf87gpwouydD1AAOHw5LMGZh7NfITrp/KqFY3Tw= +github.com/multiversx/mx-chain-communication-go v1.0.12/go.mod h1:+oaUowpq+SqrEmAsMPGwhz44g7L81loWb6AiNQU9Ms4= +github.com/multiversx/mx-chain-core-go v1.2.16 h1:m0hUNmZQjGJxKDLQOHoM9jSaeDfVTbyd+mqiS8+NckE= +github.com/multiversx/mx-chain-core-go 
v1.2.16/go.mod h1:BILOGHUOIG5dNNX8cgkzCNfDaVtoYrJRYcPnpxRMH84= +github.com/multiversx/mx-chain-crypto-go v1.2.8 h1:wOgVlUaO5X4L8iEbFjcQcL8SZvv6WZ7LqH73BiRPhxU= +github.com/multiversx/mx-chain-logger-go v1.0.13 h1:eru/TETo0MkO4ZTnXsQDKf4PBRpAXmqjT02klNT/JnY= +github.com/multiversx/mx-chain-logger-go v1.0.13/go.mod h1:MZJhTAtZTJxT+yK2EHc4ZW3YOHUc1UdjCD0iahRNBZk= +github.com/multiversx/mx-chain-vm-common-go v1.5.2 h1:iRWJNlogjkq9w+pJZIfkVkXQFmMoRxZr6pzCfg2/K68= +github.com/multiversx/mx-chain-vm-common-go v1.5.2/go.mod h1:sqkKMCnwkWl8DURdb9q7pctK8IANghdHY1KJLE0ox2c= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= @@ -87,16 +269,64 @@ github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7J github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= github.com/pelletier/go-toml v1.9.3 h1:zeC5b1GviRUyKYd6OJPvBU/mcVDVoL1OhT17FCt5dSQ= github.com/pelletier/go-toml v1.9.3/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c= +github.com/pelletier/go-toml/v2 v2.0.1/go.mod h1:r9LEWfGN8R5k0VXJ+0BkIe7MYkRdwZOjgMj2KwnJFUo= +github.com/pelletier/go-toml/v2 v2.0.8 h1:0ctb6s9mE31h0/lhu+J6OPmVeDxJn+kYnJc2jZR9tGQ= +github.com/pelletier/go-toml/v2 v2.0.8/go.mod h1:vuYfssBdrU2XDZ9bYydBu6t+6a6PYNcZljzZR9VXg+4= +github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= 
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v1.0.0/go.mod h1:db9x61etRT2tGnBNRi70OPL5FsnadC4Ky3P0J6CfImo= +github.com/prometheus/client_golang v1.7.1/go.mod h1:PY5Wy2awLA44sXw4AOSfFBetzPP4j5+D6mVACh+pe2M= +github.com/prometheus/client_golang v1.11.0/go.mod h1:Z6t4BnS23TR94PD6BsDNk8yVqroYurpAkEiz0P2BEV0= +github.com/prometheus/client_golang v1.12.1/go.mod h1:3Z9XVyYiZYEO+YQWt3RD2R3jrbd179Rt297l4aS6nDY= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190129233127-fd36f4220a90/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.2.0/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/client_model v0.4.0 h1:5lQXD3cAg1OXBf4Wq03gTrXHeaV0TQvGfUooCfx1yqY= +github.com/prometheus/client_model v0.4.0/go.mod h1:oMQmHW1/JoDwqLtg57MGgP/Fb1CJEYF2imWWhWtMkYU= +github.com/prometheus/common v0.4.1/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/common v0.10.0/go.mod h1:Tlit/dnDKsSWFlCLTWaA1cyBgKHSMdTB80sz/V91rCo= +github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9VFqTh1DIvc= +github.com/prometheus/common v0.32.1/go.mod h1:vu+V0TpY+O6vW9J44gczi3Ap/oXXR10b+M/gUGO4Hls= +github.com/prometheus/common v0.37.0 h1:ccBbHCgIiT9uSoFY0vX8H3zsNR5eLt17/RQLUvn8pXE= +github.com/prometheus/common v0.37.0/go.mod h1:phzohg0JFMnBEFGxTDbfu3QyL5GI8gTQJFhYO5B3mfA= 
+github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.2/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= +github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU= +github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= +github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= +github.com/rogpeppe/go-internal v1.8.0 h1:FCbCCtXNOY3UtUuHUYaghJg4y7Fd14rXifAYUAtL9R8= +github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE= github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= +github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= +github.com/stretchr/testify v1.2.2/go.mod 
h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= +github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= +github.com/stretchr/testify v1.8.3/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= +github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= +github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7/go.mod h1:q4W45IWZaF22tdD+VEXcAWRA037jwmWEB5VWYORlTpc= github.com/tidwall/gjson v1.14.0 h1:6aeJ0bzojgWLa82gDQHcx3S0Lr/O51I9bJ5nv6JFx5w= github.com/tidwall/gjson v1.14.0/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= @@ -104,87 +334,338 @@ github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= github.com/tidwall/pretty v1.2.0 h1:RWIZEg2iJ8/g6fDDYzMpobmaoGh5OLl4AXtGUGPcqCs= github.com/tidwall/pretty v1.2.0/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= -github.com/urfave/cli v1.22.9 h1:cv3/KhXGBGjEXLC4bH0sLuJ9BewaAbpk5oyMOveu4pw= -github.com/urfave/cli v1.22.9/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= +github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI= +github.com/twitchyliquid64/golang-asm 
v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08= +github.com/ugorji/go v1.2.7/go.mod h1:nF9osbDWLy6bDVv/Rtoh6QgnvNDpmCalQV5urGCCS6M= +github.com/ugorji/go/codec v1.2.7/go.mod h1:WGN1fab3R1fzQlVQTkfxVtIBhWDRqOviHU95kRgeqEY= +github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4dU= +github.com/ugorji/go/codec v1.2.11/go.mod h1:UNopzCgEMSXjBc6AOMqYvWC1ktqTAfzJZUZgYf6w6lg= +github.com/urfave/cli v1.22.10 h1:p8Fspmz3iTctJstry1PYS3HVdllxnEzTEsgIgtxTrCk= +github.com/urfave/cli v1.22.10/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= +github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.3/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +go.opencensus.io v0.22.4/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= +golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= +golang.org/x/arch v0.3.0 h1:02VY4/ZcO/gBOH6PUaoiptASxtXU10jazRCP865E97k= +golang.org/x/arch v0.3.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8= golang.org/x/crypto v0.0.0-20170930174604-9419663f5a44/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod 
h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.3.0 h1:a06MkbcxBrEFc0w0QIZWXrH/9cCX6KJyWbBOIwAn+7A= -golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= +golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= +golang.org/x/crypto v0.9.0 h1:LF6fAI+IutBocDJ2OT0Q1g8plpYljMZ4+lty+dsqw3g= +golang.org/x/crypto v0.9.0/go.mod h1:yrmDGqONDYtNj3tH8X9dzUun2m2lzPa9ngI6/RUPGR0= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek= +golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY= +golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= +golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod 
h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= +golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs= +golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= 
+golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/net v0.0.0-20180719180050-a680a1efc54d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= 
+golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200114155413-6afb5195e5aa/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200222125558-5a598a2470a0/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200506145744-7e3656a0809f/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200520182314-0ba52f642ac2/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200707034311-ab3426394381/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20200813134508-3edf25e44fcc/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= +golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod 
h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= -golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= +golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= +golang.org/x/net v0.10.0 h1:X2//UzNDwYmtCLn7To6G58Wr6f5ahEAQgKNzv9Y951M= +golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A= +golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
+golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200113162924-86b910548bc1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200202164722-d101bd2416d5/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200212091648-12a6c2dcc1e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200331124033-c3d80250170d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200501052902-10377860bb8e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200511232937-7e40ca221e25/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200515095857-1151b9dac4a9/go.mod 
h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200523222454-059865788121/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200615200032-f1bc736245b1/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200803210538-64077c9b5642/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200814200057-3d37ad5750ed/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.2.0 h1:ljd4t30dBnAvMZaQCevtY0xLLD0A+bRZXbgLMLU1F/A= -golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20210806184541-e5e7981a1069/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.8.0 h1:EBmGv8NaZBZTWvrbjNoL6HVt+IVy3QDQpJs7VRIw3tU= +golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= -golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= +golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= +golang.org/x/text v0.9.0 h1:2sjJmO8cDvYveuX97RDLsxlyUxLl+GHoLxBiRdHllBE= +golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= +golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools 
v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools 
v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200117161641-43d50277825c/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200122220014-bf1340f18c4a/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200204074204-1cc6d1ef6c74/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200207183749-b753a1ba74fa/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200224181240-023911ca70b2/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200227222343-706bc42d1f0d/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200304193943-95d2e580d8eb/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200312045724-11d5b4c81c7d/go.mod h1:o4KQGtdN14AW+yjsvvwRTJJuXz8XRtIHtEnmAXLyFUw= +golang.org/x/tools v0.0.0-20200331025713-a30bf2db82d4/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8= +golang.org/x/tools v0.0.0-20200501065659-ab2804fb9c9d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200512131952-2bc93b1c0c88/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200515010526-7d3b6ebf133d/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools 
v0.0.0-20200618134242-20370b0cb4b2/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= +google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.18.0/go.mod 
h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.19.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.20.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.22.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= +google.golang.org/api v0.24.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.28.0/go.mod h1:lIXQywCXRcnZPGlsd8NbLnOjtAoL6em04bJ9+z0MncE= +google.golang.org/api v0.29.0/go.mod h1:Lcubydp8VUV7KeIHD9z2Bys/sm/vGKnG1UHuDBSrHWM= +google.golang.org/api v0.30.0/go.mod h1:QGmEvQ87FHZNiUVJkT14jQNYJ4ZJjdRF23ZXz5138Fc= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod 
h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8= +google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200115191322-ca5a22157cba/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200122232147-0452cf42e150/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc= +google.golang.org/genproto v0.0.0-20200204135345-fa8e72b47b90/go.mod h1:GmwEX6Z4W5gMy59cAlVYjN9JhxgbQH6Gn+gFDQe2lzA= +google.golang.org/genproto v0.0.0-20200212174721-66ed5ce911ce/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200224152610-e50cd9704f63/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200228133532-8c2c7df3a383/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200305110556-506484158171/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200312145019-da6875a35672/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200331122359-1ee6d9798940/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200430143042-b979b6f78d84/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto v0.0.0-20200511104702-f5ebc3bea380/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c= +google.golang.org/genproto 
v0.0.0-20200515170657-fc4c6c6a6587/go.mod h1:YsZOwe1myG/8QRHRsmBRE1LrgQY60beZKjly0O1fX9U= +google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= +google.golang.org/genproto v0.0.0-20200618031413-b414f8b61790/go.mod h1:jDfRM7FcilCzHH/e9qn6dsT145K34l5v+OpcnNgKAAA= +google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= +google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.27.1/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +google.golang.org/grpc v1.28.0/go.mod h1:rpkK4SK4GF4Ach/+MFLZUBavHOvF2JJB5uozKKal+60= +google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3IjizoKk= +google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= +google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf 
v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= +google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= +google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= +google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.26.0 h1:bxAC2xTBsZGibn2RTntX0oH50xLsqy1OxA9tTL3p/lk= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= +google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.30.0 h1:kPPoIgf3TsEvrm0PFe15JQ+570QVxYzEvvHqChK+cng= +google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 
v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= +gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg= +honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +honnef.co/go/tools v0.0.1-2020.1.4/go.mod 
h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= +rsc.io/pdf v0.1.1/go.mod h1:n8OzWcQ6Sp37PL01nO98y4iUCRdTGarVfzxY20ICaU4= +rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0= +rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA= diff --git a/integrationtests/accountsBalanceNftTransfer_test.go b/integrationtests/accountsBalanceNftTransfer_test.go index 1c66a240..7dbd2023 100644 --- a/integrationtests/accountsBalanceNftTransfer_test.go +++ b/integrationtests/accountsBalanceNftTransfer_test.go @@ -3,12 +3,15 @@ package integrationtests import ( + "context" + "encoding/hex" "fmt" "math/big" "testing" "github.com/multiversx/mx-chain-core-go/core" coreData "github.com/multiversx/mx-chain-core-go/data" + "github.com/multiversx/mx-chain-core-go/data/alteredAccount" dataBlock "github.com/multiversx/mx-chain-core-go/data/block" "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-core-go/data/transaction" @@ -16,6 +19,26 @@ import ( "github.com/stretchr/testify/require" ) +func createOutportBlockWithHeader( + body *dataBlock.Body, + header coreData.HeaderHandler, + pool *outport.TransactionPool, + coreAlteredAccounts map[string]*alteredAccount.AlteredAccount, + numOfShards uint32, +) *outport.OutportBlockWithHeader { + return &outport.OutportBlockWithHeader{ + OutportBlock: &outport.OutportBlock{ + BlockData: &outport.BlockData{ + Body: body, + }, + TransactionPool: pool, + AlteredAccounts: coreAlteredAccounts, + NumberOfShards: numOfShards, + }, + Header: header, + } +} + func TestAccountBalanceNFTTransfer(t *testing.T) { setLogLevelDebug() @@ -36,11 +59,11 @@ func TestAccountBalanceNFTTransfer(t *testing.T) { ShardID: 1, } - pool := &outport.Pool{ - Logs: []*coreData.LogData{ + pool := &outport.TransactionPool{ + Logs: []*outport.LogData{ { - TxHash: "h1", - LogHandler: &transaction.Log{ + TxHash: 
hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ Events: []*transaction.Event{ { Address: decodeAddress(addr), @@ -54,10 +77,10 @@ func TestAccountBalanceNFTTransfer(t *testing.T) { }, } - coreAlteredAccounts := map[string]*outport.AlteredAccount{ + coreAlteredAccounts := map[string]*alteredAccount.AlteredAccount{ addr: { Address: addr, - Tokens: []*outport.AccountTokenData{ + Tokens: []*alteredAccount.AccountTokenData{ { Identifier: "NFT-abcdef", Nonce: 7440483, @@ -67,12 +90,12 @@ func TestAccountBalanceNFTTransfer(t *testing.T) { }, } - err = esProc.SaveTransactions(body, header, pool, coreAlteredAccounts, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, coreAlteredAccounts, testNumOfShards)) require.Nil(t, err) ids := []string{fmt.Sprintf("%s-NFT-abcdef-718863", addr)} genericResponse := &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.AccountsESDTIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.AccountsESDTIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/accountsBalanceNftTransfer/balance-nft-after-create.json"), string(genericResponse.Docs[0].Source)) @@ -85,11 +108,11 @@ func TestAccountBalanceNFTTransfer(t *testing.T) { ShardID: 1, } - pool = &outport.Pool{ - Logs: []*coreData.LogData{ + pool = &outport.TransactionPool{ + Logs: []*outport.LogData{ { - TxHash: "h1", - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ Events: []*transaction.Event{ { Address: []byte("test-address-balance-1"), @@ -106,10 +129,10 @@ func TestAccountBalanceNFTTransfer(t *testing.T) { esProc, err = CreateElasticProcessor(esClient) require.Nil(t, err) - coreAlteredAccounts = map[string]*outport.AlteredAccount{ + coreAlteredAccounts = map[string]*alteredAccount.AlteredAccount{ addr: { Address: addr, - Tokens: []*outport.AccountTokenData{ + Tokens: 
[]*alteredAccount.AccountTokenData{ { Identifier: "NFT-abcdef", Nonce: 7440483, @@ -119,7 +142,7 @@ func TestAccountBalanceNFTTransfer(t *testing.T) { }, addrReceiver: { Address: addrReceiver, - Tokens: []*outport.AccountTokenData{ + Tokens: []*alteredAccount.AccountTokenData{ { Identifier: "NFT-abcdef", Nonce: 7440483, @@ -128,18 +151,18 @@ func TestAccountBalanceNFTTransfer(t *testing.T) { }, }, } - err = esProc.SaveTransactions(body, header, pool, coreAlteredAccounts, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, coreAlteredAccounts, testNumOfShards)) require.Nil(t, err) ids = []string{fmt.Sprintf("%s-NFT-abcdef-718863", addr)} genericResponse = &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.AccountsESDTIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.AccountsESDTIndex, true, genericResponse) require.Nil(t, err) require.False(t, genericResponse.Docs[0].Found) ids = []string{fmt.Sprintf("%s-NFT-abcdef-718863", addrReceiver)} genericResponse = &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.AccountsESDTIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.AccountsESDTIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/accountsBalanceNftTransfer/balance-nft-after-transfer.json"), string(genericResponse.Docs[0].Source)) } diff --git a/integrationtests/accountsBalanceWithLowerTimestamp_test.go b/integrationtests/accountsBalanceWithLowerTimestamp_test.go index 30ab7673..afc20028 100644 --- a/integrationtests/accountsBalanceWithLowerTimestamp_test.go +++ b/integrationtests/accountsBalanceWithLowerTimestamp_test.go @@ -3,12 +3,14 @@ package integrationtests import ( + "context" + "encoding/hex" "fmt" "math/big" "testing" "github.com/multiversx/mx-chain-core-go/core" - coreData "github.com/multiversx/mx-chain-core-go/data" + 
"github.com/multiversx/mx-chain-core-go/data/alteredAccount" dataBlock "github.com/multiversx/mx-chain-core-go/data/block" "github.com/multiversx/mx-chain-core-go/data/esdt" "github.com/multiversx/mx-chain-core-go/data/outport" @@ -33,10 +35,10 @@ func TestIndexAccountsBalance(t *testing.T) { addr := "erd17umc0uvel62ng30k5uprqcxh3ue33hq608njejaqljuqzqlxtzuqeuzlcv" addr2 := "erd1m2pyjudsqt8gn0tnsstht35gfqcfx8ku5utz07mf2r6pq3sfxjzszhcx6w" - alteredAccount := &outport.AlteredAccount{ + account := &alteredAccount.AlteredAccount{ Address: addr, Balance: "0", - Tokens: []*outport.AccountTokenData{ + Tokens: []*alteredAccount.AccountTokenData{ { Identifier: "TTTT-abcd", Balance: "1000", @@ -45,9 +47,9 @@ func TestIndexAccountsBalance(t *testing.T) { }, } - coreAlteredAccounts := map[string]*outport.AlteredAccount{ - addr: alteredAccount, - addr2: alteredAccount, + coreAlteredAccounts := map[string]*alteredAccount.AlteredAccount{ + addr: account, + addr2: account, } esProc, err := CreateElasticProcessor(esClient) @@ -59,11 +61,11 @@ func TestIndexAccountsBalance(t *testing.T) { ShardID: 2, } - pool := &outport.Pool{ - Logs: []*coreData.LogData{ + pool := &outport.TransactionPool{ + Logs: []*outport.LogData{ { - TxHash: "h1", - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ Events: []*transaction.Event{ { Address: []byte("eeeebbbb"), @@ -77,18 +79,18 @@ func TestIndexAccountsBalance(t *testing.T) { }, } - err = esProc.SaveTransactions(body, header, pool, coreAlteredAccounts, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, coreAlteredAccounts, testNumOfShards)) require.Nil(t, err) ids := []string{addr} genericResponse := &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.AccountsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.AccountsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, 
readExpectedResult("./testdata/accountsBalanceWithLowerTimestamp/account-balance-first-update.json"), string(genericResponse.Docs[0].Source)) ids = []string{fmt.Sprintf("%s-TTTT-abcd-00", addr)} genericResponse = &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.AccountsESDTIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.AccountsESDTIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/accountsBalanceWithLowerTimestamp/account-balance-esdt-first-update.json"), string(genericResponse.Docs[0].Source)) @@ -100,18 +102,18 @@ func TestIndexAccountsBalance(t *testing.T) { ShardID: 2, } - err = esProc.SaveTransactions(body, header, pool, map[string]*outport.AlteredAccount{}, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, map[string]*alteredAccount.AlteredAccount{}, testNumOfShards)) require.Nil(t, err) ids = []string{addr} genericResponse = &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.AccountsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.AccountsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/accountsBalanceWithLowerTimestamp/account-balance-first-update.json"), string(genericResponse.Docs[0].Source)) ids = []string{fmt.Sprintf("%s-TTTT-abcd-00", addr)} genericResponse = &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.AccountsESDTIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.AccountsESDTIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/accountsBalanceWithLowerTimestamp/account-balance-esdt-first-update.json"), string(genericResponse.Docs[0].Source)) @@ -123,20 +125,23 @@ func TestIndexAccountsBalance(t *testing.T) { } coreAlteredAccounts[addr].Balance = "2000" - 
coreAlteredAccounts[addr].AdditionalData = &outport.AdditionalAccountData{ + coreAlteredAccounts[addr].AdditionalData = &alteredAccount.AdditionalAccountData{ IsSender: true, BalanceChanged: true, } - pool = &outport.Pool{ - Txs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - "h1": outport.NewTransactionHandlerWithGasAndFee(&transaction.Transaction{ - SndAddr: []byte(addr), - }, 0, big.NewInt(0)), + pool = &outport.TransactionPool{ + Transactions: map[string]*outport.TxInfo{ + hex.EncodeToString([]byte("h1")): { + Transaction: &transaction.Transaction{ + SndAddr: []byte(addr), + }, + FeeInfo: &outport.FeeInfo{}, + }, }, - Logs: []*coreData.LogData{ + Logs: []*outport.LogData{ { - TxHash: "h1", - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ Events: []*transaction.Event{ { Address: decodeAddress(addr2), @@ -159,18 +164,18 @@ func TestIndexAccountsBalance(t *testing.T) { }, } - err = esProc.SaveTransactions(body, header, pool, coreAlteredAccounts, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, coreAlteredAccounts, testNumOfShards)) require.Nil(t, err) ids = []string{addr} genericResponse = &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.AccountsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.AccountsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/accountsBalanceWithLowerTimestamp/account-balance-second-update.json"), string(genericResponse.Docs[0].Source)) ids = []string{fmt.Sprintf("%s-TTTT-abcd-00", addr)} genericResponse = &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.AccountsESDTIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.AccountsESDTIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, 
readExpectedResult("./testdata/accountsBalanceWithLowerTimestamp/account-balance-esdt-second-update.json"), string(genericResponse.Docs[0].Source)) @@ -188,24 +193,24 @@ func TestIndexAccountsBalance(t *testing.T) { coreAlteredAccounts[addr].Balance = "2000" coreAlteredAccounts[addr].Tokens[0].Balance = "0" - coreAlteredAccounts[addr].AdditionalData = &outport.AdditionalAccountData{ + coreAlteredAccounts[addr].AdditionalData = &alteredAccount.AdditionalAccountData{ IsSender: false, BalanceChanged: false, } - pool.Txs = make(map[string]coreData.TransactionHandlerWithGasUsedAndFee) - err = esProc.SaveTransactions(body, header, pool, coreAlteredAccounts, false, testNumOfShards) + pool.Transactions = make(map[string]*outport.TxInfo) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, coreAlteredAccounts, testNumOfShards)) require.Nil(t, err) ids = []string{addr} genericResponse = &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.AccountsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.AccountsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/accountsBalanceWithLowerTimestamp/account-balance-second-update.json"), string(genericResponse.Docs[0].Source)) ids = []string{fmt.Sprintf("%s-TTTT-abcd-00", addr)} genericResponse = &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.AccountsESDTIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.AccountsESDTIndex, true, genericResponse) require.Nil(t, err) require.False(t, genericResponse.Docs[0].Found) } diff --git a/integrationtests/accountsESDTRollback_test.go b/integrationtests/accountsESDTRollback_test.go index d33d7a2a..9fd3a06f 100644 --- a/integrationtests/accountsESDTRollback_test.go +++ b/integrationtests/accountsESDTRollback_test.go @@ -3,13 +3,15 @@ package integrationtests import ( + "context" + "encoding/hex" 
"encoding/json" "fmt" "math/big" "testing" "github.com/multiversx/mx-chain-core-go/core" - coreData "github.com/multiversx/mx-chain-core-go/data" + "github.com/multiversx/mx-chain-core-go/data/alteredAccount" dataBlock "github.com/multiversx/mx-chain-core-go/data/block" "github.com/multiversx/mx-chain-core-go/data/esdt" "github.com/multiversx/mx-chain-core-go/data/outport" @@ -32,15 +34,15 @@ func TestAccountsESDTDeleteOnRollback(t *testing.T) { }, } addr := "erd1sqy2ywvswp09ef7qwjhv8zwr9kzz3xas6y2ye5nuryaz0wcnfzzsnq0am3" - coreAlteredAccounts := map[string]*outport.AlteredAccount{ + coreAlteredAccounts := map[string]*alteredAccount.AlteredAccount{ addr: { Address: addr, - Tokens: []*outport.AccountTokenData{ + Tokens: []*alteredAccount.AccountTokenData{ { Identifier: "TOKEN-eeee", Nonce: 2, Balance: "1000", - MetaData: &outport.TokenMetaData{ + MetaData: &alteredAccount.TokenMetaData{ Creator: "creator", }, Properties: "3032", @@ -54,11 +56,11 @@ func TestAccountsESDTDeleteOnRollback(t *testing.T) { // CREATE SEMI-FUNGIBLE TOKEN esdtDataBytes, _ := json.Marshal(esdtToken) - pool := &outport.Pool{ - Logs: []*coreData.LogData{ + pool := &outport.TransactionPool{ + Logs: []*outport.LogData{ { - TxHash: "h1", - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ Events: []*transaction.Event{ { Address: decodeAddress(addr), @@ -79,12 +81,12 @@ func TestAccountsESDTDeleteOnRollback(t *testing.T) { ShardID: 2, } - err = esProc.SaveTransactions(body, header, pool, coreAlteredAccounts, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, coreAlteredAccounts, testNumOfShards)) require.Nil(t, err) ids := []string{fmt.Sprintf("%s-TOKEN-eeee-02", addr)} genericResponse := &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.AccountsESDTIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.AccountsESDTIndex, true, 
genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/accountsESDTRollback/account-after-create.json"), string(genericResponse.Docs[0].Source)) @@ -92,7 +94,7 @@ func TestAccountsESDTDeleteOnRollback(t *testing.T) { err = esProc.RemoveAccountsESDT(5040, 2) require.Nil(t, err) - err = esClient.DoMultiGet(ids, indexerdata.AccountsESDTIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.AccountsESDTIndex, true, genericResponse) require.Nil(t, err) require.False(t, genericResponse.Docs[0].Found) } diff --git a/integrationtests/accountsESDTWithTokenType_test.go b/integrationtests/accountsESDTWithTokenType_test.go index 77d0139e..22bebdfd 100644 --- a/integrationtests/accountsESDTWithTokenType_test.go +++ b/integrationtests/accountsESDTWithTokenType_test.go @@ -3,6 +3,8 @@ package integrationtests import ( + "context" + "encoding/hex" "encoding/json" "fmt" "math/big" @@ -10,7 +12,7 @@ import ( "time" "github.com/multiversx/mx-chain-core-go/core" - coreData "github.com/multiversx/mx-chain-core-go/data" + "github.com/multiversx/mx-chain-core-go/data/alteredAccount" dataBlock "github.com/multiversx/mx-chain-core-go/data/block" "github.com/multiversx/mx-chain-core-go/data/esdt" "github.com/multiversx/mx-chain-core-go/data/outport" @@ -37,11 +39,12 @@ func TestIndexAccountESDTWithTokenType(t *testing.T) { } address := "erd1sqy2ywvswp09ef7qwjhv8zwr9kzz3xas6y2ye5nuryaz0wcnfzzsnq0am3" - pool := &outport.Pool{ - Logs: []*coreData.LogData{ + pool := &outport.TransactionPool{ + Logs: []*outport.LogData{ { - TxHash: "h1", - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ + Address: decodeAddress(address), Events: []*transaction.Event{ { Address: decodeAddress(address), @@ -55,27 +58,27 @@ func TestIndexAccountESDTWithTokenType(t *testing.T) { }, } - err = esProc.SaveTransactions(body, header, pool, map[string]*outport.AlteredAccount{}, false, 
testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, map[string]*alteredAccount.AlteredAccount{}, testNumOfShards)) require.Nil(t, err) ids := []string{"SEMI-abcd"} genericResponse := &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.TokensIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TokensIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/accountsESDTWithTokenType/token-after-issue.json"), string(genericResponse.Docs[0].Source)) // ################ CREATE SEMI FUNGIBLE TOKEN ########################## - coreAlteredAccounts := map[string]*outport.AlteredAccount{ + coreAlteredAccounts := map[string]*alteredAccount.AlteredAccount{ address: { Address: address, Balance: "1000", - Tokens: []*outport.AccountTokenData{ + Tokens: []*alteredAccount.AccountTokenData{ { Identifier: "SEMI-abcd", Balance: "1000", Nonce: 2, Properties: "3032", - MetaData: &outport.TokenMetaData{ + MetaData: &alteredAccount.TokenMetaData{ Creator: "creator", }, }, @@ -98,11 +101,12 @@ func TestIndexAccountESDTWithTokenType(t *testing.T) { } esdtDataBytes, _ := json.Marshal(esdtData) - pool = &outport.Pool{ - Logs: []*coreData.LogData{ + pool = &outport.TransactionPool{ + Logs: []*outport.LogData{ { - TxHash: "h1", - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ + Address: decodeAddress(address), Events: []*transaction.Event{ { Address: decodeAddress(address), @@ -116,12 +120,12 @@ func TestIndexAccountESDTWithTokenType(t *testing.T) { }, } - err = esProc.SaveTransactions(body, header, pool, coreAlteredAccounts, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, coreAlteredAccounts, testNumOfShards)) require.Nil(t, err) ids = []string{fmt.Sprintf("%s-SEMI-abcd-02", address)} genericResponse = &GenericResponse{} - err = 
esClient.DoMultiGet(ids, indexerdata.AccountsESDTIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.AccountsESDTIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/accountsESDTWithTokenType/account-esdt.json"), string(genericResponse.Docs[0].Source)) @@ -137,17 +141,17 @@ func TestIndexAccountESDTWithTokenTypeShardFirstAndMetachainAfter(t *testing.T) body := &dataBlock.Body{} address := "erd1l29zsl2dqq988kvr2y0xlfv9ydgnvhzkatfd8ccalpag265pje8qn8lslf" - coreAlteredAccounts := map[string]*outport.AlteredAccount{ + coreAlteredAccounts := map[string]*alteredAccount.AlteredAccount{ address: { Address: address, Balance: "1000", - Tokens: []*outport.AccountTokenData{ + Tokens: []*alteredAccount.AccountTokenData{ { Identifier: "TTTT-abcd", Nonce: 2, Balance: "1000", Properties: "3032", - MetaData: &outport.TokenMetaData{ + MetaData: &alteredAccount.TokenMetaData{ Creator: "erd1l29zsl2dqq988kvr2y0xlfv9ydgnvhzkatfd8ccalpag265pje8qn8lslf", }, }, @@ -170,11 +174,12 @@ func TestIndexAccountESDTWithTokenTypeShardFirstAndMetachainAfter(t *testing.T) } esdtDataBytes, _ := json.Marshal(esdtData) - pool := &outport.Pool{ - Logs: []*coreData.LogData{ + pool := &outport.TransactionPool{ + Logs: []*outport.LogData{ { - TxHash: "h1", - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ + Address: decodeAddress(address), Events: []*transaction.Event{ { Address: decodeAddress(address), @@ -188,12 +193,12 @@ func TestIndexAccountESDTWithTokenTypeShardFirstAndMetachainAfter(t *testing.T) }, } - err = esProc.SaveTransactions(body, header, pool, coreAlteredAccounts, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, coreAlteredAccounts, testNumOfShards)) require.Nil(t, err) ids := []string{fmt.Sprintf("%s-TTTT-abcd-02", address)} genericResponse := &GenericResponse{} - err = esClient.DoMultiGet(ids, 
indexerdata.AccountsESDTIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.AccountsESDTIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/accountsESDTWithTokenType/account-esdt-without-type.json"), string(genericResponse.Docs[0].Source)) @@ -209,11 +214,12 @@ func TestIndexAccountESDTWithTokenTypeShardFirstAndMetachainAfter(t *testing.T) esProc, err = CreateElasticProcessor(esClient) require.Nil(t, err) - pool = &outport.Pool{ - Logs: []*coreData.LogData{ + pool = &outport.TransactionPool{ + Logs: []*outport.LogData{ { - TxHash: "h1", - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ + Address: decodeAddress(address), Events: []*transaction.Event{ { Address: decodeAddress(address), @@ -227,24 +233,24 @@ func TestIndexAccountESDTWithTokenTypeShardFirstAndMetachainAfter(t *testing.T) }, } - err = esProc.SaveTransactions(body, header, pool, map[string]*outport.AlteredAccount{}, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, map[string]*alteredAccount.AlteredAccount{}, testNumOfShards)) require.Nil(t, err) ids = []string{"TTTT-abcd"} genericResponse = &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.TokensIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TokensIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/accountsESDTWithTokenType/semi-fungible-token.json"), string(genericResponse.Docs[0].Source)) ids = []string{fmt.Sprintf("%s-TTTT-abcd-02", address)} genericResponse = &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.AccountsESDTIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.AccountsESDTIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, 
readExpectedResult("./testdata/accountsESDTWithTokenType/account-esdt-with-type.json"), string(genericResponse.Docs[0].Source)) ids = []string{"TTTT-abcd-02"} genericResponse = &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.TokensIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TokensIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/accountsESDTWithTokenType/semi-fungible-token-after-create.json"), string(genericResponse.Docs[0].Source)) } diff --git a/integrationtests/claimRewards_test.go b/integrationtests/claimRewards_test.go index 38a093e6..8901fe1f 100644 --- a/integrationtests/claimRewards_test.go +++ b/integrationtests/claimRewards_test.go @@ -3,12 +3,12 @@ package integrationtests import ( + "context" "encoding/hex" "math/big" "testing" "github.com/multiversx/mx-chain-core-go/core" - coreData "github.com/multiversx/mx-chain-core-go/data" dataBlock "github.com/multiversx/mx-chain-core-go/data/block" "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-core-go/data/smartContractResult" @@ -88,21 +88,29 @@ func TestTransactionWithClaimRewardsGasRefund(t *testing.T) { Value: big.NewInt(0), } - tx := outport.NewTransactionHandlerWithGasAndFee(tx1, 1068000, big.NewInt(78000000000000)) - tx.SetInitialPaidFee(big.NewInt(127320000000000)) + txInfo := &outport.TxInfo{ + Transaction: tx1, + FeeInfo: &outport.FeeInfo{ + GasUsed: 1068000, + Fee: big.NewInt(78000000000000), + InitialPaidFee: big.NewInt(127320000000000), + }, + ExecutionOrder: 0, + } - pool := &outport.Pool{ - Txs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(txHash): tx, + pool := &outport.TransactionPool{ + Transactions: map[string]*outport.TxInfo{ + hex.EncodeToString(txHash): txInfo, }, - Scrs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(scrHash2): outport.NewTransactionHandlerWithGasAndFee(scr2, 0, big.NewInt(0)), 
- string(scrHash1): outport.NewTransactionHandlerWithGasAndFee(scr1, 0, big.NewInt(0)), + SmartContractResults: map[string]*outport.SCRInfo{ + hex.EncodeToString(scrHash2): {SmartContractResult: scr2, FeeInfo: &outport.FeeInfo{}}, + hex.EncodeToString(scrHash1): {SmartContractResult: scr1, FeeInfo: &outport.FeeInfo{}}, }, - Logs: []*coreData.LogData{ + Logs: []*outport.LogData{ { - TxHash: string(txHash), - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString(txHash), + Log: &transaction.Log{ + Address: decodeAddress(addressSender), Events: []*transaction.Event{ { Address: decodeAddress(addressSender), @@ -115,12 +123,12 @@ func TestTransactionWithClaimRewardsGasRefund(t *testing.T) { }, } - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) ids := []string{hex.EncodeToString(txHash)} genericResponse := &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.TransactionsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TransactionsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, diff --git a/integrationtests/consts.go b/integrationtests/consts.go index 6995ff9d..8593fcf6 100644 --- a/integrationtests/consts.go +++ b/integrationtests/consts.go @@ -5,4 +5,6 @@ const ( testNumOfShards = 3 //nolint esURL = "http://localhost:9200" + //nolint + addressPrefix = "erd" ) diff --git a/integrationtests/createNFTWithTags_test.go b/integrationtests/createNFTWithTags_test.go index fdcad119..c3824215 100644 --- a/integrationtests/createNFTWithTags_test.go +++ b/integrationtests/createNFTWithTags_test.go @@ -3,6 +3,7 @@ package integrationtests import ( + "context" "encoding/hex" "encoding/json" "fmt" @@ -10,7 +11,7 @@ import ( "testing" "github.com/multiversx/mx-chain-core-go/core" - coreData "github.com/multiversx/mx-chain-core-go/data" + 
"github.com/multiversx/mx-chain-core-go/data/alteredAccount" dataBlock "github.com/multiversx/mx-chain-core-go/data/block" "github.com/multiversx/mx-chain-core-go/data/esdt" "github.com/multiversx/mx-chain-core-go/data/outport" @@ -47,11 +48,12 @@ func TestCreateNFTWithTags(t *testing.T) { // CREATE A FIRST NFT WITH THE TAGS address1 := "erd1v7e552pz9py4hv6raan0c4jflez3e6csdmzcgrncg0qrnk4tywvsqx0h5j" - pool := &outport.Pool{ - Logs: []*coreData.LogData{ + pool := &outport.TransactionPool{ + Logs: []*outport.LogData{ { - TxHash: "h1", - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ + Address: decodeAddress(address1), Events: []*transaction.Event{ { Address: decodeAddress(address1), @@ -65,20 +67,20 @@ func TestCreateNFTWithTags(t *testing.T) { }, } - coreAlteredAccounts := map[string]*outport.AlteredAccount{ + coreAlteredAccounts := map[string]*alteredAccount.AlteredAccount{ address1: { Address: address1, Balance: "0", - Tokens: []*outport.AccountTokenData{ + Tokens: []*alteredAccount.AccountTokenData{ { - AdditionalData: &outport.AdditionalAccountTokenData{ + AdditionalData: &alteredAccount.AdditionalAccountTokenData{ IsNFTCreate: true, }, Identifier: "DESK-abcd", Nonce: 1, Balance: "1000", Properties: "3032", - MetaData: &outport.TokenMetaData{ + MetaData: &alteredAccount.TokenMetaData{ Creator: "creator", Attributes: []byte("tags:hello,something,do,music,art,gallery;metadata:QmZ2QqaGq4bqsEzs5JLTjRmmvR2GAR4qXJZBN8ibfDdaud"), }, @@ -88,18 +90,18 @@ func TestCreateNFTWithTags(t *testing.T) { } body := &dataBlock.Body{} - err = esProc.SaveTransactions(body, header, pool, coreAlteredAccounts, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, coreAlteredAccounts, testNumOfShards)) require.Nil(t, err) ids := []string{fmt.Sprintf("%s-DESK-abcd-01", address1)} genericResponse := &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.AccountsESDTIndex, true, 
genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.AccountsESDTIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/createNFTWithTags/accounts-esdt-address-balance.json"), string(genericResponse.Docs[0].Source)) ids = []string{"bXVzaWM=", "aGVsbG8=", "Z2FsbGVyeQ==", "ZG8=", "YXJ0", "c29tZXRoaW5n"} genericResponse = &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.TagsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TagsIndex, true, genericResponse) require.Nil(t, err) tagsChecked := 0 @@ -115,11 +117,12 @@ func TestCreateNFTWithTags(t *testing.T) { require.Equal(t, len(ids), tagsChecked) // CREATE A SECOND NFT WITH THE SAME TAGS - pool = &outport.Pool{ - Logs: []*coreData.LogData{ + pool = &outport.TransactionPool{ + Logs: []*outport.LogData{ { - TxHash: "h1", - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ + Address: decodeAddress(address1), Events: []*transaction.Event{ { Address: decodeAddress(address1), @@ -135,11 +138,11 @@ func TestCreateNFTWithTags(t *testing.T) { coreAlteredAccounts[address1].Tokens[0].Nonce = 2 body = &dataBlock.Body{} - err = esProc.SaveTransactions(body, header, pool, coreAlteredAccounts, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, coreAlteredAccounts, testNumOfShards)) require.Nil(t, err) genericResponse = &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.TagsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TagsIndex, true, genericResponse) require.Nil(t, err) tagsChecked = 0 @@ -164,11 +167,12 @@ func TestCreateNFTWithTags(t *testing.T) { esProc, err = CreateElasticProcessor(esClient) require.Nil(t, err) - pool = &outport.Pool{ - Logs: []*coreData.LogData{ + pool = &outport.TransactionPool{ + Logs: []*outport.LogData{ { - 
TxHash: "h1", - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ + Address: decodeAddress(address1), Events: []*transaction.Event{ { Address: decodeAddress(address1), @@ -183,12 +187,12 @@ func TestCreateNFTWithTags(t *testing.T) { } body = &dataBlock.Body{} - err = esProc.SaveTransactions(body, header, pool, coreAlteredAccounts, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, coreAlteredAccounts, testNumOfShards)) require.Nil(t, err) ids = append(ids, "XFxcXFxcXFxcXFxcXFxcXFxcXA==", "JycnJw==", "PDw8Pj4+JiYmJiYmJiYmJiYmJiYm") genericResponse = &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.TagsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TagsIndex, true, genericResponse) require.Nil(t, err) tagsChecked = 0 diff --git a/integrationtests/delegators_test.go b/integrationtests/delegators_test.go index 79f00107..3517b26e 100644 --- a/integrationtests/delegators_test.go +++ b/integrationtests/delegators_test.go @@ -3,12 +3,13 @@ package integrationtests import ( + "context" + "encoding/hex" "math/big" "testing" "time" "github.com/multiversx/mx-chain-core-go/core" - coreData "github.com/multiversx/mx-chain-core-go/data" dataBlock "github.com/multiversx/mx-chain-core-go/data/block" "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-core-go/data/transaction" @@ -40,11 +41,11 @@ func TestDelegateUnDelegateAndWithdraw(t *testing.T) { // delegate delegatedValue, _ := big.NewInt(0).SetString("200000000000000000000", 10) - pool := &outport.Pool{ - Logs: []*coreData.LogData{ + pool := &outport.TransactionPool{ + Logs: []*outport.LogData{ { - TxHash: "h1", - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ Address: decodeAddress("erd1qqqqqqqqqqqqqqqpqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqhllllsajxzat"), Events: []*transaction.Event{ 
{ @@ -59,23 +60,23 @@ func TestDelegateUnDelegateAndWithdraw(t *testing.T) { }, } - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) ids := []string{"9v/pLAXxUZJ4Oy1U+x5al/Xg5sebh1dYCRTeZwg/u68="} genericResponse := &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.DelegatorsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.DelegatorsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/delegators/delegator-after-delegate.json"), string(genericResponse.Docs[0].Source)) // unDelegate 1 unDelegatedValue, _ := big.NewInt(0).SetString("50000000000000000000", 10) totalDelegation, _ := big.NewInt(0).SetString("150000000000000000000", 10) - pool = &outport.Pool{ - Logs: []*coreData.LogData{ + pool = &outport.TransactionPool{ + Logs: []*outport.LogData{ { - TxHash: "h2", - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h2")), + Log: &transaction.Log{ Address: decodeAddress("erd1qqqqqqqqqqqqqqqpqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqhllllsajxzat"), Events: []*transaction.Event{ { @@ -91,21 +92,21 @@ func TestDelegateUnDelegateAndWithdraw(t *testing.T) { } header.TimeStamp = 5050 - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) - err = esClient.DoMultiGet(ids, indexerdata.DelegatorsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.DelegatorsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/delegators/delegator-after-un-delegate-1.json"), string(genericResponse.Docs[0].Source)) // unDelegate 2 unDelegatedValue, _ = big.NewInt(0).SetString("25500000000000000000", 
10) totalDelegation, _ = big.NewInt(0).SetString("124500000000000000000", 10) - pool = &outport.Pool{ - Logs: []*coreData.LogData{ + pool = &outport.TransactionPool{ + Logs: []*outport.LogData{ { - TxHash: "h3", - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h3")), + Log: &transaction.Log{ Address: decodeAddress("erd1qqqqqqqqqqqqqqqpqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqhllllsajxzat"), Events: []*transaction.Event{ { @@ -121,10 +122,10 @@ func TestDelegateUnDelegateAndWithdraw(t *testing.T) { } header.TimeStamp = 5060 - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) - err = esClient.DoMultiGet(ids, indexerdata.DelegatorsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.DelegatorsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/delegators/delegator-after-un-delegate-2.json"), string(genericResponse.Docs[0].Source)) time.Sleep(time.Second) @@ -135,17 +136,17 @@ func TestDelegateUnDelegateAndWithdraw(t *testing.T) { require.Nil(t, err) time.Sleep(time.Second) - err = esClient.DoMultiGet(ids, indexerdata.DelegatorsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.DelegatorsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/delegators/delegator-after-revert.json"), string(genericResponse.Docs[0].Source)) // withdraw withdrawValue, _ := big.NewInt(0).SetString("725500000000000000000", 10) - pool = &outport.Pool{ - Logs: []*coreData.LogData{ + pool = &outport.TransactionPool{ + Logs: []*outport.LogData{ { - TxHash: "h4", - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h4")), + Log: &transaction.Log{ Address: decodeAddress("erd1qqqqqqqqqqqqqqqpqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqhllllsajxzat"), Events: 
[]*transaction.Event{ { @@ -161,10 +162,10 @@ func TestDelegateUnDelegateAndWithdraw(t *testing.T) { } header.TimeStamp = 5070 - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) - err = esClient.DoMultiGet(ids, indexerdata.DelegatorsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.DelegatorsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/delegators/delegator-after-withdraw.json"), string(genericResponse.Docs[0].Source)) } diff --git a/integrationtests/esdtTransfer_test.go b/integrationtests/esdtTransfer_test.go index 81c98778..285802d8 100644 --- a/integrationtests/esdtTransfer_test.go +++ b/integrationtests/esdtTransfer_test.go @@ -3,11 +3,11 @@ package integrationtests import ( + "context" "encoding/hex" "math/big" "testing" - coreData "github.com/multiversx/mx-chain-core-go/data" dataBlock "github.com/multiversx/mx-chain-core-go/data/block" "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-core-go/data/smartContractResult" @@ -84,24 +84,31 @@ func TestESDTTransferTooMuchGasProvided(t *testing.T) { } initialPaidFee, _ := big.NewInt(0).SetString("104000110000000", 10) - tx := outport.NewTransactionHandlerWithGasAndFee(txESDT, 104011, big.NewInt(104000110000000)) - tx.SetInitialPaidFee(initialPaidFee) + txInfo := &outport.TxInfo{ + Transaction: txESDT, + FeeInfo: &outport.FeeInfo{ + GasUsed: 104011, + Fee: initialPaidFee, + InitialPaidFee: big.NewInt(104000110000000), + }, + ExecutionOrder: 0, + } - pool := &outport.Pool{ - Txs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(txHash): tx, + pool := &outport.TransactionPool{ + Transactions: map[string]*outport.TxInfo{ + hex.EncodeToString(txHash): txInfo, }, - Scrs: 
map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(scrHash2): outport.NewTransactionHandlerWithGasAndFee(scr2, 0, big.NewInt(0)), - string(scrHash1): outport.NewTransactionHandlerWithGasAndFee(scr1, 0, big.NewInt(0)), + SmartContractResults: map[string]*outport.SCRInfo{ + hex.EncodeToString(scrHash2): {SmartContractResult: scr2, FeeInfo: &outport.FeeInfo{}}, + hex.EncodeToString(scrHash1): {SmartContractResult: scr1, FeeInfo: &outport.FeeInfo{}}, }, } - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) ids := []string{hex.EncodeToString(txHash)} genericResponse := &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.TransactionsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TransactionsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/esdtTransfer/esdt-transfer.json"), string(genericResponse.Docs[0].Source)) diff --git a/integrationtests/issueTokenAndSetRoles_test.go b/integrationtests/issueTokenAndSetRoles_test.go index 8e4f8a42..31ae30ca 100644 --- a/integrationtests/issueTokenAndSetRoles_test.go +++ b/integrationtests/issueTokenAndSetRoles_test.go @@ -3,11 +3,13 @@ package integrationtests import ( + "context" + "encoding/hex" "math/big" "testing" "github.com/multiversx/mx-chain-core-go/core" - coreData "github.com/multiversx/mx-chain-core-go/data" + "github.com/multiversx/mx-chain-core-go/data/alteredAccount" dataBlock "github.com/multiversx/mx-chain-core-go/data/block" "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-core-go/data/transaction" @@ -33,11 +35,12 @@ func TestIssueTokenAndSetRole(t *testing.T) { address1 := "erd1k04pxr6c0gvlcx4rd5fje0a4uy33axqxwz0fpcrgtfdy3nrqauqqgvxprv" address2 := 
"erd1suhxyflu4w4pqdxmushpxzc6a3qszr89m8uswzqcvyh0mh9mzxwqdwkm0x" - pool := &outport.Pool{ - Logs: []*coreData.LogData{ + pool := &outport.TransactionPool{ + Logs: []*outport.LogData{ { - TxHash: "h1", - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ + Address: decodeAddress(address1), Events: []*transaction.Event{ { Address: decodeAddress(address1), @@ -56,21 +59,22 @@ func TestIssueTokenAndSetRole(t *testing.T) { }, } - err = esProc.SaveTransactions(body, header, pool, map[string]*outport.AlteredAccount{}, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, map[string]*alteredAccount.AlteredAccount{}, testNumOfShards)) require.Nil(t, err) ids := []string{"TOK-abcd"} genericResponse := &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.TokensIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TokensIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/issueTokenAndSetRoles/token-after-issue-ok.json"), string(genericResponse.Docs[0].Source)) // SET ROLES - pool = &outport.Pool{ - Logs: []*coreData.LogData{ + pool = &outport.TransactionPool{ + Logs: []*outport.LogData{ { - TxHash: "h1", - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ + Address: decodeAddress(address1), Events: []*transaction.Event{ { Address: decodeAddress(address1), @@ -85,21 +89,22 @@ func TestIssueTokenAndSetRole(t *testing.T) { } header.TimeStamp = 10000 - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) ids = []string{"TOK-abcd"} genericResponse = &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.TokensIndex, true, genericResponse) + err = 
esClient.DoMultiGet(context.Background(), ids, indexerdata.TokensIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/issueTokenAndSetRoles/token-after-set-role.json"), string(genericResponse.Docs[0].Source)) // TRANSFER ROLE - pool = &outport.Pool{ - Logs: []*coreData.LogData{ + pool = &outport.TransactionPool{ + Logs: []*outport.LogData{ { - TxHash: "h1", - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ + Address: decodeAddress(address1), Events: []*transaction.Event{ { Address: decodeAddress(address1), @@ -118,21 +123,22 @@ func TestIssueTokenAndSetRole(t *testing.T) { } header.TimeStamp = 10000 - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) ids = []string{"TOK-abcd"} genericResponse = &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.TokensIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TokensIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/issueTokenAndSetRoles/token-after-transfer-role.json"), string(genericResponse.Docs[0].Source)) // UNSET ROLES - pool = &outport.Pool{ - Logs: []*coreData.LogData{ + pool = &outport.TransactionPool{ + Logs: []*outport.LogData{ { - TxHash: "h1", - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ + Address: decodeAddress(address1), Events: []*transaction.Event{ { Address: decodeAddress(address1), @@ -147,12 +153,12 @@ func TestIssueTokenAndSetRole(t *testing.T) { } header.TimeStamp = 10000 - err = esProc.SaveTransactions(body, header, pool, map[string]*outport.AlteredAccount{}, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, 
map[string]*alteredAccount.AlteredAccount{}, testNumOfShards)) require.Nil(t, err) ids = []string{"TOK-abcd"} genericResponse = &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.TokensIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TokensIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/issueTokenAndSetRoles/token-after-unset-role.json"), string(genericResponse.Docs[0].Source)) } @@ -175,11 +181,12 @@ func TestIssueSetRolesEventAndAfterTokenIssue(t *testing.T) { address1 := "erd1k04pxr6c0gvlcx4rd5fje0a4uy33axqxwz0fpcrgtfdy3nrqauqqgvxprv" // SET ROLES - pool := &outport.Pool{ - Logs: []*coreData.LogData{ + pool := &outport.TransactionPool{ + Logs: []*outport.LogData{ { - TxHash: "h1", - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ + Address: decodeAddress(address1), Events: []*transaction.Event{ { Address: decodeAddress(address1), @@ -194,21 +201,22 @@ func TestIssueSetRolesEventAndAfterTokenIssue(t *testing.T) { } header.TimeStamp = 10000 - err = esProc.SaveTransactions(body, header, pool, map[string]*outport.AlteredAccount{}, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, map[string]*alteredAccount.AlteredAccount{}, testNumOfShards)) require.Nil(t, err) ids := []string{"TTT-abcd"} genericResponse := &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.TokensIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TokensIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/issueTokenAndSetRoles/token-after-set-roles-first.json"), string(genericResponse.Docs[0].Source)) // ISSUE - pool = &outport.Pool{ - Logs: []*coreData.LogData{ + pool = &outport.TransactionPool{ + Logs: []*outport.LogData{ { - TxHash: "h1", - LogHandler: &transaction.Log{ + TxHash: 
hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ + Address: decodeAddress(address1), Events: []*transaction.Event{ { Address: decodeAddress(address1), @@ -222,12 +230,12 @@ func TestIssueSetRolesEventAndAfterTokenIssue(t *testing.T) { }, } - err = esProc.SaveTransactions(body, header, pool, map[string]*outport.AlteredAccount{}, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, map[string]*alteredAccount.AlteredAccount{}, testNumOfShards)) require.Nil(t, err) ids = []string{"TTT-abcd"} genericResponse = &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.TokensIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TokensIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/issueTokenAndSetRoles/token-after-set-roles-and-issue.json"), string(genericResponse.Docs[0].Source)) } diff --git a/integrationtests/issueToken_test.go b/integrationtests/issueToken_test.go index fadfdee4..04243666 100644 --- a/integrationtests/issueToken_test.go +++ b/integrationtests/issueToken_test.go @@ -3,11 +3,12 @@ package integrationtests import ( + "context" + "encoding/hex" "math/big" "testing" "github.com/multiversx/mx-chain-core-go/core" - coreData "github.com/multiversx/mx-chain-core-go/data" dataBlock "github.com/multiversx/mx-chain-core-go/data/block" "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-core-go/data/transaction" @@ -33,11 +34,12 @@ func TestIssueTokenAndTransferOwnership(t *testing.T) { address1 := "erd1v7e552pz9py4hv6raan0c4jflez3e6csdmzcgrncg0qrnk4tywvsqx0h5j" address2 := "erd1acjlnuhkd8773sqhmw85r0ur4lcyuqgm0n69h9ttxh0gwxtuuzxq4lckh6" - pool := &outport.Pool{ - Logs: []*coreData.LogData{ + pool := &outport.TransactionPool{ + Logs: []*outport.LogData{ { - TxHash: "h1", - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ + 
Address: decodeAddress(address1), Events: []*transaction.Event{ { Address: decodeAddress(address1), @@ -51,25 +53,26 @@ func TestIssueTokenAndTransferOwnership(t *testing.T) { }, } - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) ids := []string{"SSSS-abcd"} genericResponse := &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.TokensIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TokensIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/issueToken/token-semi.json"), string(genericResponse.Docs[0].Source)) - err = esClient.DoMultiGet(ids, indexerdata.ESDTsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.ESDTsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/issueToken/token-semi.json"), string(genericResponse.Docs[0].Source)) // transfer ownership - pool = &outport.Pool{ - Logs: []*coreData.LogData{ + pool = &outport.TransactionPool{ + Logs: []*outport.LogData{ { - TxHash: "h1", - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ + Address: decodeAddress(address1), Events: []*transaction.Event{ { Address: decodeAddress(address1), @@ -84,23 +87,24 @@ func TestIssueTokenAndTransferOwnership(t *testing.T) { } header.TimeStamp = 10000 - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) - err = esClient.DoMultiGet(ids, indexerdata.TokensIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TokensIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, 
readExpectedResult("./testdata/issueToken/token-semi-after-transfer-ownership.json"), string(genericResponse.Docs[0].Source)) - err = esClient.DoMultiGet(ids, indexerdata.ESDTsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.ESDTsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/issueToken/token-semi-after-transfer-ownership.json"), string(genericResponse.Docs[0].Source)) // do pause - pool = &outport.Pool{ - Logs: []*coreData.LogData{ + pool = &outport.TransactionPool{ + Logs: []*outport.LogData{ { - TxHash: "h1", - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ + Address: decodeAddress(address1), Events: []*transaction.Event{ { Address: decodeAddress(address1), @@ -115,19 +119,20 @@ func TestIssueTokenAndTransferOwnership(t *testing.T) { } header.TimeStamp = 10000 - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) - err = esClient.DoMultiGet(ids, indexerdata.TokensIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TokensIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/issueToken/token-semi-after-pause.json"), string(genericResponse.Docs[0].Source)) // do unPause - pool = &outport.Pool{ - Logs: []*coreData.LogData{ + pool = &outport.TransactionPool{ + Logs: []*outport.LogData{ { - TxHash: "h1", - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ + Address: decodeAddress(address1), Events: []*transaction.Event{ { Address: decodeAddress(address1), @@ -142,10 +147,10 @@ func TestIssueTokenAndTransferOwnership(t *testing.T) { } header.TimeStamp = 10000 - err = esProc.SaveTransactions(body, header, pool, nil, false, 
testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) - err = esClient.DoMultiGet(ids, indexerdata.TokensIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TokensIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/issueToken/token-semi-after-un-pause.json"), string(genericResponse.Docs[0].Source)) } diff --git a/integrationtests/logsCrossShard_test.go b/integrationtests/logsCrossShard_test.go index cb3a3087..aa62f94a 100644 --- a/integrationtests/logsCrossShard_test.go +++ b/integrationtests/logsCrossShard_test.go @@ -3,12 +3,13 @@ package integrationtests import ( + "context" "encoding/hex" "math/big" "testing" "github.com/multiversx/mx-chain-core-go/core" - coreData "github.com/multiversx/mx-chain-core-go/data" + "github.com/multiversx/mx-chain-core-go/data/alteredAccount" dataBlock "github.com/multiversx/mx-chain-core-go/data/block" "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-core-go/data/transaction" @@ -37,10 +38,11 @@ func TestIndexLogSourceShardAndAfterDestinationAndAgainSource(t *testing.T) { logID := hex.EncodeToString([]byte("cross-log")) // index on source - pool := &outport.Pool{ - Logs: []*coreData.LogData{ + pool := &outport.TransactionPool{ + Logs: []*outport.LogData{ { - LogHandler: &transaction.Log{ + TxHash: logID, + Log: &transaction.Log{ Address: decodeAddress(address1), Events: []*transaction.Event{ { @@ -51,16 +53,15 @@ func TestIndexLogSourceShardAndAfterDestinationAndAgainSource(t *testing.T) { nil, }, }, - TxHash: "cross-log", }, }, } - err = esProc.SaveTransactions(body, header, pool, map[string]*outport.AlteredAccount{}, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, map[string]*alteredAccount.AlteredAccount{}, testNumOfShards)) require.Nil(t, err) ids := 
[]string{logID} genericResponse := &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.LogsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.LogsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/logsCrossShard/log-at-source.json"), @@ -72,10 +73,11 @@ func TestIndexLogSourceShardAndAfterDestinationAndAgainSource(t *testing.T) { Round: 50, TimeStamp: 6040, } - pool = &outport.Pool{ - Logs: []*coreData.LogData{ + pool = &outport.TransactionPool{ + Logs: []*outport.LogData{ { - LogHandler: &transaction.Log{ + TxHash: logID, + Log: &transaction.Log{ Address: decodeAddress(address1), Events: []*transaction.Event{ { @@ -92,14 +94,13 @@ func TestIndexLogSourceShardAndAfterDestinationAndAgainSource(t *testing.T) { nil, }, }, - TxHash: "cross-log", }, }, } - err = esProc.SaveTransactions(body, header, pool, map[string]*outport.AlteredAccount{}, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, map[string]*alteredAccount.AlteredAccount{}, testNumOfShards)) require.Nil(t, err) - err = esClient.DoMultiGet(ids, indexerdata.LogsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.LogsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/logsCrossShard/log-at-destination.json"), @@ -111,10 +112,11 @@ func TestIndexLogSourceShardAndAfterDestinationAndAgainSource(t *testing.T) { Round: 50, TimeStamp: 5000, } - pool = &outport.Pool{ - Logs: []*coreData.LogData{ + pool = &outport.TransactionPool{ + Logs: []*outport.LogData{ { - LogHandler: &transaction.Log{ + TxHash: logID, + Log: &transaction.Log{ Address: decodeAddress(address1), Events: []*transaction.Event{ { @@ -125,14 +127,13 @@ func TestIndexLogSourceShardAndAfterDestinationAndAgainSource(t *testing.T) { nil, }, }, - TxHash: "cross-log", }, }, } - err = 
esProc.SaveTransactions(body, header, pool, map[string]*outport.AlteredAccount{}, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, map[string]*alteredAccount.AlteredAccount{}, testNumOfShards)) require.Nil(t, err) - err = esClient.DoMultiGet(ids, indexerdata.LogsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.LogsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/logsCrossShard/log-at-destination.json"), @@ -158,7 +159,7 @@ func TestIndexLogSourceShardAndAfterDestinationAndAgainSource(t *testing.T) { err = esProc.RemoveTransactions(header, body) require.Nil(t, err) - err = esClient.DoMultiGet(ids, indexerdata.LogsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.LogsIndex, true, genericResponse) require.Nil(t, err) require.False(t, genericResponse.Docs[0].Found) diff --git a/integrationtests/miniblocks_test.go b/integrationtests/miniblocks_test.go new file mode 100644 index 00000000..3814964e --- /dev/null +++ b/integrationtests/miniblocks_test.go @@ -0,0 +1,124 @@ +//go:build integrationtests + +package integrationtests + +import ( + "context" + "testing" + + dataBlock "github.com/multiversx/mx-chain-core-go/data/block" + "github.com/multiversx/mx-chain-core-go/marshal" + indexerdata "github.com/multiversx/mx-chain-es-indexer-go/process/dataindexer" + "github.com/stretchr/testify/require" +) + +func TestIndexMiniBlocksOnSourceAndDestination(t *testing.T) { + setLogLevelDebug() + + esClient, err := createESClient(esURL) + require.Nil(t, err) + esProc, err := CreateElasticProcessor(esClient) + require.Nil(t, err) + + // index on the source shard + header := &dataBlock.Header{ + ShardID: 1, + TimeStamp: 1234, + } + miniBlocks := []*dataBlock.MiniBlock{ + { + SenderShardID: 1, + ReceiverShardID: 2, + }, + } + err = esProc.SaveMiniblocks(header, miniBlocks) + 
require.Nil(t, err) + mbHash := "11a1bb4065e16a2e93b2b5ac5957b7b69f1cfba7579b170b24f30dab2d3162e0" + ids := []string{mbHash} + genericResponse := &GenericResponse{} + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.MiniblocksIndex, true, genericResponse) + require.Nil(t, err) + require.JSONEq(t, readExpectedResult("./testdata/miniblocks/cross-miniblock-on-source.json"), string(genericResponse.Docs[0].Source)) + + // index on the destination shard + mbhr := &dataBlock.MiniBlockHeaderReserved{ + ExecutionType: dataBlock.ProcessingType(1), + } + + marshaller := &marshal.GogoProtoMarshalizer{} + mbhrBytes, _ := marshaller.Marshal(mbhr) + header = &dataBlock.Header{ + ShardID: 2, + TimeStamp: 1234, + MiniBlockHeaders: []dataBlock.MiniBlockHeader{ + { + Reserved: mbhrBytes, + }, + }, + } + + err = esProc.SaveMiniblocks(header, miniBlocks) + require.Nil(t, err) + + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.MiniblocksIndex, true, genericResponse) + require.Nil(t, err) + require.JSONEq(t, readExpectedResult("./testdata/miniblocks/cross-miniblock-on-destination.json"), string(genericResponse.Docs[0].Source)) +} + +func TestIndexMiniBlockFirstOnDestinationAndAfterSource(t *testing.T) { + setLogLevelDebug() + + esClient, err := createESClient(esURL) + require.Nil(t, err) + esProc, err := CreateElasticProcessor(esClient) + require.Nil(t, err) + + // index on destination + mbhr := &dataBlock.MiniBlockHeaderReserved{ + ExecutionType: dataBlock.ProcessingType(2), + } + + marshaller := &marshal.GogoProtoMarshalizer{} + mbhrBytes, _ := marshaller.Marshal(mbhr) + header := &dataBlock.Header{ + ShardID: 0, + TimeStamp: 54321, + MiniBlockHeaders: []dataBlock.MiniBlockHeader{ + { + Reserved: mbhrBytes, + }, + }, + } + miniBlocks := []*dataBlock.MiniBlock{ + { + SenderShardID: 2, + ReceiverShardID: 0, + }, + } + + err = esProc.SaveMiniblocks(header, miniBlocks) + require.Nil(t, err) + genericResponse := &GenericResponse{} + + ids := 
[]string{"2f3ee0ff3b6426916df3b123a10f425b7e2027e2ae8d231229d27b12aa522ade"} + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.MiniblocksIndex, true, genericResponse) + require.Nil(t, err) + require.JSONEq(t, readExpectedResult("./testdata/miniblocks/cross-miniblock-on-destination-first.json"), string(genericResponse.Docs[0].Source)) + + // index on source + mbhr = &dataBlock.MiniBlockHeaderReserved{ + ExecutionType: dataBlock.ProcessingType(0), + } + mbhrBytes, _ = marshaller.Marshal(mbhr) + header.ShardID = 2 + header.MiniBlockHeaders = []dataBlock.MiniBlockHeader{ + { + Reserved: mbhrBytes, + }, + } + err = esProc.SaveMiniblocks(header, miniBlocks) + require.Nil(t, err) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.MiniblocksIndex, true, genericResponse) + require.Nil(t, err) + require.JSONEq(t, readExpectedResult("./testdata/miniblocks/cross-miniblock-on-source-second.json"), string(genericResponse.Docs[0].Source)) +} diff --git a/integrationtests/multiTransferWithScCallAndErrorSignaledBySC_test.go b/integrationtests/multiTransferWithScCallAndErrorSignaledBySC_test.go index e5ef68ac..9c037646 100644 --- a/integrationtests/multiTransferWithScCallAndErrorSignaledBySC_test.go +++ b/integrationtests/multiTransferWithScCallAndErrorSignaledBySC_test.go @@ -3,11 +3,11 @@ package integrationtests import ( + "context" "encoding/hex" "math/big" "testing" - coreData "github.com/multiversx/mx-chain-core-go/data" dataBlock "github.com/multiversx/mx-chain-core-go/data/block" "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-core-go/data/smartContractResult" @@ -64,7 +64,7 @@ func TestMultiTransferCrossShardAndScCallErrorSignaledBySC(t *testing.T) { OriginalTxHash: txHash, } - tx := outport.NewTransactionHandlerWithGasAndFee(&transaction.Transaction{ + tx := &transaction.Transaction{ Nonce: 79, SndAddr: decodeAddress(address1), RcvAddr: decodeAddress(address1), @@ -72,22 +72,31 @@ func 
TestMultiTransferCrossShardAndScCallErrorSignaledBySC(t *testing.T) { GasPrice: 1000000000, Data: []byte("MultiESDTNFTTransfer@000000000000000005005ebeb3515cb42056a81d42adaf756a3f63a360bfb055@02@5745474c442d626434643739@@38e62046fb1a0000@584d45582d666461333535@07@048907e58284c28e898e29@6164644c697175696469747950726f7879@00000000000000000500ebd304c2f34a6b3f6a57c133ab7b8c6f81dc40155483@38d78f595785c000@0487deac313c6f6b111906"), Value: big.NewInt(0), - }, 150000000, big.NewInt(1904415000000000)) - tx.SetInitialPaidFee(big.NewInt(1904415000000000)) + } + + txInfo := &outport.TxInfo{ + Transaction: tx, + FeeInfo: &outport.FeeInfo{ + GasUsed: 150000000, + Fee: big.NewInt(1904415000000000), + InitialPaidFee: big.NewInt(1904415000000000), + }, + ExecutionOrder: 0, + } - pool := &outport.Pool{ - Txs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(txHash): tx, + pool := &outport.TransactionPool{ + Transactions: map[string]*outport.TxInfo{ + hex.EncodeToString(txHash): txInfo, }, - Scrs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(scrHash1): outport.NewTransactionHandlerWithGasAndFee(scr1, 0, big.NewInt(0)), + SmartContractResults: map[string]*outport.SCRInfo{ + hex.EncodeToString(scrHash1): {SmartContractResult: scr1, FeeInfo: &outport.FeeInfo{}}, }, } - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) ids := []string{hex.EncodeToString(txHash)} genericResponse := &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.TransactionsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TransactionsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, @@ -126,15 +135,15 @@ func TestMultiTransferCrossShardAndScCallErrorSignaledBySC(t *testing.T) { ReturnMessage: []byte("error signalled by smartcontract"), } - pool = 
&outport.Pool{ - Scrs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(scrHash1): outport.NewTransactionHandlerWithGasAndFee(scr1, 0, big.NewInt(0)), - string(scrHash2): outport.NewTransactionHandlerWithGasAndFee(scr2, 0, big.NewInt(0)), + pool = &outport.TransactionPool{ + SmartContractResults: map[string]*outport.SCRInfo{ + hex.EncodeToString(scrHash1): {SmartContractResult: scr1, FeeInfo: &outport.FeeInfo{}}, + hex.EncodeToString(scrHash2): {SmartContractResult: scr2, FeeInfo: &outport.FeeInfo{}}, }, - Logs: []*coreData.LogData{ + Logs: []*outport.LogData{ { - TxHash: string(scrHash1), - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString(scrHash1), + Log: &transaction.Log{ Address: decodeAddress(address2), Events: []*transaction.Event{ { @@ -151,12 +160,12 @@ func TestMultiTransferCrossShardAndScCallErrorSignaledBySC(t *testing.T) { }, } - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) ids = []string{hex.EncodeToString(txHash)} genericResponse = &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.TransactionsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TransactionsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, diff --git a/integrationtests/nftIssueCreateBurn_test.go b/integrationtests/nftIssueCreateBurn_test.go index 341ed977..02fda26b 100644 --- a/integrationtests/nftIssueCreateBurn_test.go +++ b/integrationtests/nftIssueCreateBurn_test.go @@ -3,12 +3,13 @@ package integrationtests import ( + "context" + "encoding/hex" "encoding/json" "math/big" "testing" "github.com/multiversx/mx-chain-core-go/core" - coreData "github.com/multiversx/mx-chain-core-go/data" dataBlock "github.com/multiversx/mx-chain-core-go/data/block" "github.com/multiversx/mx-chain-core-go/data/esdt" 
"github.com/multiversx/mx-chain-core-go/data/outport" @@ -36,11 +37,12 @@ func TestIssueNFTCreateAndBurn(t *testing.T) { } address1 := "erd1ju8pkvg57cwdmjsjx58jlmnuf4l9yspstrhr9tgsrt98n9edpm2qtlgy99" - pool := &outport.Pool{ - Logs: []*coreData.LogData{ + pool := &outport.TransactionPool{ + Logs: []*outport.LogData{ { - TxHash: "h1", - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ + Address: decodeAddress(address1), Events: []*transaction.Event{ { Address: decodeAddress(address1), @@ -54,12 +56,12 @@ func TestIssueNFTCreateAndBurn(t *testing.T) { }, } - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) ids := []string{"NON-abcd"} genericResponse := &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.TokensIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TokensIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/nftIssueCreateBurn/non-fungible-after-issue.json"), string(genericResponse.Docs[0].Source)) @@ -80,11 +82,11 @@ func TestIssueNFTCreateAndBurn(t *testing.T) { } esdtDataBytes, _ := json.Marshal(esdtData) - pool = &outport.Pool{ - Logs: []*coreData.LogData{ + pool = &outport.TransactionPool{ + Logs: []*outport.LogData{ { - TxHash: "h1", - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ Address: decodeAddress(address1), Events: []*transaction.Event{ { @@ -99,12 +101,12 @@ func TestIssueNFTCreateAndBurn(t *testing.T) { }, } - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) ids = []string{"NON-abcd-02"} genericResponse = &GenericResponse{} - err = 
esClient.DoMultiGet(ids, indexerdata.TokensIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TokensIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/nftIssueCreateBurn/non-fungible-after-create.json"), string(genericResponse.Docs[0].Source)) @@ -116,11 +118,12 @@ func TestIssueNFTCreateAndBurn(t *testing.T) { ShardID: 0, } - pool = &outport.Pool{ - Logs: []*coreData.LogData{ + pool = &outport.TransactionPool{ + Logs: []*outport.LogData{ { - TxHash: "h1", - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ + Address: decodeAddress(address1), Events: []*transaction.Event{ { Address: decodeAddress(address1), @@ -134,12 +137,12 @@ func TestIssueNFTCreateAndBurn(t *testing.T) { }, } - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) ids = []string{"NON-abcd-02"} genericResponse = &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.TokensIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TokensIndex, true, genericResponse) require.Nil(t, err) require.False(t, genericResponse.Docs[0].Found) } diff --git a/integrationtests/nftTransferCrossShard_test.go b/integrationtests/nftTransferCrossShard_test.go index 0fc53429..45c961e7 100644 --- a/integrationtests/nftTransferCrossShard_test.go +++ b/integrationtests/nftTransferCrossShard_test.go @@ -3,11 +3,12 @@ package integrationtests import ( + "context" "encoding/hex" "math/big" "testing" - coreData "github.com/multiversx/mx-chain-core-go/data" + "github.com/multiversx/mx-chain-core-go/core" dataBlock "github.com/multiversx/mx-chain-core-go/data/block" "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-core-go/data/smartContractResult" @@ 
-63,7 +64,7 @@ func TestNFTTransferCrossShardWithSCCall(t *testing.T) { OriginalTxHash: txHash, } - tx := outport.NewTransactionHandlerWithGasAndFee(&transaction.Transaction{ + tx := &transaction.Transaction{ Nonce: 79, SndAddr: decodeAddress(address1), RcvAddr: decodeAddress(address1), @@ -71,23 +72,33 @@ func TestNFTTransferCrossShardWithSCCall(t *testing.T) { GasPrice: 1000000000, Data: []byte("ESDTNFTTransfer@4c4b4641524d2d336634663962@016534@6f1e6f01bc7627f5ae@00000000000000000500f1c8f2fdc58a63c6b201fc2ed629962d3dfa33fe7ceb@636f6d706f756e645265776172647350726f7879@000000000000000005004f79ec44bb13372b5ac9d996d749120f476427627ceb"), Value: big.NewInt(0), - }, 150000000, big.NewInt(1904415000000000)) - tx.SetInitialPaidFee(big.NewInt(1904415000000000)) + } - pool := &outport.Pool{ - Txs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(txHash): tx, + txInfo := &outport.TxInfo{ + Transaction: tx, + FeeInfo: &outport.FeeInfo{ + GasUsed: 150000000, + Fee: big.NewInt(1904415000000000), + InitialPaidFee: big.NewInt(1904415000000000), }, - Scrs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(scrHash1): outport.NewTransactionHandlerWithGasAndFee(scr1, 0, big.NewInt(0)), + ExecutionOrder: 0, + } + + pool := &outport.TransactionPool{ + Transactions: map[string]*outport.TxInfo{ + hex.EncodeToString(txHash): txInfo, + }, + SmartContractResults: map[string]*outport.SCRInfo{ + hex.EncodeToString(scrHash1): {SmartContractResult: scr1, FeeInfo: &outport.FeeInfo{}}, }, } - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) ids := []string{hex.EncodeToString(txHash)} genericResponse := &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.TransactionsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TransactionsIndex, true, 
genericResponse) require.Nil(t, err) require.JSONEq(t, @@ -108,24 +119,24 @@ func TestNFTTransferCrossShardWithSCCall(t *testing.T) { } scrWithRefund := []byte("scrWithRefund") refundValueBig, _ := big.NewInt(0).SetString("101676480000000", 10) - poolDstShard := &outport.Pool{ - Scrs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(scrHash1): outport.NewTransactionHandlerWithGasAndFee(scr1, 0, big.NewInt(0)), - string(scrWithRefund): outport.NewTransactionHandlerWithGasAndFee(&smartContractResult.SmartContractResult{ + poolDstShard := &outport.TransactionPool{ + SmartContractResults: map[string]*outport.SCRInfo{ + hex.EncodeToString(scrHash1): {SmartContractResult: scr1, FeeInfo: &outport.FeeInfo{}}, + hex.EncodeToString(scrWithRefund): {SmartContractResult: &smartContractResult.SmartContractResult{ SndAddr: decodeAddress(address2), RcvAddr: decodeAddress(address1), PrevTxHash: []byte("f639cb7a0231191e04ec19dcb1359bd93a03fe8dc4a28a80d00835c5d1c988f8"), OriginalTxHash: txHash, Value: refundValueBig, Data: []byte("@6f6b@017d15@0000000e4d45584641524d2d6239336536300000000000017d15000000097045173cc97554b65d@0178af"), - }, 139832352, big.NewInt(1802738520000000)), + }, FeeInfo: &outport.FeeInfo{GasUsed: 139832352, Fee: big.NewInt(1802738520000000)}}, }, } - err = esProc.SaveTransactions(bodyDstShard, header, poolDstShard, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(bodyDstShard, header, poolDstShard, nil, testNumOfShards)) require.Nil(t, err) - err = esClient.DoMultiGet(ids, indexerdata.TransactionsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TransactionsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, @@ -134,7 +145,7 @@ func TestNFTTransferCrossShardWithSCCall(t *testing.T) { ) genericResponse = &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.OperationsIndex, true, genericResponse) + err = 
esClient.DoMultiGet(context.Background(), ids, indexerdata.OperationsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/nftTransferCrossShard/op-nft-transfer-sc-call-after-refund.json"), @@ -187,7 +198,7 @@ func TestNFTTransferCrossShard(t *testing.T) { OriginalTxHash: txHash, } - tx := outport.NewTransactionHandlerWithGasAndFee(&transaction.Transaction{ + tx := &transaction.Transaction{ Nonce: 79, SndAddr: decodeAddress(address1), RcvAddr: decodeAddress(address1), @@ -195,16 +206,25 @@ func TestNFTTransferCrossShard(t *testing.T) { GasPrice: 1000000000, Data: []byte("ESDTNFTTransfer@536f6d657468696e672d616263646566@01@01@00000000000000000500a7a02771aa07090e607f02b25f4d6d241bff32b990a2"), Value: big.NewInt(0), - }, 963500, big.NewInt(235850000000000)) - tx.SetInitialPaidFee(big.NewInt(276215000000000)) + } + + txInfo := &outport.TxInfo{ + Transaction: tx, + FeeInfo: &outport.FeeInfo{ + GasUsed: 963500, + Fee: big.NewInt(235850000000000), + InitialPaidFee: big.NewInt(276215000000000), + }, + ExecutionOrder: 0, + } refundValueBig, _ := big.NewInt(0).SetString("40365000000000", 10) - pool := &outport.Pool{ - Txs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(txHash): tx, + pool := &outport.TransactionPool{ + Transactions: map[string]*outport.TxInfo{ + hex.EncodeToString(txHash): txInfo, }, - Scrs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(scrHash1): outport.NewTransactionHandlerWithGasAndFee(&smartContractResult.SmartContractResult{ + SmartContractResults: map[string]*outport.SCRInfo{ + hex.EncodeToString(scrHash1): {SmartContractResult: &smartContractResult.SmartContractResult{ Nonce: 80, Value: refundValueBig, GasPrice: 1000000000, @@ -213,16 +233,16 @@ func TestNFTTransferCrossShard(t *testing.T) { Data: []byte("@6f6b"), PrevTxHash: txHash, OriginalTxHash: txHash, - }, 0, big.NewInt(0)), - string(scrHash2): outport.NewTransactionHandlerWithGasAndFee(scr2, 0, big.NewInt(0)), 
+ }, FeeInfo: &outport.FeeInfo{}}, + hex.EncodeToString(scrHash2): {SmartContractResult: scr2, FeeInfo: &outport.FeeInfo{}}, }, } - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) ids := []string{hex.EncodeToString(txHash)} genericResponse := &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.TransactionsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TransactionsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, @@ -242,23 +262,37 @@ func TestNFTTransferCrossShard(t *testing.T) { }, } scr3WithErrHash := []byte("scrWithError") - poolDstShard := &outport.Pool{ - Scrs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(scrHash2): outport.NewTransactionHandlerWithGasAndFee(scr2, 0, big.NewInt(0)), - string(scr3WithErrHash): outport.NewTransactionHandlerWithGasAndFee(&smartContractResult.SmartContractResult{ + poolDstShard := &outport.TransactionPool{ + SmartContractResults: map[string]*outport.SCRInfo{ + hex.EncodeToString(scrHash2): {SmartContractResult: scr2, FeeInfo: &outport.FeeInfo{}}, + hex.EncodeToString(scr3WithErrHash): {SmartContractResult: &smartContractResult.SmartContractResult{ SndAddr: decodeAddress(address2), RcvAddr: decodeAddress(address1), PrevTxHash: []byte("1546eb9970a6dc1710b6528274e75d5095c1349706f4ff70f52a1f58e1156316"), OriginalTxHash: txHash, Data: 
[]byte("ESDTNFTTransfer@434f4c45435449452d323663313838@01@01@08011202000122e50108011204434f4f4c1a20e0f3ecf555f63f2d101241dfc98b4614aff9284edd50b46a1c6e36b83558744d20c4132a2e516d5a7961565631786a7866446255575a503178655a7676544d3156686f61346f594752444d706d4a727a52435a324368747470733a2f2f697066732e696f2f697066732f516d5a7961565631786a7866446255575a503178655a7676544d3156686f61346f594752444d706d4a727a52435a3a41746167733a436f6f6c3b6d657461646174613a516d5869417850396e535948515954546143357358717a4d32645856334142516145355241725932777a4e686179@75736572206572726f72"), - }, 0, big.NewInt(0)), + }, FeeInfo: &outport.FeeInfo{}}, + }, + Logs: []*outport.LogData{ + { + TxHash: hex.EncodeToString(scr3WithErrHash), + Log: &transaction.Log{ + Address: decodeAddress(address1), + Events: []*transaction.Event{ + { + Address: decodeAddress(address1), + Identifier: []byte(core.InternalVMErrorsOperation), + }, + }, + }, + }, }, } - err = esProc.SaveTransactions(bodyDstShard, header, poolDstShard, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(bodyDstShard, header, poolDstShard, nil, testNumOfShards)) require.Nil(t, err) - err = esClient.DoMultiGet(ids, indexerdata.TransactionsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TransactionsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, @@ -308,25 +342,39 @@ func TestNFTTransferCrossShardImportDBScenarioFirstIndexDestinationAfterSource(t }, } scr3WithErrHash := []byte("scrWithError") - poolDstShard := &outport.Pool{ - Scrs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(scrHash2): outport.NewTransactionHandlerWithGasAndFee(scr2, 0, big.NewInt(0)), - string(scr3WithErrHash): outport.NewTransactionHandlerWithGasAndFee(&smartContractResult.SmartContractResult{ + poolDstShard := &outport.TransactionPool{ + SmartContractResults: map[string]*outport.SCRInfo{ + hex.EncodeToString(scrHash2): {SmartContractResult: scr2, 
FeeInfo: &outport.FeeInfo{}}, + hex.EncodeToString(scr3WithErrHash): {SmartContractResult: &smartContractResult.SmartContractResult{ SndAddr: decodeAddress(address2), RcvAddr: decodeAddress(address1), PrevTxHash: []byte("1546eb9970a6dc1710b6528274e75d5095c1349706f4ff70f52a1f58e1156316"), OriginalTxHash: txHash, Data: []byte("ESDTNFTTransfer@434f4c4c454354494f4e2d323663313838@01@01@08011202000122e50108011204434f4f4c1a20e0f3ecf555f63f2d101241dfc98b4614aff9284edd50b46a1c6e36b83558744d20c4132a2e516d5a7961565631786a7866446255575a503178655a7676544d3156686f61346f594752444d706d4a727a52435a324368747470733a2f2f697066732e696f2f697066732f516d5a7961565631786a7866446255575a503178655a7676544d3156686f61346f594752444d706d4a727a52435a3a41746167733a436f6f6c3b6d657461646174613a516d5869417850396e535948515954546143357358717a4d32645856334142516145355241725932777a4e686179@75736572206572726f72"), - }, 0, big.NewInt(0)), + }, FeeInfo: &outport.FeeInfo{}}, + }, + Logs: []*outport.LogData{ + { + TxHash: hex.EncodeToString(scr3WithErrHash), + Log: &transaction.Log{ + Address: decodeAddress(address1), + Events: []*transaction.Event{ + { + Address: decodeAddress(address1), + Identifier: []byte(core.SignalErrorOperation), + }, + }, + }, + }, }, } ids := []string{hex.EncodeToString(txHash)} genericResponse := &GenericResponse{} - err = esProc.SaveTransactions(bodyDstShard, header, poolDstShard, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(bodyDstShard, header, poolDstShard, nil, testNumOfShards)) require.Nil(t, err) - err = esClient.DoMultiGet(ids, indexerdata.TransactionsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TransactionsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, @@ -353,7 +401,7 @@ func TestNFTTransferCrossShardImportDBScenarioFirstIndexDestinationAfterSource(t }, } - tx := outport.NewTransactionHandlerWithGasAndFee(&transaction.Transaction{ + tx := 
&transaction.Transaction{ Nonce: 79, SndAddr: decodeAddress(address1), RcvAddr: decodeAddress(address1), @@ -361,16 +409,25 @@ func TestNFTTransferCrossShardImportDBScenarioFirstIndexDestinationAfterSource(t GasPrice: 1000000000, Data: []byte("ESDTNFTTransfer@434f4c4c454354494f4e2d323663313838@01@01@00000000000000000500a7a02771aa07090e607f02b25f4d6d241bff32b990a2"), Value: big.NewInt(0), - }, 963500, big.NewInt(238820000000000)) - tx.SetInitialPaidFee(big.NewInt(595490000000000)) + } + + txInfo := &outport.TxInfo{ + Transaction: tx, + FeeInfo: &outport.FeeInfo{ + GasUsed: 963500, + Fee: big.NewInt(238820000000000), + InitialPaidFee: big.NewInt(595490000000000), + }, + ExecutionOrder: 0, + } refundValueBig, _ := big.NewInt(0).SetString("40365000000000", 10) - pool := &outport.Pool{ - Txs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(txHash): tx, + pool := &outport.TransactionPool{ + Transactions: map[string]*outport.TxInfo{ + hex.EncodeToString(txHash): txInfo, }, - Scrs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(scrHash1): outport.NewTransactionHandlerWithGasAndFee(&smartContractResult.SmartContractResult{ + SmartContractResults: map[string]*outport.SCRInfo{ + hex.EncodeToString(scrHash1): {SmartContractResult: &smartContractResult.SmartContractResult{ Nonce: 80, Value: refundValueBig, GasPrice: 1000000000, @@ -379,14 +436,14 @@ func TestNFTTransferCrossShardImportDBScenarioFirstIndexDestinationAfterSource(t Data: []byte("@6f6b"), PrevTxHash: txHash, OriginalTxHash: txHash, - }, 0, big.NewInt(0)), - string(scrHash2): outport.NewTransactionHandlerWithGasAndFee(scr2, 0, big.NewInt(0)), + }, FeeInfo: &outport.FeeInfo{}}, + hex.EncodeToString(scrHash2): {SmartContractResult: scr2, FeeInfo: &outport.FeeInfo{}}, }, } - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) - err = 
esClient.DoMultiGet(ids, indexerdata.TransactionsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TransactionsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, diff --git a/integrationtests/nftTransferCrossWithScCall_test.go b/integrationtests/nftTransferCrossWithScCall_test.go index d725495f..5210506b 100644 --- a/integrationtests/nftTransferCrossWithScCall_test.go +++ b/integrationtests/nftTransferCrossWithScCall_test.go @@ -3,11 +3,11 @@ package integrationtests import ( + "context" "encoding/hex" "math/big" "testing" - coreData "github.com/multiversx/mx-chain-core-go/data" dataBlock "github.com/multiversx/mx-chain-core-go/data/block" "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-core-go/data/smartContractResult" @@ -60,7 +60,7 @@ func TestNFTTransferCrossShardWithScCall(t *testing.T) { OriginalTxHash: txHash, } - tx := outport.NewTransactionHandlerWithGasAndFee(&transaction.Transaction{ + tx := &transaction.Transaction{ Nonce: 79, SndAddr: decodeAddress(address1), RcvAddr: decodeAddress(address1), @@ -68,23 +68,32 @@ func TestNFTTransferCrossShardWithScCall(t *testing.T) { GasPrice: 1000000000, Data: []byte("ESDTNFTTransfer@4d45584641524d2d636362323532@078b@0347543e5b59c9be8670@00000000000000000500a7a02771aa07090e607f02b25f4d6d241bff32b990a2@636c61696d52657761726473"), Value: big.NewInt(0), - }, 5000000, big.NewInt(595490000000000)) - tx.SetInitialPaidFee(big.NewInt(595490000000000)) + } + + txInfo := &outport.TxInfo{ + Transaction: tx, + FeeInfo: &outport.FeeInfo{ + GasUsed: 5000000, + Fee: big.NewInt(595490000000000), + InitialPaidFee: big.NewInt(595490000000000), + }, + ExecutionOrder: 0, + } - pool := &outport.Pool{ - Txs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(txHash): tx, + pool := &outport.TransactionPool{ + Transactions: map[string]*outport.TxInfo{ + hex.EncodeToString(txHash): txInfo, }, - Scrs: 
map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(scrHash2): outport.NewTransactionHandlerWithGasAndFee(scr2, 0, big.NewInt(0)), + SmartContractResults: map[string]*outport.SCRInfo{ + hex.EncodeToString(scrHash2): {SmartContractResult: scr2, FeeInfo: &outport.FeeInfo{}}, }, } - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) ids := []string{hex.EncodeToString(txHash)} genericResponse := &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.TransactionsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TransactionsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/nftTransferCrossShardWithScCall/cross-shard-transfer-with-sc-call.json"), diff --git a/integrationtests/relayedTx_test.go b/integrationtests/relayedTx_test.go index 9943a4de..67b758bc 100644 --- a/integrationtests/relayedTx_test.go +++ b/integrationtests/relayedTx_test.go @@ -3,11 +3,11 @@ package integrationtests import ( + "context" "encoding/hex" "math/big" "testing" - coreData "github.com/multiversx/mx-chain-core-go/data" dataBlock "github.com/multiversx/mx-chain-core-go/data/block" "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-core-go/data/smartContractResult" @@ -73,23 +73,30 @@ func TestRelayedTransactionGasUsedCrossShard(t *testing.T) { OriginalTxHash: txHash, } - tx := outport.NewTransactionHandlerWithGasAndFee(initialTx, 16610000, big.NewInt(1760000000000000)) - tx.SetInitialPaidFee(big.NewInt(1760000000000000)) + txInfo := &outport.TxInfo{ + Transaction: initialTx, + FeeInfo: &outport.FeeInfo{ + GasUsed: 16610000, + Fee: big.NewInt(1760000000000000), + InitialPaidFee: big.NewInt(1760000000000000), + }, + ExecutionOrder: 0, + } - pool := &outport.Pool{ - Txs: 
map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(txHash): tx, + pool := &outport.TransactionPool{ + Transactions: map[string]*outport.TxInfo{ + hex.EncodeToString(txHash): txInfo, }, - Scrs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(scrHash1): outport.NewTransactionHandlerWithGasAndFee(scr1, 0, big.NewInt(0)), + SmartContractResults: map[string]*outport.SCRInfo{ + hex.EncodeToString(scrHash1): {SmartContractResult: scr1, FeeInfo: &outport.FeeInfo{}}, }, } - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) ids := []string{hex.EncodeToString(txHash)} genericResponse := &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.TransactionsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TransactionsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, @@ -110,10 +117,10 @@ func TestRelayedTransactionGasUsedCrossShard(t *testing.T) { } scrWithRefund := []byte("scrWithRefund") refundValueBig, _ := big.NewInt(0).SetString("86271830000000", 10) - poolDstShard := &outport.Pool{ - Scrs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(scrHash1): outport.NewTransactionHandlerWithGasAndFee(scr1, 0, big.NewInt(0)), - string(scrWithRefund): outport.NewTransactionHandlerWithGasAndFee(&smartContractResult.SmartContractResult{ + poolDstShard := &outport.TransactionPool{ + SmartContractResults: map[string]*outport.SCRInfo{ + hex.EncodeToString(scrHash1): {SmartContractResult: scr1, FeeInfo: &outport.FeeInfo{}}, + hex.EncodeToString(scrWithRefund): {SmartContractResult: &smartContractResult.SmartContractResult{ Nonce: 3, SndAddr: decodeAddress(address3), RcvAddr: decodeAddress(address1), @@ -122,14 +129,14 @@ func TestRelayedTransactionGasUsedCrossShard(t *testing.T) { Value: refundValueBig, Data: 
[]byte(""), ReturnMessage: []byte("gas refund for relayer"), - }, 7982817, big.NewInt(1673728170000000)), + }, FeeInfo: &outport.FeeInfo{GasUsed: 7982817, Fee: big.NewInt(1673728170000000)}}, }, } - err = esProc.SaveTransactions(bodyDstShard, header, poolDstShard, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(bodyDstShard, header, poolDstShard, nil, testNumOfShards)) require.Nil(t, err) - err = esClient.DoMultiGet(ids, indexerdata.TransactionsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TransactionsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, @@ -206,23 +213,31 @@ func TestRelayedTransactionIntraShard(t *testing.T) { Value: refundValueBig, } - tx := outport.NewTransactionHandlerWithGasAndFee(initialTx, 10556000, big.NewInt(2257820000000000)) - tx.SetInitialPaidFee(big.NewInt(2306320000000000)) - pool := &outport.Pool{ - Txs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(txHash): tx, + txInfo := &outport.TxInfo{ + Transaction: initialTx, + FeeInfo: &outport.FeeInfo{ + GasUsed: 10556000, + Fee: big.NewInt(2257820000000000), + InitialPaidFee: big.NewInt(2306320000000000), + }, + ExecutionOrder: 0, + } + + pool := &outport.TransactionPool{ + Transactions: map[string]*outport.TxInfo{ + hex.EncodeToString(txHash): txInfo, }, - Scrs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(scrHash1): outport.NewTransactionHandlerWithGasAndFee(scr1, 0, big.NewInt(0)), - string(scrHash2): outport.NewTransactionHandlerWithGasAndFee(scr2, 0, big.NewInt(0)), + SmartContractResults: map[string]*outport.SCRInfo{ + hex.EncodeToString(scrHash1): {SmartContractResult: scr1, FeeInfo: &outport.FeeInfo{}}, + hex.EncodeToString(scrHash2): {SmartContractResult: scr2, FeeInfo: &outport.FeeInfo{}}, }, } - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = 
esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) ids := []string{hex.EncodeToString(txHash)} genericResponse := &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.TransactionsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TransactionsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, diff --git a/integrationtests/scCallIntraShard_test.go b/integrationtests/scCallIntraShard_test.go index e9f44e4d..ce31024d 100644 --- a/integrationtests/scCallIntraShard_test.go +++ b/integrationtests/scCallIntraShard_test.go @@ -3,11 +3,12 @@ package integrationtests import ( + "context" "encoding/hex" "math/big" "testing" - coreData "github.com/multiversx/mx-chain-core-go/data" + "github.com/multiversx/mx-chain-core-go/core" dataBlock "github.com/multiversx/mx-chain-core-go/data/block" "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-core-go/data/smartContractResult" @@ -45,7 +46,7 @@ func TestTransactionWithSCCallFail(t *testing.T) { address1 := "erd1ure7ea247clj6yqjg80unz6xzjhlj2zwm4gtg6sudcmtsd2cw3xs74hasv" address2 := "erd1qqqqqqqqqqqqqqqpqqqqqqqqqqqqqqqqqqqqqqqqqqqqqfhllllscrt56r" refundValueBig, _ := big.NewInt(0).SetString("5000000000000000000", 10) - tx := outport.NewTransactionHandlerWithGasAndFee(&transaction.Transaction{ + tx := &transaction.Transaction{ Nonce: 46, SndAddr: decodeAddress(address1), RcvAddr: decodeAddress(address2), @@ -53,15 +54,24 @@ func TestTransactionWithSCCallFail(t *testing.T) { GasPrice: 1000000000, Data: []byte("delegate"), Value: refundValueBig, - }, 12000000, big.NewInt(181380000000000)) - tx.SetInitialPaidFee(big.NewInt(181380000000000)) + } + + txInfo := &outport.TxInfo{ + Transaction: tx, + FeeInfo: &outport.FeeInfo{ + GasUsed: 12000000, + Fee: big.NewInt(181380000000000), + InitialPaidFee: big.NewInt(181380000000000), + }, + ExecutionOrder: 0, + } - pool := 
&outport.Pool{ - Txs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(txHash): tx, + pool := &outport.TransactionPool{ + Transactions: map[string]*outport.TxInfo{ + hex.EncodeToString(txHash): txInfo, }, - Scrs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(scrHash1): outport.NewTransactionHandlerWithGasAndFee(&smartContractResult.SmartContractResult{ + SmartContractResults: map[string]*outport.SCRInfo{ + hex.EncodeToString(scrHash1): {SmartContractResult: &smartContractResult.SmartContractResult{ Nonce: 46, Value: refundValueBig, GasPrice: 0, @@ -71,15 +81,29 @@ func TestTransactionWithSCCallFail(t *testing.T) { PrevTxHash: txHash, OriginalTxHash: txHash, ReturnMessage: []byte("total delegation cap reached"), - }, 0, big.NewInt(0)), + }, FeeInfo: &outport.FeeInfo{}}, + }, + Logs: []*outport.LogData{ + { + TxHash: hex.EncodeToString(txHash), + Log: &transaction.Log{ + Address: decodeAddress(address1), + Events: []*transaction.Event{ + { + Address: decodeAddress(address1), + Identifier: []byte(core.SignalErrorOperation), + }, + }, + }, + }, }, } - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) ids := []string{hex.EncodeToString(txHash)} genericResponse := &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerData.TransactionsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerData.TransactionsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, @@ -116,7 +140,7 @@ func TestTransactionWithScCallSuccess(t *testing.T) { address1 := "erd1ure7ea247clj6yqjg80unz6xzjhlj2zwm4gtg6sudcmtsd2cw3xs74hasv" address2 := "erd1qqqqqqqqqqqqqqqpqqqqqqqqqqqqqqqqqqqqqqqqqqqqqfhllllscrt56r" - tx := outport.NewTransactionHandlerWithGasAndFee(&transaction.Transaction{ + tx := &transaction.Transaction{ Nonce: 101, SndAddr: 
decodeAddress(address1), RcvAddr: decodeAddress(address2), @@ -124,16 +148,25 @@ func TestTransactionWithScCallSuccess(t *testing.T) { GasPrice: 1000000000, Data: []byte("claimRewards"), Value: big.NewInt(0), - }, 33891715, big.NewInt(406237150000000)) - tx.SetInitialPaidFee(big.NewInt(2567320000000000)) + } + + txInfo := &outport.TxInfo{ + Transaction: tx, + FeeInfo: &outport.FeeInfo{ + GasUsed: 33891715, + Fee: big.NewInt(406237150000000), + InitialPaidFee: big.NewInt(2567320000000000), + }, + ExecutionOrder: 0, + } refundValueBig, _ := big.NewInt(0).SetString("2161082850000000", 10) - pool := &outport.Pool{ - Txs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(txHash): tx, + pool := &outport.TransactionPool{ + Transactions: map[string]*outport.TxInfo{ + hex.EncodeToString(txHash): txInfo, }, - Scrs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(scrHash1): outport.NewTransactionHandlerWithGasAndFee(&smartContractResult.SmartContractResult{ + SmartContractResults: map[string]*outport.SCRInfo{ + hex.EncodeToString(scrHash1): {SmartContractResult: &smartContractResult.SmartContractResult{ Nonce: 102, Value: refundValueBig, GasPrice: 1000000000, @@ -142,15 +175,15 @@ func TestTransactionWithScCallSuccess(t *testing.T) { Data: []byte("@6f6b"), PrevTxHash: txHash, OriginalTxHash: txHash, - }, 0, big.NewInt(0)), + }, FeeInfo: &outport.FeeInfo{}}, }, } - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) ids := []string{hex.EncodeToString(txHash)} genericResponse := &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerData.TransactionsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerData.TransactionsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, diff --git a/integrationtests/scCallWithIssueEsdt_test.go 
b/integrationtests/scCallWithIssueEsdt_test.go new file mode 100644 index 00000000..3c3b5548 --- /dev/null +++ b/integrationtests/scCallWithIssueEsdt_test.go @@ -0,0 +1,240 @@ +//go:build integrationtests + +package integrationtests + +import ( + "context" + "encoding/hex" + "math/big" + "testing" + + "github.com/multiversx/mx-chain-core-go/core" + dataBlock "github.com/multiversx/mx-chain-core-go/data/block" + "github.com/multiversx/mx-chain-core-go/data/outport" + "github.com/multiversx/mx-chain-core-go/data/smartContractResult" + "github.com/multiversx/mx-chain-core-go/data/transaction" + "github.com/multiversx/mx-chain-core-go/data/vm" + indexerData "github.com/multiversx/mx-chain-es-indexer-go/process/dataindexer" + "github.com/stretchr/testify/require" +) + +func TestScCallIntraShardWithIssueESDT(t *testing.T) { + setLogLevelDebug() + + esClient, err := createESClient(esURL) + require.Nil(t, err) + + esProc, err := CreateElasticProcessor(esClient) + require.Nil(t, err) + + txHash := []byte("txWithScCall") + header := &dataBlock.Header{ + Round: 50, + TimeStamp: 5040, + } + scrWithIssueHash := []byte("scrWithIssue") + body := &dataBlock.Body{ + MiniBlocks: dataBlock.MiniBlockSlice{ + { + Type: dataBlock.TxBlock, + SenderShardID: 0, + ReceiverShardID: 0, + TxHashes: [][]byte{txHash}, + }, + { + Type: dataBlock.SmartContractResultBlock, + SenderShardID: 0, + ReceiverShardID: core.MetachainShardId, + TxHashes: [][]byte{scrWithIssueHash}, + }, + }, + } + + sndAddress := "erd148m2sx48mfm8322c2kpfmgj78g5j0x7r6z6y4z8j28qk45a74nwq5pq2ts" + contractAddress := "erd1qqqqqqqqqqqqqpgqahumqen35dr9k4rmcnd70mqt5t4mt7ey4nwqwjme9g" + tx := &transaction.Transaction{ + Nonce: 46, + SndAddr: decodeAddress(sndAddress), + RcvAddr: decodeAddress(contractAddress), + GasLimit: 75_000_000, + GasPrice: 1000000000, + Data: []byte("issueToken@4D79546573744E667464@544553544E4654"), + Value: big.NewInt(50000000000000000), + } + + esdtSystemSC := 
"erd1qqqqqqqqqqqqqqqpqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqzllls8a5w6u" + scrWithIssueESDT := &smartContractResult.SmartContractResult{ + Nonce: 0, + SndAddr: decodeAddress(contractAddress), + RcvAddr: decodeAddress(esdtSystemSC), + OriginalTxHash: txHash, + PrevTxHash: txHash, + Data: []byte("issueNonFungible@4d79546573744e667464@544553544e4654@63616e467265657a65@74727565@63616e57697065@74727565@63616e5061757365@74727565@63616e4368616e67654f776e6572@66616c7365@63616e55706772616465@66616c7365@63616e4164645370656369616c526f6c6573@74727565@58f638"), + Value: big.NewInt(50000000000000000), + CallType: vm.AsynchronousCall, + } + txInfo := &outport.TxInfo{ + Transaction: tx, + FeeInfo: &outport.FeeInfo{ + GasUsed: 75_000_000, + Fee: big.NewInt(867810000000000), + InitialPaidFee: big.NewInt(867810000000000), + }, + } + + scrInfoWithIssue := &outport.SCRInfo{SmartContractResult: scrWithIssueESDT, FeeInfo: &outport.FeeInfo{}} + pool := &outport.TransactionPool{ + Transactions: map[string]*outport.TxInfo{ + hex.EncodeToString(txHash): txInfo, + }, + SmartContractResults: map[string]*outport.SCRInfo{ + hex.EncodeToString(scrWithIssueHash): scrInfoWithIssue, + }, + } + + // ############################ + // execute on the source shard + // ############################ + + header.ShardID = 0 + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) + require.Nil(t, err) + + ids := []string{hex.EncodeToString(txHash)} + genericResponse := &GenericResponse{} + err = esClient.DoMultiGet(context.Background(), ids, indexerData.TransactionsIndex, true, genericResponse) + require.Nil(t, err) + + require.JSONEq(t, + readExpectedResult("./testdata/scCallWithIssueEsdt/tx-after-execution-on-source-shard.json"), + string(genericResponse.Docs[0].Source), + ) + + ids = []string{hex.EncodeToString(scrWithIssueHash)} + err = esClient.DoMultiGet(context.Background(), ids, indexerData.OperationsIndex, true, genericResponse) + require.Nil(t, err) + + 
require.JSONEq(t, + readExpectedResult("./testdata/scCallWithIssueEsdt/scr-with-issue-executed-on-source-shard.json"), + string(genericResponse.Docs[0].Source), + ) + + // ############################ + // execute scr on the destination shard (metachain) + // ############################ + + scrWithCallBackHash := []byte("scrWithCallback") + header.ShardID = core.MetachainShardId + body = &dataBlock.Body{ + MiniBlocks: dataBlock.MiniBlockSlice{ + { + Type: dataBlock.SmartContractResultBlock, + SenderShardID: 0, + ReceiverShardID: core.MetachainShardId, + TxHashes: [][]byte{scrWithIssueHash}, + }, + { + Type: dataBlock.SmartContractResultBlock, + SenderShardID: core.MetachainShardId, + ReceiverShardID: 0, + TxHashes: [][]byte{scrWithCallBackHash}, + }, + }, + } + + scrWithCallBack := &smartContractResult.SmartContractResult{ + Nonce: 0, + Value: big.NewInt(0), + SndAddr: decodeAddress(esdtSystemSC), + RcvAddr: decodeAddress(contractAddress), + Data: []byte("@00@544553544e46542d643964353463"), + OriginalTxHash: txHash, + PrevTxHash: scrWithIssueHash, + CallType: vm.AsynchronousCallBack, + } + scrInfoWithCallBack := &outport.SCRInfo{SmartContractResult: scrWithCallBack, FeeInfo: &outport.FeeInfo{}} + pool = &outport.TransactionPool{ + SmartContractResults: map[string]*outport.SCRInfo{ + hex.EncodeToString(scrWithIssueHash): scrInfoWithIssue, + hex.EncodeToString(scrWithCallBackHash): scrInfoWithCallBack, + }, + } + + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) + require.Nil(t, err) + + ids = []string{hex.EncodeToString(scrWithIssueHash), hex.EncodeToString(scrWithCallBackHash)} + err = esClient.DoMultiGet(context.Background(), ids, indexerData.OperationsIndex, true, genericResponse) + require.Nil(t, err) + + require.JSONEq(t, + readExpectedResult("./testdata/scCallWithIssueEsdt/scr-with-issue-executed-on-destination-shard.json"), + string(genericResponse.Docs[0].Source), + ) + require.JSONEq(t, + 
readExpectedResult("./testdata/scCallWithIssueEsdt/scr-with-callback-executed-on-source.json"), + string(genericResponse.Docs[1].Source), + ) + + // ############################ + // execute scr with callback on the destination shard (0) + // ############################ + header.ShardID = 0 + body = &dataBlock.Body{ + MiniBlocks: dataBlock.MiniBlockSlice{ + { + Type: dataBlock.SmartContractResultBlock, + SenderShardID: core.MetachainShardId, + ReceiverShardID: 0, + TxHashes: [][]byte{scrWithCallBackHash}, + }, + }, + } + pool = &outport.TransactionPool{ + SmartContractResults: map[string]*outport.SCRInfo{ + hex.EncodeToString(scrWithCallBackHash): scrInfoWithCallBack, + }, + Logs: []*outport.LogData{ + { + TxHash: hex.EncodeToString(scrWithCallBackHash), + Log: &transaction.Log{ + Address: decodeAddress(contractAddress), + Events: []*transaction.Event{ + { + Address: decodeAddress(contractAddress), + Identifier: []byte(core.SignalErrorOperation), + }, + { + Address: decodeAddress(contractAddress), + Identifier: []byte(core.InternalVMErrorsOperation), + }, + }, + }, + }, + }, + } + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) + require.Nil(t, err) + + ids = []string{hex.EncodeToString(txHash)} + err = esClient.DoMultiGet(context.Background(), ids, indexerData.TransactionsIndex, true, genericResponse) + require.Nil(t, err) + + require.JSONEq(t, + readExpectedResult("./testdata/scCallWithIssueEsdt/tx-after-execution-of-callback-on-destination-shard.json"), + string(genericResponse.Docs[0].Source), + ) + + ids = []string{hex.EncodeToString(txHash), hex.EncodeToString(scrWithCallBackHash)} + err = esClient.DoMultiGet(context.Background(), ids, indexerData.OperationsIndex, true, genericResponse) + require.Nil(t, err) + + require.JSONEq(t, + readExpectedResult("./testdata/scCallWithIssueEsdt/tx-in-op-index-execution-of-callback-on-destination-shard.json"), + string(genericResponse.Docs[0].Source), + ) + 
require.JSONEq(t, + readExpectedResult("./testdata/scCallWithIssueEsdt/scr-with-callback-executed-on-destination-shard.json"), + string(genericResponse.Docs[1].Source), + ) +} diff --git a/integrationtests/scDeploy_test.go b/integrationtests/scDeploy_test.go new file mode 100644 index 00000000..4a1603bb --- /dev/null +++ b/integrationtests/scDeploy_test.go @@ -0,0 +1,203 @@ +//go:build integrationtests + +package integrationtests + +import ( + "context" + "encoding/hex" + "math/big" + "testing" + + "github.com/multiversx/mx-chain-core-go/core" + dataBlock "github.com/multiversx/mx-chain-core-go/data/block" + "github.com/multiversx/mx-chain-core-go/data/outport" + "github.com/multiversx/mx-chain-core-go/data/transaction" + indexerData "github.com/multiversx/mx-chain-es-indexer-go/process/dataindexer" + "github.com/stretchr/testify/require" +) + +func TestTransactionWithSCDeploy(t *testing.T) { + setLogLevelDebug() + + esClient, err := createESClient(esURL) + require.Nil(t, err) + + esProc, err := CreateElasticProcessor(esClient) + require.Nil(t, err) + + txHash := []byte("scDeployHash") + header := &dataBlock.Header{ + Round: 50, + TimeStamp: 5040, + ShardID: 2, + } + body := &dataBlock.Body{ + MiniBlocks: dataBlock.MiniBlockSlice{ + { + Type: dataBlock.TxBlock, + SenderShardID: 2, + ReceiverShardID: 2, + TxHashes: [][]byte{txHash}, + }, + }, + } + sndAddress := "erd12m3x8jp6dl027pj5f2nw6ght2cyhhjfrs86cdwsa8xn83r375qfqrwpdx0" + tx := &transaction.Transaction{ + Nonce: 1, + SndAddr: decodeAddress(sndAddress), + RcvAddr: decodeAddress("erd1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq6gq4hu"), + GasLimit: 1000000000, + GasPrice: 2000000, + Data: 
[]byte("0061736d01000000010d036000006000017f60027f7f00023e0303656e760f6765744e756d417267756d656e7473000103656e760b7369676e616c4572726f72000203656e760e636865636b4e6f5061796d656e74000003030200000503010003060f027f00419980080b7f0041a080080b073705066d656d6f7279020004696e697400030863616c6c4261636b00040a5f5f646174615f656e6403000b5f5f686561705f6261736503010a180212001002100004404180800841191001000b0b0300010b0b210100418080080b1977726f6e67206e756d626572206f6620617267756d656e7473@0500@0502"), + Value: big.NewInt(0), + } + + txInfo := &outport.TxInfo{ + Transaction: tx, + FeeInfo: &outport.FeeInfo{ + GasUsed: 1130820, + Fee: big.NewInt(764698200000000), + InitialPaidFee: big.NewInt(773390000000000), + }, + ExecutionOrder: 0, + } + + pool := &outport.TransactionPool{ + Transactions: map[string]*outport.TxInfo{ + hex.EncodeToString(txHash): txInfo, + }, + Logs: []*outport.LogData{ + { + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ + Address: decodeAddress(sndAddress), + Events: []*transaction.Event{ + { + Address: decodeAddress(sndAddress), + Identifier: []byte(core.SCDeployIdentifier), + Topics: [][]byte{decodeAddress("erd1qqqqqqqqqqqqqpgq4t2tqxpst9a6qttpak8cz8wvz6a0nses5qfqel6rhy"), decodeAddress("erd12m3x8jp6dl027pj5f2nw6ght2cyhhjfrs86cdwsa8xn83r375qfqrwpdx0"), []byte("codeHash")}, + }, + nil, + }, + }, + }, + }, + } + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) + require.Nil(t, err) + + ids := []string{hex.EncodeToString(txHash)} + genericResponse := &GenericResponse{} + err = esClient.DoMultiGet(context.Background(), ids, indexerData.TransactionsIndex, true, genericResponse) + require.Nil(t, err) + + require.JSONEq(t, + readExpectedResult("./testdata/scDeploy/tx-sc-deploy.json"), + string(genericResponse.Docs[0].Source), + ) + + ids = []string{"erd1qqqqqqqqqqqqqpgq4t2tqxpst9a6qttpak8cz8wvz6a0nses5qfqel6rhy"} + err = esClient.DoMultiGet(context.Background(), ids, indexerData.SCDeploysIndex, true, 
genericResponse) + require.Nil(t, err) + + require.JSONEq(t, + readExpectedResult("./testdata/scDeploy/deploy.json"), + string(genericResponse.Docs[0].Source), + ) + + // UPGRADE contract + header.TimeStamp = 6000 + pool = &outport.TransactionPool{ + Logs: []*outport.LogData{ + { + TxHash: hex.EncodeToString([]byte("h2")), + Log: &transaction.Log{ + Address: decodeAddress(sndAddress), + Events: []*transaction.Event{ + { + Address: decodeAddress(sndAddress), + Identifier: []byte(core.SCUpgradeIdentifier), + Topics: [][]byte{decodeAddress("erd1qqqqqqqqqqqqqpgq4t2tqxpst9a6qttpak8cz8wvz6a0nses5qfqel6rhy"), decodeAddress("erd12m3x8jp6dl027pj5f2nw6ght2cyhhjfrs86cdwsa8xn83r375qfqrwpdx0"), []byte("secondCodeHash")}, + }, + nil, + }, + }, + }, + }, + } + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) + require.Nil(t, err) + + ids = []string{"erd1qqqqqqqqqqqqqpgq4t2tqxpst9a6qttpak8cz8wvz6a0nses5qfqel6rhy"} + err = esClient.DoMultiGet(context.Background(), ids, indexerData.SCDeploysIndex, true, genericResponse) + require.Nil(t, err) + + require.JSONEq(t, + readExpectedResult("./testdata/scDeploy/deploy-after-upgrade.json"), + string(genericResponse.Docs[0].Source), + ) + + // CHANGE owner first + header.TimeStamp = 7000 + pool = &outport.TransactionPool{ + Logs: []*outport.LogData{ + { + TxHash: hex.EncodeToString([]byte("h3")), + Log: &transaction.Log{ + Address: decodeAddress(sndAddress), + Events: []*transaction.Event{ + { + Address: decodeAddress("erd1qqqqqqqqqqqqqpgq4t2tqxpst9a6qttpak8cz8wvz6a0nses5qfqel6rhy"), + Identifier: []byte(core.BuiltInFunctionChangeOwnerAddress), + Topics: [][]byte{decodeAddress("erd1d942l8w4yvgjffpqacs8vdwl0mndsv0zn0uxa80hxc3xmq4477eqnyw3dh")}, + }, + nil, + }, + }, + }, + }, + } + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) + require.Nil(t, err) + + ids = []string{"erd1qqqqqqqqqqqqqpgq4t2tqxpst9a6qttpak8cz8wvz6a0nses5qfqel6rhy"} + 
err = esClient.DoMultiGet(context.Background(), ids, indexerData.SCDeploysIndex, true, genericResponse) + require.Nil(t, err) + + require.JSONEq(t, + readExpectedResult("./testdata/scDeploy/deploy-after-upgrade-and-change-owner.json"), + string(genericResponse.Docs[0].Source), + ) + + // CHANGE owner second + header.TimeStamp = 8000 + pool = &outport.TransactionPool{ + Logs: []*outport.LogData{ + { + TxHash: hex.EncodeToString([]byte("h4")), + Log: &transaction.Log{ + Address: decodeAddress(sndAddress), + Events: []*transaction.Event{ + { + Address: decodeAddress("erd1qqqqqqqqqqqqqpgq4t2tqxpst9a6qttpak8cz8wvz6a0nses5qfqel6rhy"), + Identifier: []byte(core.BuiltInFunctionChangeOwnerAddress), + Topics: [][]byte{decodeAddress("erd1y78ds2tvzw6ntcggldjld2vk96wgq0mj47mk6auny0nkvn242e3sd4qz7m")}, + }, + nil, + }, + }, + }, + }, + } + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) + require.Nil(t, err) + + ids = []string{"erd1qqqqqqqqqqqqqpgq4t2tqxpst9a6qttpak8cz8wvz6a0nses5qfqel6rhy"} + err = esClient.DoMultiGet(context.Background(), ids, indexerData.SCDeploysIndex, true, genericResponse) + require.Nil(t, err) + + require.JSONEq(t, + readExpectedResult("./testdata/scDeploy/deploy-after-upgrade-and-change-owner-second.json"), + string(genericResponse.Docs[0].Source), + ) +} diff --git a/integrationtests/testdata/accountsBalanceWithLowerTimestamp/account-balance-first-update.json b/integrationtests/testdata/accountsBalanceWithLowerTimestamp/account-balance-first-update.json index 43990f8e..97a80077 100644 --- a/integrationtests/testdata/accountsBalanceWithLowerTimestamp/account-balance-first-update.json +++ b/integrationtests/testdata/accountsBalanceWithLowerTimestamp/account-balance-first-update.json @@ -2,7 +2,6 @@ "address": "erd17umc0uvel62ng30k5uprqcxh3ue33hq608njejaqljuqzqlxtzuqeuzlcv", "balance": "0", "balanceNum": 0, - "totalBalanceWithStake": "0", "timestamp": 5600, "shardID": 2 } diff --git 
a/integrationtests/testdata/accountsBalanceWithLowerTimestamp/account-balance-second-update.json b/integrationtests/testdata/accountsBalanceWithLowerTimestamp/account-balance-second-update.json index 06d0c7d4..3ebc00ce 100644 --- a/integrationtests/testdata/accountsBalanceWithLowerTimestamp/account-balance-second-update.json +++ b/integrationtests/testdata/accountsBalanceWithLowerTimestamp/account-balance-second-update.json @@ -3,6 +3,5 @@ "balance": "2000", "balanceNum": 0, "timestamp": 6000, - "totalBalanceWithStake": "2000", "shardID": 2 } diff --git a/integrationtests/testdata/claimRewards/tx-claim-rewards.json b/integrationtests/testdata/claimRewards/tx-claim-rewards.json index b4a76975..c11ef56c 100644 --- a/integrationtests/testdata/claimRewards/tx-claim-rewards.json +++ b/integrationtests/testdata/claimRewards/tx-claim-rewards.json @@ -1,5 +1,5 @@ { - "miniBlockHash": "582fecdda564e76162bbb07d797c9ee3780a82fab226f5faced83a6cce2cf5d9", + "miniBlockHash": "d5f20802b232c47bd738db31df9046a0c3703d692f72104c4caf0e0a9506373f", "nonce": 617, "round": 50, "value": "0", diff --git a/integrationtests/testdata/esdtTransfer/esdt-transfer.json b/integrationtests/testdata/esdtTransfer/esdt-transfer.json index b23c9761..871e60a6 100644 --- a/integrationtests/testdata/esdtTransfer/esdt-transfer.json +++ b/integrationtests/testdata/esdtTransfer/esdt-transfer.json @@ -1,5 +1,5 @@ { - "miniBlockHash": "1ecea6dff9ab9a785a2d55720e88c1bbd7d9c56310a035d16163e373879cd0e1", + "miniBlockHash": "0787323c7f992341d261e87d3ec662b79772732832e6bcf8d1a8a9949006b6af", "nonce": 6, "round": 50, "value": "0", diff --git a/integrationtests/testdata/issueTokenAndSetRoles/token-after-unset-role.json b/integrationtests/testdata/issueTokenAndSetRoles/token-after-unset-role.json index f0fd19fd..42c45bfe 100644 --- a/integrationtests/testdata/issueTokenAndSetRoles/token-after-unset-role.json +++ b/integrationtests/testdata/issueTokenAndSetRoles/token-after-unset-role.json @@ -15,8 +15,7 @@ "roles": { 
"ESDTRoleNFTCreate": [ "erd1suhxyflu4w4pqdxmushpxzc6a3qszr89m8uswzqcvyh0mh9mzxwqdwkm0x" - ], - "ESDTRoleNFTBurn": [] + ] }, "properties": { "canMint": false, diff --git a/integrationtests/testdata/miniblocks/cross-miniblock-on-destination-first.json b/integrationtests/testdata/miniblocks/cross-miniblock-on-destination-first.json new file mode 100644 index 00000000..a18dfa1e --- /dev/null +++ b/integrationtests/testdata/miniblocks/cross-miniblock-on-destination-first.json @@ -0,0 +1,8 @@ +{ + "procTypeD": "Processed", + "receiverShard": 0, + "senderShard": 2, + "receiverBlockHash": "b36435faaa72390772da84f418348ce0d477c74432579519bf0ffea1dc4c36e9", + "type": "TxBlock", + "timestamp": 54321 +} diff --git a/integrationtests/testdata/miniblocks/cross-miniblock-on-destination.json b/integrationtests/testdata/miniblocks/cross-miniblock-on-destination.json new file mode 100644 index 00000000..6baab6f0 --- /dev/null +++ b/integrationtests/testdata/miniblocks/cross-miniblock-on-destination.json @@ -0,0 +1,10 @@ +{ + "procTypeS": "Normal", + "senderBlockHash": "3fede8a9a3c4f2ba6d7e6e01541813606cd61c4d3af2940f8e089827b5d94e50", + "receiverShard": 2, + "senderShard": 1, + "type": "TxBlock", + "timestamp": 1234, + "receiverBlockHash": "d7f1e8003a45c7adbd87bbbb269cb4af3d1f4aedd0c214973bfc096dd0f3b65e", + "procTypeD": "Scheduled" +} diff --git a/integrationtests/testdata/miniblocks/cross-miniblock-on-source-second.json b/integrationtests/testdata/miniblocks/cross-miniblock-on-source-second.json new file mode 100644 index 00000000..0ae4076f --- /dev/null +++ b/integrationtests/testdata/miniblocks/cross-miniblock-on-source-second.json @@ -0,0 +1,10 @@ +{ + "procTypeD": "Processed", + "receiverShard": 0, + "senderShard": 2, + "receiverBlockHash": "b36435faaa72390772da84f418348ce0d477c74432579519bf0ffea1dc4c36e9", + "type": "TxBlock", + "timestamp": 54321, + "senderBlockHash": "b601381e1f41df2aa3da9f2b8eb169f14c86418229e30fc65f9e6b37b7f0d902", + "procTypeS": "Normal" +} diff --git 
a/integrationtests/testdata/miniblocks/cross-miniblock-on-source.json b/integrationtests/testdata/miniblocks/cross-miniblock-on-source.json new file mode 100644 index 00000000..98dce8aa --- /dev/null +++ b/integrationtests/testdata/miniblocks/cross-miniblock-on-source.json @@ -0,0 +1,8 @@ +{ + "procTypeS": "Normal", + "senderBlockHash": "3fede8a9a3c4f2ba6d7e6e01541813606cd61c4d3af2940f8e089827b5d94e50", + "receiverShard": 2, + "senderShard": 1, + "type": "TxBlock", + "timestamp": 1234 +} diff --git a/integrationtests/testdata/multiTransferWithScCallAndErrorSignaledBySC/transaction-after-execution-of-scr-dst-shard.json b/integrationtests/testdata/multiTransferWithScCallAndErrorSignaledBySC/transaction-after-execution-of-scr-dst-shard.json index e475e9f7..032af67c 100644 --- a/integrationtests/testdata/multiTransferWithScCallAndErrorSignaledBySC/transaction-after-execution-of-scr-dst-shard.json +++ b/integrationtests/testdata/multiTransferWithScCallAndErrorSignaledBySC/transaction-after-execution-of-scr-dst-shard.json @@ -18,7 +18,7 @@ "initialPaidFee": "1904415000000000", "gasLimit": 150000000, "gasUsed": 150000000, - "miniBlockHash": "2ea5bfe2968c98f3fcb059c6dcb3b0b42863ebe957748649f566db12548d2091", + "miniBlockHash": "71a5272ba7198b74f8a59cb9db4a270d2cfb1a7aefacbe17a27d1aa6f0d60b57", "receivers": [ "erd1qqqqqqqqqqqqqpgqt6ltx52ukss9d2qag2k67at28a36xc9lkp2sr06394", "erd1qqqqqqqqqqqqqpgqt6ltx52ukss9d2qag2k67at28a36xc9lkp2sr06394" @@ -41,5 +41,6 @@ "receiverShard": 0, "operation": "MultiESDTNFTTransfer", "status": "fail", - "searchOrder": 0 + "searchOrder": 0, + "errorEvent": true } diff --git a/integrationtests/testdata/multiTransferWithScCallAndErrorSignaledBySC/transaction-executed-on-source.json b/integrationtests/testdata/multiTransferWithScCallAndErrorSignaledBySC/transaction-executed-on-source.json index b638fbe5..a86b0649 100644 --- a/integrationtests/testdata/multiTransferWithScCallAndErrorSignaledBySC/transaction-executed-on-source.json +++ 
b/integrationtests/testdata/multiTransferWithScCallAndErrorSignaledBySC/transaction-executed-on-source.json @@ -18,7 +18,7 @@ "initialPaidFee": "1904415000000000", "gasLimit": 150000000, "gasUsed": 150000000, - "miniBlockHash": "2ea5bfe2968c98f3fcb059c6dcb3b0b42863ebe957748649f566db12548d2091", + "miniBlockHash": "71a5272ba7198b74f8a59cb9db4a270d2cfb1a7aefacbe17a27d1aa6f0d60b57", "receivers": [ "erd1qqqqqqqqqqqqqpgqt6ltx52ukss9d2qag2k67at28a36xc9lkp2sr06394", "erd1qqqqqqqqqqqqqpgqt6ltx52ukss9d2qag2k67at28a36xc9lkp2sr06394" diff --git a/integrationtests/testdata/nftTransferCrossShard/op-nft-transfer-sc-call-after-refund.json b/integrationtests/testdata/nftTransferCrossShard/op-nft-transfer-sc-call-after-refund.json index 4cb70b38..32c4ff66 100644 --- a/integrationtests/testdata/nftTransferCrossShard/op-nft-transfer-sc-call-after-refund.json +++ b/integrationtests/testdata/nftTransferCrossShard/op-nft-transfer-sc-call-after-refund.json @@ -1,5 +1,5 @@ { - "miniBlockHash": "b30aaa656bf101a7fb87f6c02a9da9e70cd053a79de24f5d14276232757d9766", + "miniBlockHash": "d5a0138c5a29200f398cc8cb168a5b1cb8d4a16890746381b5cbed14c7bb379f", "nonce": 79, "round": 50, "value": "0", diff --git a/integrationtests/testdata/nftTransferCrossShard/tx-complete-with-status.json b/integrationtests/testdata/nftTransferCrossShard/tx-complete-with-status.json index 9826d540..62168879 100644 --- a/integrationtests/testdata/nftTransferCrossShard/tx-complete-with-status.json +++ b/integrationtests/testdata/nftTransferCrossShard/tx-complete-with-status.json @@ -16,7 +16,7 @@ "nonce": 79, "gasLimit": 5000000, "gasUsed": 963500, - "miniBlockHash": "db7161a83f08489cba131e55f042536ee49116b622e33e70335a13e51a6c268c", + "miniBlockHash": "6d6986fad945a4587927692450372cc0d7aea2a7c1b5326b485fe0a9da0c0065", "round": 50, "hasScResults": true, "sender": "erd1ure7ea247clj6yqjg80unz6xzjhlj2zwm4gtg6sudcmtsd2cw3xs74hasv", @@ -35,5 +35,6 @@ "timestamp": 5040, "status": "fail", "initialPaidFee": "595490000000000", - 
"searchOrder": 0 + "searchOrder": 0, + "errorEvent": true } diff --git a/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer-failed-on-dst.json b/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer-failed-on-dst.json index 45c02f7b..70f07153 100644 --- a/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer-failed-on-dst.json +++ b/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer-failed-on-dst.json @@ -1,5 +1,5 @@ { - "miniBlockHash": "83c60064098aa89220b5adc9d71f22b489bfc78cb3dcb516381102d7fec959e8", + "miniBlockHash": "1b39a9702116ba9020d928bf681c49ee4b462fcc8e3209fbf256459f97e6ef84", "nonce": 79, "round": 50, "value": "0", @@ -35,5 +35,6 @@ "receiversShardIDs": [ 2 ], - "operation": "ESDTNFTTransfer" + "operation": "ESDTNFTTransfer", + "errorEvent": true } diff --git a/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer-sc-call-after-refund.json b/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer-sc-call-after-refund.json index f276884c..1223f2e7 100644 --- a/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer-sc-call-after-refund.json +++ b/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer-sc-call-after-refund.json @@ -1,5 +1,5 @@ { - "miniBlockHash": "b30aaa656bf101a7fb87f6c02a9da9e70cd053a79de24f5d14276232757d9766", + "miniBlockHash": "d5a0138c5a29200f398cc8cb168a5b1cb8d4a16890746381b5cbed14c7bb379f", "nonce": 79, "round": 50, "value": "0", diff --git a/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer-sc-call-source.json b/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer-sc-call-source.json index a5fa15f7..0342ec98 100644 --- a/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer-sc-call-source.json +++ b/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer-sc-call-source.json @@ -16,7 +16,7 @@ "nonce": 79, "gasLimit": 150000000, "gasUsed": 150000000, - "miniBlockHash": 
"b30aaa656bf101a7fb87f6c02a9da9e70cd053a79de24f5d14276232757d9766", + "miniBlockHash": "d5a0138c5a29200f398cc8cb168a5b1cb8d4a16890746381b5cbed14c7bb379f", "round": 50, "hasScResults": true, "sender": "erd1ef9xx3k3m89azf4c4xc98wpcdnx5h0cnxy6em47r6dc4alud0uwqx24f50", diff --git a/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer.json b/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer.json index bf3d4021..dc2e7dc7 100644 --- a/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer.json +++ b/integrationtests/testdata/nftTransferCrossShard/tx-nft-transfer.json @@ -17,7 +17,7 @@ "nonce": 79, "gasLimit": 5000000, "gasUsed": 963500, - "miniBlockHash": "83c60064098aa89220b5adc9d71f22b489bfc78cb3dcb516381102d7fec959e8", + "miniBlockHash": "1b39a9702116ba9020d928bf681c49ee4b462fcc8e3209fbf256459f97e6ef84", "round": 50, "hasScResults": true, "sender": "erd1ure7ea247clj6yqjg80unz6xzjhlj2zwm4gtg6sudcmtsd2cw3xs74hasv", diff --git a/integrationtests/testdata/nftTransferCrossShard/tx-with-status-only.json b/integrationtests/testdata/nftTransferCrossShard/tx-with-status-only.json index b0bfc6b1..c9a4c62a 100644 --- a/integrationtests/testdata/nftTransferCrossShard/tx-with-status-only.json +++ b/integrationtests/testdata/nftTransferCrossShard/tx-with-status-only.json @@ -17,5 +17,6 @@ "signature": "", "timestamp": 0, "status": "fail", - "searchOrder": 0 + "searchOrder": 0, + "errorEvent": true } diff --git a/integrationtests/testdata/nftTransferCrossShardWithScCall/cross-shard-transfer-with-sc-call.json b/integrationtests/testdata/nftTransferCrossShardWithScCall/cross-shard-transfer-with-sc-call.json index e2e13d6c..07f16691 100644 --- a/integrationtests/testdata/nftTransferCrossShardWithScCall/cross-shard-transfer-with-sc-call.json +++ b/integrationtests/testdata/nftTransferCrossShardWithScCall/cross-shard-transfer-with-sc-call.json @@ -1,5 +1,5 @@ { - "miniBlockHash": "99a07aab4f6722a1473b33bd7bb35e339c69339c400737b14a94ad8bceaa1734", + 
"miniBlockHash": "60cb6c0185ada3fd5814611a0bc3bd63b47b4bffc78ecbce43adb4f32a19eac6", "nonce": 79, "round": 50, "value": "0", diff --git a/integrationtests/testdata/relayedTx/relayed-tx-after-refund.json b/integrationtests/testdata/relayedTx/relayed-tx-after-refund.json index a01337f9..dd85dc43 100644 --- a/integrationtests/testdata/relayedTx/relayed-tx-after-refund.json +++ b/integrationtests/testdata/relayedTx/relayed-tx-after-refund.json @@ -1,5 +1,5 @@ { - "miniBlockHash": "fed7c174a849c30b88c36a26453407f1b95970941d0872e603e641c5c804104a", + "miniBlockHash": "333145179043d02bb47bacd7a1af58640fcffae7be284cb30b2295b41159bf04", "nonce": 1196667, "round": 50, "value": "0", diff --git a/integrationtests/testdata/relayedTx/relayed-tx-intra.json b/integrationtests/testdata/relayedTx/relayed-tx-intra.json index 6b77adc6..cf066e85 100644 --- a/integrationtests/testdata/relayedTx/relayed-tx-intra.json +++ b/integrationtests/testdata/relayedTx/relayed-tx-intra.json @@ -1,5 +1,5 @@ { - "miniBlockHash": "2709174224d13e49fd76a70b48bd3db7838ca715bcfe09be59cef043241d7ef3", + "miniBlockHash": "be053e3ccb556eb39be6f388fb0ab5256d394d634887be06a6ed996560cbcae7", "nonce": 1196665, "round": 50, "value": "0", diff --git a/integrationtests/testdata/relayedTx/relayed-tx-source.json b/integrationtests/testdata/relayedTx/relayed-tx-source.json index c644e3e1..9c907820 100644 --- a/integrationtests/testdata/relayedTx/relayed-tx-source.json +++ b/integrationtests/testdata/relayedTx/relayed-tx-source.json @@ -1,5 +1,5 @@ { - "miniBlockHash": "fed7c174a849c30b88c36a26453407f1b95970941d0872e603e641c5c804104a", + "miniBlockHash": "333145179043d02bb47bacd7a1af58640fcffae7be284cb30b2295b41159bf04", "nonce": 1196667, "round": 50, "value": "0", diff --git a/integrationtests/testdata/scCallIntraShard/claim-rewards.json b/integrationtests/testdata/scCallIntraShard/claim-rewards.json index 2124bb2c..6de4372a 100644 --- a/integrationtests/testdata/scCallIntraShard/claim-rewards.json +++ 
b/integrationtests/testdata/scCallIntraShard/claim-rewards.json @@ -1,5 +1,5 @@ { - "miniBlockHash": "60b38b11110d28d1b361359f9688bb041bb9180219a612a83ff00dcc0db4d607", + "miniBlockHash": "b94c93965b411ffd412af1f52e3feab3b222d50e5f96fc69f9b6b8b53d926ac0", "nonce": 101, "round": 50, "value": "0", diff --git a/integrationtests/testdata/scCallIntraShard/sc-call-fail.json b/integrationtests/testdata/scCallIntraShard/sc-call-fail.json index 5e1cbab2..aaea4caf 100644 --- a/integrationtests/testdata/scCallIntraShard/sc-call-fail.json +++ b/integrationtests/testdata/scCallIntraShard/sc-call-fail.json @@ -1,5 +1,5 @@ { - "miniBlockHash": "5d04f80b044352bfbbde123702323eae07fdd8ca77f24f256079006058b6e7b4", + "miniBlockHash": "367a92162b8a241d2b30253520e93021c8dd2247bd30d8a21108feb2827db45e", "nonce": 46, "round": 50, "value": "5000000000000000000", @@ -22,5 +22,8 @@ "hasScResults": true, "operation": "transfer", "isScCall": true, - "function": "delegate" + "function": "delegate", + "errorEvent": true, + "hasLogs": true, + "hasOperations": true } diff --git a/integrationtests/testdata/scCallWithIssueEsdt/scr-with-callback-executed-on-destination-shard.json b/integrationtests/testdata/scCallWithIssueEsdt/scr-with-callback-executed-on-destination-shard.json new file mode 100644 index 00000000..d0896ee1 --- /dev/null +++ b/integrationtests/testdata/scCallWithIssueEsdt/scr-with-callback-executed-on-destination-shard.json @@ -0,0 +1,22 @@ +{ + "miniBlockHash": "49345577b344acd366143ad353904657307dc21f57c3817bf164975de3858459", + "nonce": 0, + "gasLimit": 0, + "gasPrice": 0, + "value": "0", + "valueNum": 0, + "sender": "erd1qqqqqqqqqqqqqqqpqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqzllls8a5w6u", + "receiver": "erd1qqqqqqqqqqqqqpgqahumqen35dr9k4rmcnd70mqt5t4mt7ey4nwqwjme9g", + "senderShard": 4294967295, + "receiverShard": 0, + "data": "QDAwQDU0NDU1MzU0NGU0NjU0MmQ2NDM5NjQzNTM0NjM=", + "prevTxHash": "736372576974684973737565", + "originalTxHash": "747857697468536343616c6c", + "callType": "2", + 
"timestamp": 5040, + "hasOperations": true, + "type": "unsigned", + "status": "success", + "operation": "transfer", + "hasLogs": true +} diff --git a/integrationtests/testdata/scCallWithIssueEsdt/scr-with-callback-executed-on-source.json b/integrationtests/testdata/scCallWithIssueEsdt/scr-with-callback-executed-on-source.json new file mode 100644 index 00000000..5bc7cbb2 --- /dev/null +++ b/integrationtests/testdata/scCallWithIssueEsdt/scr-with-callback-executed-on-source.json @@ -0,0 +1,20 @@ +{ + "miniBlockHash": "49345577b344acd366143ad353904657307dc21f57c3817bf164975de3858459", + "nonce": 0, + "gasLimit": 0, + "gasPrice": 0, + "value": "0", + "valueNum": 0, + "sender": "erd1qqqqqqqqqqqqqqqpqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqzllls8a5w6u", + "receiver": "erd1qqqqqqqqqqqqqpgqahumqen35dr9k4rmcnd70mqt5t4mt7ey4nwqwjme9g", + "senderShard": 4294967295, + "receiverShard": 0, + "data": "QDAwQDU0NDU1MzU0NGU0NjU0MmQ2NDM5NjQzNTM0NjM=", + "prevTxHash": "736372576974684973737565", + "originalTxHash": "747857697468536343616c6c", + "callType": "2", + "timestamp": 5040, + "type": "unsigned", + "status": "pending", + "operation": "transfer" +} diff --git a/integrationtests/testdata/scCallWithIssueEsdt/scr-with-issue-executed-on-destination-shard.json b/integrationtests/testdata/scCallWithIssueEsdt/scr-with-issue-executed-on-destination-shard.json new file mode 100644 index 00000000..448dfb9e --- /dev/null +++ b/integrationtests/testdata/scCallWithIssueEsdt/scr-with-issue-executed-on-destination-shard.json @@ -0,0 +1,21 @@ +{ + "miniBlockHash": "5eb176de1812fdfda088c1764f07ef9f991ba6159de3096143a59185884aed4a", + "nonce": 0, + "gasLimit": 0, + "gasPrice": 0, + "value": "50000000000000000", + "valueNum": 0.05, + "sender": "erd1qqqqqqqqqqqqqpgqahumqen35dr9k4rmcnd70mqt5t4mt7ey4nwqwjme9g", + "receiver": "erd1qqqqqqqqqqqqqqqpqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqzllls8a5w6u", + "senderShard": 0, + "receiverShard": 4294967295, + "data": 
"aXNzdWVOb25GdW5naWJsZUA0ZDc5NTQ2NTczNzQ0ZTY2NzQ2NEA1NDQ1NTM1NDRlNDY1NEA2MzYxNmU0NjcyNjU2NTdhNjVANzQ3Mjc1NjVANjM2MTZlNTc2OTcwNjVANzQ3Mjc1NjVANjM2MTZlNTA2MTc1NzM2NUA3NDcyNzU2NUA2MzYxNmU0MzY4NjE2ZTY3NjU0Zjc3NmU2NTcyQDY2NjE2YzczNjVANjM2MTZlNTU3MDY3NzI2MTY0NjVANjY2MTZjNzM2NUA2MzYxNmU0MTY0NjQ1MzcwNjU2MzY5NjE2YzUyNmY2YzY1NzNANzQ3Mjc1NjVANThmNjM4", + "prevTxHash": "747857697468536343616c6c", + "originalTxHash": "747857697468536343616c6c", + "callType": "1", + "timestamp": 5040, + "type": "unsigned", + "status": "success", + "operation": "transfer", + "function": "issueNonFungible" +} diff --git a/integrationtests/testdata/scCallWithIssueEsdt/scr-with-issue-executed-on-source-shard.json b/integrationtests/testdata/scCallWithIssueEsdt/scr-with-issue-executed-on-source-shard.json new file mode 100644 index 00000000..3703f353 --- /dev/null +++ b/integrationtests/testdata/scCallWithIssueEsdt/scr-with-issue-executed-on-source-shard.json @@ -0,0 +1,21 @@ +{ + "miniBlockHash": "5eb176de1812fdfda088c1764f07ef9f991ba6159de3096143a59185884aed4a", + "nonce": 0, + "gasLimit": 0, + "gasPrice": 0, + "value": "50000000000000000", + "valueNum": 0.05, + "sender": "erd1qqqqqqqqqqqqqpgqahumqen35dr9k4rmcnd70mqt5t4mt7ey4nwqwjme9g", + "receiver": "erd1qqqqqqqqqqqqqqqpqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqzllls8a5w6u", + "senderShard": 0, + "receiverShard": 4294967295, + "data": "aXNzdWVOb25GdW5naWJsZUA0ZDc5NTQ2NTczNzQ0ZTY2NzQ2NEA1NDQ1NTM1NDRlNDY1NEA2MzYxNmU0NjcyNjU2NTdhNjVANzQ3Mjc1NjVANjM2MTZlNTc2OTcwNjVANzQ3Mjc1NjVANjM2MTZlNTA2MTc1NzM2NUA3NDcyNzU2NUA2MzYxNmU0MzY4NjE2ZTY3NjU0Zjc3NmU2NTcyQDY2NjE2YzczNjVANjM2MTZlNTU3MDY3NzI2MTY0NjVANjY2MTZjNzM2NUA2MzYxNmU0MTY0NjQ1MzcwNjU2MzY5NjE2YzUyNmY2YzY1NzNANzQ3Mjc1NjVANThmNjM4", + "prevTxHash": "747857697468536343616c6c", + "originalTxHash": "747857697468536343616c6c", + "callType": "1", + "timestamp": 5040, + "type": "unsigned", + "status": "pending", + "operation": "transfer", + "function": "issueNonFungible" +} diff --git 
a/integrationtests/testdata/scCallWithIssueEsdt/tx-after-execution-of-callback-on-destination-shard.json b/integrationtests/testdata/scCallWithIssueEsdt/tx-after-execution-of-callback-on-destination-shard.json new file mode 100644 index 00000000..ce08d35c --- /dev/null +++ b/integrationtests/testdata/scCallWithIssueEsdt/tx-after-execution-of-callback-on-destination-shard.json @@ -0,0 +1,27 @@ +{ + "miniBlockHash": "ace0c639c9687eb82e5d304502668e937cf220610b5a3881ca7e210802e64ae1", + "nonce": 46, + "round": 50, + "value": "50000000000000000", + "valueNum": 0.05, + "receiver": "erd1qqqqqqqqqqqqqpgqahumqen35dr9k4rmcnd70mqt5t4mt7ey4nwqwjme9g", + "sender": "erd148m2sx48mfm8322c2kpfmgj78g5j0x7r6z6y4z8j28qk45a74nwq5pq2ts", + "receiverShard": 0, + "senderShard": 0, + "gasPrice": 1000000000, + "gasLimit": 75000000, + "gasUsed": 75000000, + "fee": "867810000000000", + "feeNum": 8.6781E-4, + "initialPaidFee": "867810000000000", + "data": "aXNzdWVUb2tlbkA0RDc5NTQ2NTczNzQ0RTY2NzQ2NEA1NDQ1NTM1NDRFNDY1NA==", + "signature": "", + "timestamp": 5040, + "status": "fail", + "searchOrder": 0, + "hasScResults": true, + "isScCall": true, + "operation": "transfer", + "function": "issueToken", + "errorEvent": true +} diff --git a/integrationtests/testdata/scCallWithIssueEsdt/tx-after-execution-on-source-shard.json b/integrationtests/testdata/scCallWithIssueEsdt/tx-after-execution-on-source-shard.json new file mode 100644 index 00000000..6ea6fd40 --- /dev/null +++ b/integrationtests/testdata/scCallWithIssueEsdt/tx-after-execution-on-source-shard.json @@ -0,0 +1,27 @@ +{ + "miniBlockHash": "ace0c639c9687eb82e5d304502668e937cf220610b5a3881ca7e210802e64ae1", + "nonce": 46, + "round": 50, + "value": "50000000000000000", + "valueNum": 0.05, + "receiver": "erd1qqqqqqqqqqqqqpgqahumqen35dr9k4rmcnd70mqt5t4mt7ey4nwqwjme9g", + "sender": "erd148m2sx48mfm8322c2kpfmgj78g5j0x7r6z6y4z8j28qk45a74nwq5pq2ts", + "receiverShard": 0, + "senderShard": 0, + "gasPrice": 1000000000, + "gasLimit": 75000000, + 
"gasUsed": 75000000, + "fee": "867810000000000", + "feeNum": 0.00086781, + "initialPaidFee": "867810000000000", + "data": "aXNzdWVUb2tlbkA0RDc5NTQ2NTczNzQ0RTY2NzQ2NEA1NDQ1NTM1NDRFNDY1NA==", + "signature": "", + "timestamp": 5040, + "status": "success", + "searchOrder": 0, + "hasScResults": true, + "isScCall": true, + "operation": "transfer", + "function": "issueToken" +} + diff --git a/integrationtests/testdata/scCallWithIssueEsdt/tx-in-op-index-execution-of-callback-on-destination-shard.json b/integrationtests/testdata/scCallWithIssueEsdt/tx-in-op-index-execution-of-callback-on-destination-shard.json new file mode 100644 index 00000000..305833e8 --- /dev/null +++ b/integrationtests/testdata/scCallWithIssueEsdt/tx-in-op-index-execution-of-callback-on-destination-shard.json @@ -0,0 +1,28 @@ +{ + "miniBlockHash": "ace0c639c9687eb82e5d304502668e937cf220610b5a3881ca7e210802e64ae1", + "nonce": 46, + "round": 50, + "value": "50000000000000000", + "valueNum": 0.05, + "receiver": "erd1qqqqqqqqqqqqqpgqahumqen35dr9k4rmcnd70mqt5t4mt7ey4nwqwjme9g", + "sender": "erd148m2sx48mfm8322c2kpfmgj78g5j0x7r6z6y4z8j28qk45a74nwq5pq2ts", + "receiverShard": 0, + "senderShard": 0, + "gasPrice": 1000000000, + "gasLimit": 75000000, + "gasUsed": 75000000, + "fee": "867810000000000", + "feeNum": 8.6781E-4, + "initialPaidFee": "867810000000000", + "data": "aXNzdWVUb2tlbkA0RDc5NTQ2NTczNzQ0RTY2NzQ2NEA1NDQ1NTM1NDRFNDY1NA==", + "signature": "", + "timestamp": 5040, + "status": "fail", + "searchOrder": 0, + "hasScResults": true, + "isScCall": true, + "operation": "transfer", + "function": "issueToken", + "errorEvent": true, + "type": "normal" +} diff --git a/integrationtests/testdata/scDeploy/deploy-after-upgrade-and-change-owner-second.json b/integrationtests/testdata/scDeploy/deploy-after-upgrade-and-change-owner-second.json new file mode 100644 index 00000000..eaadb107 --- /dev/null +++ b/integrationtests/testdata/scDeploy/deploy-after-upgrade-and-change-owner-second.json @@ -0,0 +1,27 @@ +{ + 
"deployTxHash": "6831", + "deployer": "erd12m3x8jp6dl027pj5f2nw6ght2cyhhjfrs86cdwsa8xn83r375qfqrwpdx0", + "currentOwner": "erd1y78ds2tvzw6ntcggldjld2vk96wgq0mj47mk6auny0nkvn242e3sd4qz7m", + "initialCodeHash": "Y29kZUhhc2g=", + "timestamp": 5040, + "upgrades": [ + { + "upgradeTxHash": "6832", + "codeHash": "c2Vjb25kQ29kZUhhc2g=", + "upgrader": "erd12m3x8jp6dl027pj5f2nw6ght2cyhhjfrs86cdwsa8xn83r375qfqrwpdx0", + "timestamp": 6000 + } + ], + "owners": [ + { + "address": "erd1d942l8w4yvgjffpqacs8vdwl0mndsv0zn0uxa80hxc3xmq4477eqnyw3dh", + "txHash": "6833", + "timestamp": 7000 + }, + { + "address": "erd1y78ds2tvzw6ntcggldjld2vk96wgq0mj47mk6auny0nkvn242e3sd4qz7m", + "txHash": "6834", + "timestamp": 8000 + } + ] +} diff --git a/integrationtests/testdata/scDeploy/deploy-after-upgrade-and-change-owner.json b/integrationtests/testdata/scDeploy/deploy-after-upgrade-and-change-owner.json new file mode 100644 index 00000000..e4824dd0 --- /dev/null +++ b/integrationtests/testdata/scDeploy/deploy-after-upgrade-and-change-owner.json @@ -0,0 +1,22 @@ +{ + "deployTxHash": "6831", + "deployer": "erd12m3x8jp6dl027pj5f2nw6ght2cyhhjfrs86cdwsa8xn83r375qfqrwpdx0", + "currentOwner": "erd1d942l8w4yvgjffpqacs8vdwl0mndsv0zn0uxa80hxc3xmq4477eqnyw3dh", + "initialCodeHash": "Y29kZUhhc2g=", + "timestamp": 5040, + "upgrades": [ + { + "upgradeTxHash": "6832", + "codeHash": "c2Vjb25kQ29kZUhhc2g=", + "upgrader": "erd12m3x8jp6dl027pj5f2nw6ght2cyhhjfrs86cdwsa8xn83r375qfqrwpdx0", + "timestamp": 6000 + } + ], + "owners": [ + { + "address": "erd1d942l8w4yvgjffpqacs8vdwl0mndsv0zn0uxa80hxc3xmq4477eqnyw3dh", + "txHash": "6833", + "timestamp": 7000 + } + ] +} diff --git a/integrationtests/testdata/scDeploy/deploy-after-upgrade.json b/integrationtests/testdata/scDeploy/deploy-after-upgrade.json new file mode 100644 index 00000000..687b1969 --- /dev/null +++ b/integrationtests/testdata/scDeploy/deploy-after-upgrade.json @@ -0,0 +1,16 @@ +{ + "deployTxHash": "6831", + "deployer": 
"erd12m3x8jp6dl027pj5f2nw6ght2cyhhjfrs86cdwsa8xn83r375qfqrwpdx0", + "currentOwner": "erd12m3x8jp6dl027pj5f2nw6ght2cyhhjfrs86cdwsa8xn83r375qfqrwpdx0", + "initialCodeHash": "Y29kZUhhc2g=", + "timestamp": 5040, + "upgrades": [ + { + "upgradeTxHash": "6832", + "codeHash": "c2Vjb25kQ29kZUhhc2g=", + "upgrader": "erd12m3x8jp6dl027pj5f2nw6ght2cyhhjfrs86cdwsa8xn83r375qfqrwpdx0", + "timestamp": 6000 + } + ], + "owners": [] +} diff --git a/integrationtests/testdata/scDeploy/deploy.json b/integrationtests/testdata/scDeploy/deploy.json new file mode 100644 index 00000000..4f8bd080 --- /dev/null +++ b/integrationtests/testdata/scDeploy/deploy.json @@ -0,0 +1,9 @@ +{ + "deployTxHash": "6831", + "deployer": "erd12m3x8jp6dl027pj5f2nw6ght2cyhhjfrs86cdwsa8xn83r375qfqrwpdx0", + "currentOwner": "erd12m3x8jp6dl027pj5f2nw6ght2cyhhjfrs86cdwsa8xn83r375qfqrwpdx0", + "timestamp": 5040, + "initialCodeHash": "Y29kZUhhc2g=", + "upgrades": [], + "owners": [] +} diff --git a/integrationtests/testdata/scDeploy/tx-sc-deploy.json b/integrationtests/testdata/scDeploy/tx-sc-deploy.json new file mode 100644 index 00000000..03a79f1b --- /dev/null +++ b/integrationtests/testdata/scDeploy/tx-sc-deploy.json @@ -0,0 +1,24 @@ +{ + "miniBlockHash": "1703039e791823d0e722a0d3df76c87786fb5074d2b333c98a870335b418a97d", + "nonce": 1, + "round": 50, + "value": "0", + "valueNum": 0, + "receiver": "erd1qqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq6gq4hu", + "sender": "erd12m3x8jp6dl027pj5f2nw6ght2cyhhjfrs86cdwsa8xn83r375qfqrwpdx0", + "receiverShard": 2, + "senderShard": 2, + "gasPrice": 2000000, + "gasLimit": 1000000000, + "gasUsed": 1130820, + "fee": "764698200000000", + "feeNum": 0.0007646982, + "initialPaidFee": "773390000000000", + "data": 
"MDA2MTczNmQwMTAwMDAwMDAxMGQwMzYwMDAwMDYwMDAwMTdmNjAwMjdmN2YwMDAyM2UwMzAzNjU2ZTc2MGY2NzY1NzQ0ZTc1NmQ0MTcyNjc3NTZkNjU2ZTc0NzMwMDAxMDM2NTZlNzYwYjczNjk2NzZlNjE2YzQ1NzI3MjZmNzIwMDAyMDM2NTZlNzYwZTYzNjg2NTYzNmI0ZTZmNTA2MTc5NmQ2NTZlNzQwMDAwMDMwMzAyMDAwMDA1MDMwMTAwMDMwNjBmMDI3ZjAwNDE5OTgwMDgwYjdmMDA0MWEwODAwODBiMDczNzA1MDY2ZDY1NmQ2ZjcyNzkwMjAwMDQ2OTZlNjk3NDAwMDMwODYzNjE2YzZjNDI2MTYzNmIwMDA0MGE1ZjVmNjQ2MTc0NjE1ZjY1NmU2NDAzMDAwYjVmNWY2ODY1NjE3MDVmNjI2MTczNjUwMzAxMGExODAyMTIwMDEwMDIxMDAwMDQ0MDQxODA4MDA4NDExOTEwMDEwMDBiMGIwMzAwMDEwYjBiMjEwMTAwNDE4MDgwMDgwYjE5Nzc3MjZmNmU2NzIwNmU3NTZkNjI2NTcyMjA2ZjY2MjA2MTcyNjc3NTZkNjU2ZTc0NzNAMDUwMEAwNTAy", + "signature": "", + "timestamp": 5040, + "status": "success", + "searchOrder": 0, + "isScCall": true, + "operation": "scDeploy" +} diff --git a/integrationtests/testdata/transactions/move-balance.json b/integrationtests/testdata/transactions/move-balance.json index 4a2bb764..266cfe1c 100644 --- a/integrationtests/testdata/transactions/move-balance.json +++ b/integrationtests/testdata/transactions/move-balance.json @@ -1,5 +1,5 @@ { - "miniBlockHash": "24c374c9405540e88a36959ea83eede6ad50f6872f82d2e2a2280975615e1811", + "miniBlockHash": "ef17c864ce0ceca1f51171c79a50317d40dd2a71c016761332ed15ddce9b2cf6", "nonce": 1, "round": 50, "value": "1234", diff --git a/integrationtests/transactions_test.go b/integrationtests/transactions_test.go index 05574386..48e19b21 100644 --- a/integrationtests/transactions_test.go +++ b/integrationtests/transactions_test.go @@ -3,11 +3,11 @@ package integrationtests import ( + "context" "encoding/hex" "math/big" "testing" - coreData "github.com/multiversx/mx-chain-core-go/data" dataBlock "github.com/multiversx/mx-chain-core-go/data/block" "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-core-go/data/transaction" @@ -39,7 +39,7 @@ func TestElasticIndexerSaveTransactions(t *testing.T) { }, }, } - tx := outport.NewTransactionHandlerWithGasAndFee(&transaction.Transaction{ + tx := 
&transaction.Transaction{ Nonce: 1, SndAddr: decodeAddress("erd1w7jyzuj6cv4ngw8luhlkakatjpmjh3ql95lmxphd3vssc4vpymks6k5th7"), RcvAddr: decodeAddress("erd1ahmy0yjhjg87n755yv99nzla22zzwfud55sa69gk3anyxyyucq9q2hgxww"), @@ -47,19 +47,29 @@ func TestElasticIndexerSaveTransactions(t *testing.T) { GasPrice: 1000000000, Data: []byte("transfer"), Value: big.NewInt(1234), - }, 62000, big.NewInt(62000000000000)) - tx.SetInitialPaidFee(big.NewInt(62080000000000)) - pool := &outport.Pool{ - Txs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(txHash): tx, + } + + txInfo := &outport.TxInfo{ + Transaction: tx, + FeeInfo: &outport.FeeInfo{ + GasUsed: 62000, + Fee: big.NewInt(62000000000000), + InitialPaidFee: big.NewInt(62080000000000), + }, + ExecutionOrder: 0, + } + + pool := &outport.TransactionPool{ + Transactions: map[string]*outport.TxInfo{ + hex.EncodeToString(txHash): txInfo, }, } - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) ids := []string{hex.EncodeToString(txHash)} genericResponse := &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerData.TransactionsIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerData.TransactionsIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, diff --git a/integrationtests/updateNFT_test.go b/integrationtests/updateNFT_test.go index ba8db04e..723fddfc 100644 --- a/integrationtests/updateNFT_test.go +++ b/integrationtests/updateNFT_test.go @@ -4,12 +4,13 @@ package integrationtests import ( "bytes" + "context" + "encoding/hex" "encoding/json" "math/big" "testing" "github.com/multiversx/mx-chain-core-go/core" - coreData "github.com/multiversx/mx-chain-core-go/data" dataBlock "github.com/multiversx/mx-chain-core-go/data/block" "github.com/multiversx/mx-chain-core-go/data/esdt" 
"github.com/multiversx/mx-chain-core-go/data/outport" @@ -44,10 +45,11 @@ func TestNFTUpdateMetadata(t *testing.T) { // CREATE NFT data address := "erd1w7jyzuj6cv4ngw8luhlkakatjpmjh3ql95lmxphd3vssc4vpymks6k5th7" - pool := &outport.Pool{ - Logs: []*coreData.LogData{ + pool := &outport.TransactionPool{ + Logs: []*outport.LogData{ { - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ Events: []*transaction.Event{ { Address: decodeAddress(address), @@ -57,24 +59,24 @@ func TestNFTUpdateMetadata(t *testing.T) { nil, }, }, - TxHash: "h1", }, }, } - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) ids := []string{"NFT-abcd-0e"} genericResponse := &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.TokensIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TokensIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/updateNFT/token.json"), string(genericResponse.Docs[0].Source)) // Add URIS 1 - pool = &outport.Pool{ - Logs: []*coreData.LogData{ + pool = &outport.TransactionPool{ + Logs: []*outport.LogData{ { - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ Events: []*transaction.Event{ { Address: decodeAddress(address), @@ -84,18 +86,18 @@ func TestNFTUpdateMetadata(t *testing.T) { nil, }, }, - TxHash: "h1", }, }, } - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) // Add URIS 2 --- results should be the same - pool = &outport.Pool{ - Logs: []*coreData.LogData{ + pool = &outport.TransactionPool{ + Logs: []*outport.LogData{ { - LogHandler: &transaction.Log{ + TxHash: 
hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ Events: []*transaction.Event{ { Address: decodeAddress(address), @@ -105,24 +107,24 @@ func TestNFTUpdateMetadata(t *testing.T) { nil, }, }, - TxHash: "h1", }, }, } - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) // Update attributes 1 ids = []string{"NFT-abcd-0e"} genericResponse = &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.TokensIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TokensIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/updateNFT/token-after-add-uris.json"), string(genericResponse.Docs[0].Source)) - pool = &outport.Pool{ - Logs: []*coreData.LogData{ + pool = &outport.TransactionPool{ + Logs: []*outport.LogData{ { - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ Events: []*transaction.Event{ { Address: decodeAddress(address), @@ -132,25 +134,25 @@ func TestNFTUpdateMetadata(t *testing.T) { nil, }, }, - TxHash: "h1", }, }, } - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) ids = []string{"NFT-abcd-0e"} genericResponse = &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.TokensIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TokensIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/updateNFT/token-after-update-attributes.json"), string(genericResponse.Docs[0].Source)) // Update attributes 2 - pool = &outport.Pool{ - Logs: []*coreData.LogData{ + pool = &outport.TransactionPool{ + Logs: []*outport.LogData{ { - 
LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ Events: []*transaction.Event{ { Address: decodeAddress(address), @@ -160,24 +162,24 @@ func TestNFTUpdateMetadata(t *testing.T) { nil, }, }, - TxHash: "h1", }, }, } - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) ids = []string{"NFT-abcd-0e"} genericResponse = &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.TokensIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TokensIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/updateNFT/token-after-update-attributes-second.json"), string(genericResponse.Docs[0].Source)) // Freeze nft - pool = &outport.Pool{ - Logs: []*coreData.LogData{ + pool = &outport.TransactionPool{ + Logs: []*outport.LogData{ { - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ Events: []*transaction.Event{ { Address: decodeAddress(address), @@ -187,23 +189,23 @@ func TestNFTUpdateMetadata(t *testing.T) { nil, }, }, - TxHash: "h1", }, }, } - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) ids = []string{"NFT-abcd-0e"} genericResponse = &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.TokensIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TokensIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/updateNFT/token-after-freeze.json"), string(genericResponse.Docs[0].Source)) // UnFreeze nft - pool = &outport.Pool{ - Logs: []*coreData.LogData{ + pool = &outport.TransactionPool{ + Logs: 
[]*outport.LogData{ { - LogHandler: &transaction.Log{ + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ Events: []*transaction.Event{ { Address: decodeAddress(address), @@ -213,15 +215,14 @@ func TestNFTUpdateMetadata(t *testing.T) { nil, }, }, - TxHash: "h1", }, }, } - err = esProc.SaveTransactions(body, header, pool, nil, false, testNumOfShards) + err = esProc.SaveTransactions(createOutportBlockWithHeader(body, header, pool, nil, testNumOfShards)) require.Nil(t, err) ids = []string{"NFT-abcd-0e"} genericResponse = &GenericResponse{} - err = esClient.DoMultiGet(ids, indexerdata.TokensIndex, true, genericResponse) + err = esClient.DoMultiGet(context.Background(), ids, indexerdata.TokensIndex, true, genericResponse) require.Nil(t, err) require.JSONEq(t, readExpectedResult("./testdata/updateNFT/token-after-un-freeze.json"), string(genericResponse.Docs[0].Source)) } diff --git a/integrationtests/utils.go b/integrationtests/utils.go index 9cfbfc28..6c603cdc 100644 --- a/integrationtests/utils.go +++ b/integrationtests/utils.go @@ -22,9 +22,10 @@ import ( var ( log = logger.GetOrCreate("integration-tests") - pubKeyConverter, _ = pubkeyConverter.NewBech32PubkeyConverter(32, log) + pubKeyConverter, _ = pubkeyConverter.NewBech32PubkeyConverter(32, addressPrefix) ) +//nolint func setLogLevelDebug() { _ = logger.SetLogLevel("process:DEBUG") } @@ -57,13 +58,14 @@ func CreateElasticProcessor( DBClient: esClient, EnabledIndexes: []string{dataindexer.TransactionsIndex, dataindexer.LogsIndex, dataindexer.AccountsESDTIndex, dataindexer.ScResultsIndex, dataindexer.ReceiptsIndex, dataindexer.BlockIndex, dataindexer.AccountsIndex, dataindexer.TokensIndex, dataindexer.TagsIndex, - dataindexer.OperationsIndex, dataindexer.DelegatorsIndex, dataindexer.ESDTsIndex}, + dataindexer.OperationsIndex, dataindexer.DelegatorsIndex, dataindexer.ESDTsIndex, dataindexer.SCDeploysIndex, dataindexer.MiniblocksIndex}, Denomination: 18, } return factory.CreateElasticProcessor(args) } 
+//nolint func readExpectedResult(path string) string { jsonFile, _ := os.Open(path) byteValue, _ := ioutil.ReadAll(jsonFile) @@ -71,6 +73,7 @@ func readExpectedResult(path string) string { return string(byteValue) } +//nolint func getElementFromSlice(path string, index int) string { fileBytes := readExpectedResult(path) slice := make([]map[string]interface{}, 0) diff --git a/metrics/dtos.go b/metrics/dtos.go new file mode 100644 index 00000000..3caaa05f --- /dev/null +++ b/metrics/dtos.go @@ -0,0 +1,12 @@ +package metrics + +import "time" + +// ArgsAddIndexingData holds all the data needed for indexing metrics +type ArgsAddIndexingData struct { + StatusCode int + GotError bool + MessageLen uint64 + Topic string + Duration time.Duration +} diff --git a/metrics/prometheusMetrics.go b/metrics/prometheusMetrics.go new file mode 100644 index 00000000..8baccf42 --- /dev/null +++ b/metrics/prometheusMetrics.go @@ -0,0 +1,86 @@ +package metrics + +import ( + "bytes" + "strconv" + + dto "github.com/prometheus/client_model/go" + "github.com/prometheus/common/expfmt" + "google.golang.org/protobuf/proto" +) + +const ( + operationName = "operation" + shardIDName = "shardID" + errorCodeName = "errorCode" +) + +func counterMetric(metricName, operation string, shardIDStr string, count uint64) string { + metricFamily := &dto.MetricFamily{ + Name: proto.String(metricName), + Type: dto.MetricType_COUNTER.Enum(), + Metric: []*dto.Metric{ + { + Label: []*dto.LabelPair{ + { + Name: proto.String(operationName), + Value: proto.String(operation), + }, + { + Name: proto.String(shardIDName), + Value: proto.String(shardIDStr), + }, + }, + Counter: &dto.Counter{ + Value: proto.Float64(float64(count)), + }, + }, + }, + } + + return promMetricAsString(metricFamily) +} + +func errorsMetric(metricName, operation string, shardIDStr string, errorsCount map[int]uint64) string { + metricFamily := &dto.MetricFamily{ + Name: proto.String(metricName), + Type: dto.MetricType_GAUGE.Enum(), + Metric: 
make([]*dto.Metric, 0, len(errorsCount)), + } + + for code, count := range errorsCount { + m := &dto.Metric{ + Label: []*dto.LabelPair{ + { + Name: proto.String(operationName), + Value: proto.String(operation), + }, + { + Name: proto.String(shardIDName), + Value: proto.String(shardIDStr), + }, + { + Name: proto.String(errorCodeName), + Value: proto.String(strconv.Itoa(code)), + }, + }, + Gauge: &dto.Gauge{ + Value: proto.Float64(float64(count)), + }, + } + + metricFamily.Metric = append(metricFamily.Metric, m) + } + + return promMetricAsString(metricFamily) +} + +func promMetricAsString(metric *dto.MetricFamily) string { + out := bytes.NewBuffer(make([]byte, 0)) + _, err := expfmt.MetricFamilyToText(out, metric) + if err != nil { + return "" + } + + return out.String() + "\n" +} diff --git a/metrics/statusMetrics.go b/metrics/statusMetrics.go new file mode 100644 index 00000000..06222ff4 --- /dev/null +++ b/metrics/statusMetrics.go @@ -0,0 +1,118 @@ +package metrics + +import ( + "bytes" + "net/http" + "strings" + "sync" + "unicode" + + "github.com/multiversx/mx-chain-es-indexer-go/core/request" +) + +const ( + operationCount = "operations_count" + errorsCount = "errors_count" + totalTime = "total_time" + totalData = "total_data" + requestsErrors = "requests_errors" +) + +type statusMetrics struct { + metrics map[string]*request.MetricsResponse + mut sync.RWMutex +} + +// NewStatusMetrics will return an instance of the statusMetrics +func NewStatusMetrics() *statusMetrics { + return &statusMetrics{ + metrics: make(map[string]*request.MetricsResponse), + } +} + +// AddIndexingData will add the indexing data for the give topic +func (sm *statusMetrics) AddIndexingData(args ArgsAddIndexingData) { + sm.mut.Lock() + defer sm.mut.Unlock() + + topic := camelToSnake(args.Topic) + _, found := sm.metrics[topic] + if !found { + sm.metrics[topic] = &request.MetricsResponse{ + ErrorsCount: map[int]uint64{}, + } + } + + sm.metrics[topic].OperationsCount++ + 
sm.metrics[topic].TotalIndexingTime += args.Duration + sm.metrics[topic].TotalData += args.MessageLen + + isErrorCode := args.StatusCode >= http.StatusBadRequest + if args.GotError || isErrorCode { + sm.metrics[topic].TotalErrorsCount++ + } + if isErrorCode { + sm.metrics[topic].ErrorsCount[args.StatusCode]++ + } +} + +// GetMetrics returns the metrics map +func (sm *statusMetrics) GetMetrics() map[string]*request.MetricsResponse { + sm.mut.RLock() + defer sm.mut.RUnlock() + + return sm.getAllUnprotected() +} + +// GetMetricsForPrometheus returns the metrics in a prometheus format +func (sm *statusMetrics) GetMetricsForPrometheus() string { + sm.mut.RLock() + metrics := sm.getAllUnprotected() + sm.mut.RUnlock() + + stringBuilder := strings.Builder{} + + for topicWithShardID, metricsData := range metrics { + topic, shardIDStr := request.SplitTopicAndShardID(topicWithShardID) + stringBuilder.WriteString(counterMetric(topic, totalData, shardIDStr, metricsData.TotalData)) + stringBuilder.WriteString(counterMetric(topic, errorsCount, shardIDStr, metricsData.TotalErrorsCount)) + stringBuilder.WriteString(counterMetric(topic, operationCount, shardIDStr, metricsData.OperationsCount)) + stringBuilder.WriteString(counterMetric(topic, totalTime, shardIDStr, uint64(metricsData.TotalIndexingTime.Milliseconds()))) + stringBuilder.WriteString(errorsMetric(topic, requestsErrors, shardIDStr, metricsData.ErrorsCount)) + } + + promMetricsOutput := stringBuilder.String() + + return promMetricsOutput +} + +func (sm *statusMetrics) getAllUnprotected() map[string]*request.MetricsResponse { + newMap := make(map[string]*request.MetricsResponse) + for key, value := range sm.metrics { + newMap[key] = value + } + + return newMap +} + +// IsInterfaceNil returns true if there is no value under the interface +func (sm *statusMetrics) IsInterfaceNil() bool { + return sm == nil +} + +func camelToSnake(camelStr string) string { + var snakeBuf bytes.Buffer + + for i, r := range camelStr { + if 
unicode.IsUpper(r) { + if i > 0 && unicode.IsLower(rune(camelStr[i-1])) { + snakeBuf.WriteRune('_') + } + snakeBuf.WriteRune(unicode.ToLower(r)) + } else { + snakeBuf.WriteRune(r) + } + } + + return snakeBuf.String() +} diff --git a/metrics/statusMetrics_test.go b/metrics/statusMetrics_test.go new file mode 100644 index 00000000..08c05d4a --- /dev/null +++ b/metrics/statusMetrics_test.go @@ -0,0 +1,71 @@ +package metrics + +import ( + "net/http" + "testing" + + "github.com/multiversx/mx-chain-core-go/data/outport" + "github.com/multiversx/mx-chain-es-indexer-go/core/request" + "github.com/stretchr/testify/require" +) + +func TestStatusMetrics_AddIndexingDataAndGetMetrics(t *testing.T) { + t.Parallel() + + statusMetricsHandler := NewStatusMetrics() + require.False(t, statusMetricsHandler.IsInterfaceNil()) + + topic1 := "test1_0" + statusMetricsHandler.AddIndexingData(ArgsAddIndexingData{ + GotError: true, + MessageLen: 100, + Topic: topic1, + Duration: 12, + StatusCode: http.StatusBadRequest, + }) + statusMetricsHandler.AddIndexingData(ArgsAddIndexingData{ + GotError: false, + MessageLen: 222, + Topic: topic1, + Duration: 15, + }) + + metrics := statusMetricsHandler.GetMetrics() + require.Equal(t, &request.MetricsResponse{ + OperationsCount: 2, + TotalErrorsCount: 1, + TotalIndexingTime: 27, + TotalData: 322, + ErrorsCount: map[int]uint64{ + http.StatusBadRequest: 1, + }, + }, metrics[topic1]) + + prometheusMetrics := statusMetricsHandler.GetMetricsForPrometheus() + require.Equal(t, `# TYPE test1 counter +test1{operation="total_data",shardID="0"} 322 + +# TYPE test1 counter +test1{operation="errors_count",shardID="0"} 1 + +# TYPE test1 counter +test1{operation="operations_count",shardID="0"} 2 + +# TYPE test1 counter +test1{operation="total_time",shardID="0"} 0 + +# TYPE test1 gauge +test1{operation="requests_errors",shardID="0",errorCode="400"} 1 + +`, prometheusMetrics) +} + +func TestCamelCaseToSnakeCase(t *testing.T) { + t.Parallel() + + require.Equal(t, 
"settings", camelToSnake(outport.TopicSettings)) + require.Equal(t, "save_validators_pub_keys", camelToSnake(outport.TopicSaveValidatorsPubKeys)) + require.Equal(t, "000000000000000", camelToSnake("000000000000000")) + require.Equal(t, "one_one_one", camelToSnake("One_One_One")) + require.Equal(t, "req_block", camelToSnake("req_block")) +} diff --git a/mock/blockContainerStub.go b/mock/blockContainerStub.go new file mode 100644 index 00000000..e74643de --- /dev/null +++ b/mock/blockContainerStub.go @@ -0,0 +1,20 @@ +package mock + +import ( + "github.com/multiversx/mx-chain-core-go/core" + "github.com/multiversx/mx-chain-core-go/data/block" +) + +// BlockContainerStub - +type BlockContainerStub struct { + GetCalled func(headerType core.HeaderType) (block.EmptyBlockCreator, error) +} + +// Get - +func (bcs *BlockContainerStub) Get(headerType core.HeaderType) (block.EmptyBlockCreator, error) { + if bcs.GetCalled != nil { + return bcs.GetCalled(headerType) + } + + return nil, nil +} diff --git a/mock/databaseWriterStub.go b/mock/databaseWriterStub.go index 3d468878..9a41e0ff 100644 --- a/mock/databaseWriterStub.go +++ b/mock/databaseWriterStub.go @@ -2,13 +2,11 @@ package mock import ( "bytes" - - "github.com/elastic/go-elasticsearch/v7/esapi" + "context" ) // DatabaseWriterStub - type DatabaseWriterStub struct { - DoRequestCalled func(req *esapi.IndexRequest) error DoBulkRequestCalled func(buff *bytes.Buffer, index string) error DoQueryRemoveCalled func(index string, body *bytes.Buffer) error DoMultiGetCalled func(ids []string, index string, withSource bool, response interface{}) error @@ -17,33 +15,25 @@ type DatabaseWriterStub struct { } // UpdateByQuery - -func (dwm *DatabaseWriterStub) UpdateByQuery(_ string, _ *bytes.Buffer) error { +func (dwm *DatabaseWriterStub) UpdateByQuery(_ context.Context, _ string, _ *bytes.Buffer) error { return nil } // DoCountRequest - -func (dwm *DatabaseWriterStub) DoCountRequest(_ string, _ []byte) (uint64, error) { +func (dwm 
*DatabaseWriterStub) DoCountRequest(_ context.Context, _ string, _ []byte) (uint64, error) { return 0, nil } // DoScrollRequest - -func (dwm *DatabaseWriterStub) DoScrollRequest(index string, body []byte, withSource bool, handlerFunc func(responseBytes []byte) error) error { +func (dwm *DatabaseWriterStub) DoScrollRequest(_ context.Context, index string, body []byte, withSource bool, handlerFunc func(responseBytes []byte) error) error { if dwm.DoScrollRequestCalled != nil { return dwm.DoScrollRequestCalled(index, body, withSource, handlerFunc) } return nil } -// DoRequest - -func (dwm *DatabaseWriterStub) DoRequest(req *esapi.IndexRequest) error { - if dwm.DoRequestCalled != nil { - return dwm.DoRequestCalled(req) - } - return nil -} - // DoBulkRequest - -func (dwm *DatabaseWriterStub) DoBulkRequest(buff *bytes.Buffer, index string) error { +func (dwm *DatabaseWriterStub) DoBulkRequest(_ context.Context, buff *bytes.Buffer, index string) error { if dwm.DoBulkRequestCalled != nil { return dwm.DoBulkRequestCalled(buff, index) } @@ -51,7 +41,7 @@ func (dwm *DatabaseWriterStub) DoBulkRequest(buff *bytes.Buffer, index string) e } // DoMultiGet - -func (dwm *DatabaseWriterStub) DoMultiGet(hashes []string, index string, withSource bool, response interface{}) error { +func (dwm *DatabaseWriterStub) DoMultiGet(_ context.Context, hashes []string, index string, withSource bool, response interface{}) error { if dwm.DoMultiGetCalled != nil { return dwm.DoMultiGetCalled(hashes, index, withSource, response) } @@ -60,7 +50,7 @@ func (dwm *DatabaseWriterStub) DoMultiGet(hashes []string, index string, withSou } // DoQueryRemove - -func (dwm *DatabaseWriterStub) DoQueryRemove(index string, body *bytes.Buffer) error { +func (dwm *DatabaseWriterStub) DoQueryRemove(_ context.Context, index string, body *bytes.Buffer) error { if dwm.DoQueryRemoveCalled != nil { return dwm.DoQueryRemoveCalled(index, body) } diff --git a/mock/dbAccountsHandlerStub.go b/mock/dbAccountsHandlerStub.go index 
84899938..35747c27 100644 --- a/mock/dbAccountsHandlerStub.go +++ b/mock/dbAccountsHandlerStub.go @@ -1,7 +1,7 @@ package mock import ( - "github.com/multiversx/mx-chain-core-go/data/outport" + "github.com/multiversx/mx-chain-core-go/data/alteredAccount" "github.com/multiversx/mx-chain-es-indexer-go/data" ) @@ -12,7 +12,7 @@ type DBAccountsHandlerStub struct { } // GetAccounts - -func (dba *DBAccountsHandlerStub) GetAccounts(_ map[string]*outport.AlteredAccount) ([]*data.Account, []*data.AccountESDT) { +func (dba *DBAccountsHandlerStub) GetAccounts(_ map[string]*alteredAccount.AlteredAccount) ([]*data.Account, []*data.AccountESDT) { return nil, nil } @@ -59,7 +59,7 @@ func (dba *DBAccountsHandlerStub) SerializeNFTCreateInfo(_ []*data.TokenInfo, _ } // PutTokenMedataDataInTokens - -func (dba *DBAccountsHandlerStub) PutTokenMedataDataInTokens(_ []*data.TokenInfo, _ map[string]*outport.AlteredAccount) { +func (dba *DBAccountsHandlerStub) PutTokenMedataDataInTokens(_ []*data.TokenInfo, _ map[string]*alteredAccount.AlteredAccount) { } // SerializeTypeForProvidedIDs - diff --git a/mock/dbTransactionsHandlerStub.go b/mock/dbTransactionsHandlerStub.go index 9ec841ba..c4aff654 100644 --- a/mock/dbTransactionsHandlerStub.go +++ b/mock/dbTransactionsHandlerStub.go @@ -9,7 +9,7 @@ import ( // DBTransactionProcessorStub - type DBTransactionProcessorStub struct { - PrepareTransactionsForDatabaseCalled func(body *block.Body, header coreData.HeaderHandler, pool *outport.Pool) *data.PreparedResults + PrepareTransactionsForDatabaseCalled func(mbs []*block.MiniBlock, header coreData.HeaderHandler, pool *outport.TransactionPool) *data.PreparedResults SerializeReceiptsCalled func(recs []*data.Receipt, buffSlice *data.BufferSlice, index string) error SerializeScResultsCalled func(scrs []*data.ScResult, buffSlice *data.BufferSlice, index string) error } @@ -20,9 +20,9 @@ func (tps *DBTransactionProcessorStub) SerializeTransactionsFeeData(_ map[string } // PrepareTransactionsForDatabase - 
-func (tps *DBTransactionProcessorStub) PrepareTransactionsForDatabase(body *block.Body, header coreData.HeaderHandler, pool *outport.Pool, _ bool, _ uint32) *data.PreparedResults { +func (tps *DBTransactionProcessorStub) PrepareTransactionsForDatabase(mbs []*block.MiniBlock, header coreData.HeaderHandler, pool *outport.TransactionPool, _ bool, _ uint32) *data.PreparedResults { if tps.PrepareTransactionsForDatabaseCalled != nil { - return tps.PrepareTransactionsForDatabaseCalled(body, header, pool) + return tps.PrepareTransactionsForDatabaseCalled(mbs, header, pool) } return nil @@ -43,7 +43,7 @@ func (tps *DBTransactionProcessorStub) SerializeReceipts(recs []*data.Receipt, b } // SerializeTransactions - -func (tps *DBTransactionProcessorStub) SerializeTransactions(_ []*data.Transaction, _ map[string]string, _ uint32, _ *data.BufferSlice, _ string) error { +func (tps *DBTransactionProcessorStub) SerializeTransactions(_ []*data.Transaction, _ map[string]*outport.StatusInfo, _ uint32, _ *data.BufferSlice, _ string) error { return nil } diff --git a/mock/dispatcherStub.go b/mock/dispatcherStub.go deleted file mode 100644 index 3dd089cc..00000000 --- a/mock/dispatcherStub.go +++ /dev/null @@ -1,39 +0,0 @@ -package mock - -import ( - "github.com/multiversx/mx-chain-es-indexer-go/process/dataindexer/workItems" -) - -// DispatcherMock - -type DispatcherMock struct { - StartIndexDataCalled func() - CloseCalled func() error - AddCalled func(item workItems.WorkItemHandler) -} - -// StartIndexData - -func (dm *DispatcherMock) StartIndexData() { - if dm.StartIndexDataCalled != nil { - dm.StartIndexDataCalled() - } -} - -// Close - -func (dm *DispatcherMock) Close() error { - if dm.CloseCalled != nil { - return dm.CloseCalled() - } - return nil -} - -// Add - -func (dm *DispatcherMock) Add(item workItems.WorkItemHandler) { - if dm.AddCalled != nil { - dm.AddCalled(item) - } -} - -// IsInterfaceNil returns true if there is no value under the interface -func (dm *DispatcherMock) 
IsInterfaceNil() bool { - return dm == nil -} diff --git a/mock/elasticProcessorStub.go b/mock/elasticProcessorStub.go index a49eedd3..3d01b732 100644 --- a/mock/elasticProcessorStub.go +++ b/mock/elasticProcessorStub.go @@ -4,30 +4,20 @@ import ( coreData "github.com/multiversx/mx-chain-core-go/data" "github.com/multiversx/mx-chain-core-go/data/block" "github.com/multiversx/mx-chain-core-go/data/outport" - "github.com/multiversx/mx-chain-es-indexer-go/data" ) // ElasticProcessorStub - type ElasticProcessorStub struct { - SaveHeaderCalled func( - headerHash []byte, - header coreData.HeaderHandler, - signersIndexes []uint64, - body *block.Body, - notarizedHeadersHashes []string, - gasConsumptionData outport.HeaderGasConsumption, - txsSize int, - pool *outport.Pool, - ) error + SaveHeaderCalled func(outportBlockWithHeader *outport.OutportBlockWithHeader) error RemoveHeaderCalled func(header coreData.HeaderHandler) error RemoveMiniblocksCalled func(header coreData.HeaderHandler, body *block.Body) error RemoveTransactionsCalled func(header coreData.HeaderHandler, body *block.Body) error - SaveMiniblocksCalled func(header coreData.HeaderHandler, body *block.Body) error - SaveTransactionsCalled func(body *block.Body, header coreData.HeaderHandler, pool *outport.Pool, coreAlteredAccounts map[string]*outport.AlteredAccount) error - SaveValidatorsRatingCalled func(index string, validatorsRatingInfo []*data.ValidatorRatingInfo) error - SaveRoundsInfoCalled func(infos []*data.RoundInfo) error - SaveShardValidatorsPubKeysCalled func(shardID, epoch uint32, shardValidatorsPubKeys [][]byte) error - SaveAccountsCalled func(timestamp uint64, acc []*data.Account) error + SaveMiniblocksCalled func(header coreData.HeaderHandler, miniBlocks []*block.MiniBlock) error + SaveTransactionsCalled func(outportBlockWithHeader *outport.OutportBlockWithHeader) error + SaveValidatorsRatingCalled func(validatorsRating *outport.ValidatorsRating) error + SaveRoundsInfoCalled func(infos 
*outport.RoundsInfo) error + SaveShardValidatorsPubKeysCalled func(validators *outport.ValidatorsPubKeys) error + SaveAccountsCalled func(accountsData *outport.Accounts) error RemoveAccountsESDTCalled func(headerTimestamp uint64) error } @@ -41,17 +31,9 @@ func (eim *ElasticProcessorStub) RemoveAccountsESDT(headerTimestamp uint64, _ ui } // SaveHeader - -func (eim *ElasticProcessorStub) SaveHeader( - headerHash []byte, - header coreData.HeaderHandler, - signersIndexes []uint64, - body *block.Body, - notarizedHeadersHashes []string, - gasConsumptionData outport.HeaderGasConsumption, - txsSize int, - pool *outport.Pool) error { +func (eim *ElasticProcessorStub) SaveHeader(obh *outport.OutportBlockWithHeader) error { if eim.SaveHeaderCalled != nil { - return eim.SaveHeaderCalled(headerHash, header, signersIndexes, body, notarizedHeadersHashes, gasConsumptionData, txsSize, pool) + return eim.SaveHeaderCalled(obh) } return nil } @@ -81,31 +63,31 @@ func (eim *ElasticProcessorStub) RemoveTransactions(header coreData.HeaderHandle } // SaveMiniblocks - -func (eim *ElasticProcessorStub) SaveMiniblocks(header coreData.HeaderHandler, body *block.Body) error { +func (eim *ElasticProcessorStub) SaveMiniblocks(header coreData.HeaderHandler, miniBlocks []*block.MiniBlock) error { if eim.SaveMiniblocksCalled != nil { - return eim.SaveMiniblocksCalled(header, body) + return eim.SaveMiniblocksCalled(header, miniBlocks) } return nil } // SaveTransactions - -func (eim *ElasticProcessorStub) SaveTransactions(body *block.Body, header coreData.HeaderHandler, pool *outport.Pool, coreAlteredAccounts map[string]*outport.AlteredAccount, _ bool, _ uint32) error { +func (eim *ElasticProcessorStub) SaveTransactions(outportBlockWithHeader *outport.OutportBlockWithHeader) error { if eim.SaveTransactionsCalled != nil { - return eim.SaveTransactionsCalled(body, header, pool, coreAlteredAccounts) + return eim.SaveTransactionsCalled(outportBlockWithHeader) } return nil } // SaveValidatorsRating - 
-func (eim *ElasticProcessorStub) SaveValidatorsRating(index string, validatorsRatingInfo []*data.ValidatorRatingInfo) error { +func (eim *ElasticProcessorStub) SaveValidatorsRating(validatorsRating *outport.ValidatorsRating) error { if eim.SaveValidatorsRatingCalled != nil { - return eim.SaveValidatorsRatingCalled(index, validatorsRatingInfo) + return eim.SaveValidatorsRatingCalled(validatorsRating) } return nil } // SaveRoundsInfo - -func (eim *ElasticProcessorStub) SaveRoundsInfo(info []*data.RoundInfo) error { +func (eim *ElasticProcessorStub) SaveRoundsInfo(info *outport.RoundsInfo) error { if eim.SaveRoundsInfoCalled != nil { return eim.SaveRoundsInfoCalled(info) } @@ -113,22 +95,27 @@ func (eim *ElasticProcessorStub) SaveRoundsInfo(info []*data.RoundInfo) error { } // SaveShardValidatorsPubKeys - -func (eim *ElasticProcessorStub) SaveShardValidatorsPubKeys(shardID, epoch uint32, shardValidatorsPubKeys [][]byte) error { +func (eim *ElasticProcessorStub) SaveShardValidatorsPubKeys(validatorsPubKeys *outport.ValidatorsPubKeys) error { if eim.SaveShardValidatorsPubKeysCalled != nil { - return eim.SaveShardValidatorsPubKeysCalled(shardID, epoch, shardValidatorsPubKeys) + return eim.SaveShardValidatorsPubKeysCalled(validatorsPubKeys) } return nil } // SaveAccounts - -func (eim *ElasticProcessorStub) SaveAccounts(timestamp uint64, acc []*data.Account, _ uint32) error { +func (eim *ElasticProcessorStub) SaveAccounts(accounts *outport.Accounts) error { if eim.SaveAccountsCalled != nil { - return eim.SaveAccountsCalled(timestamp, acc) + return eim.SaveAccountsCalled(accounts) } return nil } +// SetOutportConfig - +func (eim *ElasticProcessorStub) SetOutportConfig(_ outport.OutportConfig) error { + return nil +} + // IsInterfaceNil returns true if there is no value under the interface func (eim *ElasticProcessorStub) IsInterfaceNil() bool { return eim == nil diff --git a/mock/mockTransport.go b/mock/mockTransport.go new file mode 100644 index 00000000..d40e425f --- 
/dev/null +++ b/mock/mockTransport.go @@ -0,0 +1,14 @@ +package mock + +import "net/http" + +// TransportMock - +type TransportMock struct { + Response *http.Response + Err error +} + +// RoundTrip - +func (m *TransportMock) RoundTrip(_ *http.Request) (*http.Response, error) { + return m.Response, m.Err +} diff --git a/mock/pubkeyConverterMock.go b/mock/pubkeyConverterMock.go index 2495757f..e81d21ff 100644 --- a/mock/pubkeyConverterMock.go +++ b/mock/pubkeyConverterMock.go @@ -2,6 +2,8 @@ package mock import ( "encoding/hex" + + "github.com/multiversx/mx-chain-core-go/core" ) // PubkeyConverterMock - @@ -22,7 +24,23 @@ func (pcm *PubkeyConverterMock) Decode(humanReadable string) ([]byte, error) { } // Encode - -func (pcm *PubkeyConverterMock) Encode(pkBytes []byte) string { +func (pcm *PubkeyConverterMock) Encode(pkBytes []byte) (string, error) { + return hex.EncodeToString(pkBytes), nil +} + +// EncodeSlice - +func (pcm *PubkeyConverterMock) EncodeSlice(pkBytesSlice [][]byte) ([]string, error) { + encodedSlice := make([]string, 0) + + for _, pkBytes := range pkBytesSlice { + encodedSlice = append(encodedSlice, hex.EncodeToString(pkBytes)) + } + + return encodedSlice, nil +} + +// SilentEncode - +func (pcm *PubkeyConverterMock) SilentEncode(pkBytes []byte, log core.Logger) string { return hex.EncodeToString(pkBytes) } diff --git a/mock/pubkeyConverterStub.go b/mock/pubkeyConverterStub.go index f8ef5507..dcd3de04 100644 --- a/mock/pubkeyConverterStub.go +++ b/mock/pubkeyConverterStub.go @@ -1,10 +1,14 @@ package mock +import "github.com/multiversx/mx-chain-core-go/core" + // PubkeyConverterStub - type PubkeyConverterStub struct { - LenCalled func() int - DecodeCalled func(humanReadable string) ([]byte, error) - EncodeCalled func(pkBytes []byte) string + LenCalled func() int + DecodeCalled func(humanReadable string) ([]byte, error) + EncodeCalled func(pkBytes []byte) (string, error) + EncodeSliceCalled func(pkBytesSlice [][]byte) ([]string, error) + 
SilentEncodeCalled func(pkBytesSlice []byte, log core.Logger) string } // Len - @@ -26,11 +30,29 @@ func (pcs *PubkeyConverterStub) Decode(humanReadable string) ([]byte, error) { } // Encode - -func (pcs *PubkeyConverterStub) Encode(pkBytes []byte) string { +func (pcs *PubkeyConverterStub) Encode(pkBytes []byte) (string, error) { if pcs.EncodeCalled != nil { return pcs.EncodeCalled(pkBytes) } + return "", nil +} + +// Encode - +func (pcs *PubkeyConverterStub) EncodeSlice(pkBytesSlice [][]byte) ([]string, error) { + if pcs.EncodeSliceCalled != nil { + return pcs.EncodeSliceCalled(pkBytesSlice) + } + + return make([]string, 0), nil +} + +// SilentEncode - +func (pcs *PubkeyConverterStub) SilentEncode(pkBytes []byte, log core.Logger) string { + if pcs.SilentEncodeCalled != nil { + return pcs.SilentEncodeCalled(pkBytes, log) + } + return "" } diff --git a/process/dataindexer/dataDispatcher.go b/process/dataindexer/dataDispatcher.go deleted file mode 100644 index 55c4d51d..00000000 --- a/process/dataindexer/dataDispatcher.go +++ /dev/null @@ -1,197 +0,0 @@ -package dataindexer - -import ( - "context" - "errors" - "runtime/debug" - "sync" - "time" - - "github.com/multiversx/mx-chain-core-go/core/atomic" - "github.com/multiversx/mx-chain-core-go/core/check" - "github.com/multiversx/mx-chain-es-indexer-go/process/dataindexer/workItems" - logger "github.com/multiversx/mx-chain-logger-go" -) - -var log = logger.GetOrCreate("indexer") - -const ( - durationBetweenErrorRetry = time.Second * 3 - closeTimeout = time.Second * 20 - backOffTime = time.Second * 10 - maxBackOff = time.Minute * 5 -) - -type dataDispatcher struct { - backOffTime time.Duration - chanWorkItems chan workItems.WorkItemHandler - cancelFunc func() - wasClosed *atomic.Flag - currentWriteDone chan struct{} - closeStartTime time.Time - mutexCloseStartTime sync.RWMutex -} - -// NewDataDispatcher creates a new dataDispatcher instance, capable of saving sequentially data in elasticsearch database -func 
NewDataDispatcher(cacheSize int) (*dataDispatcher, error) { - if cacheSize < 0 { - return nil, ErrNegativeCacheSize - } - - dd := &dataDispatcher{ - chanWorkItems: make(chan workItems.WorkItemHandler, cacheSize), - wasClosed: &atomic.Flag{}, - currentWriteDone: make(chan struct{}), - mutexCloseStartTime: sync.RWMutex{}, - } - - return dd, nil -} - -// StartIndexData will start index data in database -func (d *dataDispatcher) StartIndexData() { - var ctx context.Context - ctx, d.cancelFunc = context.WithCancel(context.Background()) - - go d.doDataDispatch(ctx) -} - -func (d *dataDispatcher) doDataDispatch(ctx context.Context) { - defer func() { - r := recover() - if r != nil { - log.Error("d.doDataDispatch", - "message", r, - "panic", string(debug.Stack())) - panic(r) - } - }() - - for { - select { - case <-ctx.Done(): - d.stopWorker() - return - case wi := <-d.chanWorkItems: - timeoutOnClose := d.doWork(wi) - if timeoutOnClose { - d.stopWorker() - return - } - } - } -} - -func (d *dataDispatcher) stopWorker() { - log.Debug("dispatcher's go routine is stopping...") - d.currentWriteDone <- struct{}{} -} - -// Close will close the endless running go routine -func (d *dataDispatcher) Close() error { - previousState := d.wasClosed.SetReturningPrevious() - if previousState { - return nil - } - - d.mutexCloseStartTime.Lock() - d.closeStartTime = time.Now() - d.mutexCloseStartTime.Unlock() - - if d.cancelFunc != nil { - d.cancelFunc() - } - - <-d.currentWriteDone - d.consumeRemainingItems() - return nil -} - -func (d *dataDispatcher) consumeRemainingItems() { - for { - select { - case wi := <-d.chanWorkItems: - isTimeout := d.doWork(wi) - if isTimeout { - return - } - default: - return - } - } -} - -// Add will add a new item in queue -func (d *dataDispatcher) Add(item workItems.WorkItemHandler) { - if check.IfNil(item) { - log.Warn("dataDispatcher.Add nil item: will do nothing") - return - } - if d.wasClosed.IsSet() { - log.Warn("dataDispatcher.Add cannot add item: 
channel chanWorkItems is closed") - return - } - - d.chanWorkItems <- item -} - -func (d *dataDispatcher) doWork(wi workItems.WorkItemHandler) bool { - for { - if d.exitIfTimeoutOnClose() { - log.Warn("dataDispatcher.doWork could not index item", - "error", "timeout") - return true - } - - err := wi.Save() - if errors.Is(err, ErrBackOff) { - log.Warn("dataDispatcher.doWork could not index item", - "received back off:", err.Error()) - - d.increaseBackOffTime() - time.Sleep(d.backOffTime) - - continue - } - - d.backOffTime = 0 - if err != nil { - log.Warn("dataDispatcher.doWork could not index item (will retry)", "error", err.Error()) - time.Sleep(durationBetweenErrorRetry) - - continue - } - - return false - } - -} - -func (d *dataDispatcher) exitIfTimeoutOnClose() bool { - if !d.wasClosed.IsSet() { - return false - } - - d.mutexCloseStartTime.RLock() - passedTime := time.Since(d.closeStartTime) - d.mutexCloseStartTime.RUnlock() - - return passedTime > closeTimeout -} - -func (d *dataDispatcher) increaseBackOffTime() { - if d.backOffTime == 0 { - d.backOffTime = backOffTime - return - } - if d.backOffTime >= maxBackOff { - return - } - - d.backOffTime += d.backOffTime / 5 -} - -// IsInterfaceNil returns true if there is no value under the interface -func (d *dataDispatcher) IsInterfaceNil() bool { - return d == nil -} diff --git a/process/dataindexer/dataDispatcher_test.go b/process/dataindexer/dataDispatcher_test.go deleted file mode 100644 index cd7da157..00000000 --- a/process/dataindexer/dataDispatcher_test.go +++ /dev/null @@ -1,209 +0,0 @@ -package dataindexer - -import ( - "context" - "errors" - "fmt" - "sync" - "sync/atomic" - "testing" - "time" - - "github.com/multiversx/mx-chain-es-indexer-go/data" - "github.com/multiversx/mx-chain-es-indexer-go/mock" - "github.com/multiversx/mx-chain-es-indexer-go/process/dataindexer/workItems" - "github.com/stretchr/testify/require" -) - -func TestNewDataDispatcher_InvalidCacheSize(t *testing.T) { - t.Parallel() - - 
dataDist, err := NewDataDispatcher(-1) - - require.Nil(t, dataDist) - require.Equal(t, ErrNegativeCacheSize, err) -} - -func TestNewDataDispatcher(t *testing.T) { - t.Parallel() - - dispatcher, err := NewDataDispatcher(100) - require.NoError(t, err) - require.NotNil(t, dispatcher) -} - -func TestDataDispatcher_StartIndexDataClose(t *testing.T) { - t.Parallel() - - dispatcher, err := NewDataDispatcher(100) - require.NoError(t, err) - dispatcher.StartIndexData() - - called := false - wg := sync.WaitGroup{} - wg.Add(1) - elasticProc := &mock.ElasticProcessorStub{ - SaveRoundsInfoCalled: func(infos []*data.RoundInfo) error { - called = true - wg.Done() - return nil - }, - SaveAccountsCalled: func(timestamp uint64, acc []*data.Account) error { - time.Sleep(7 * time.Second) - return nil - }, - - SaveValidatorsRatingCalled: func(index string, validatorsRatingInfo []*data.ValidatorRatingInfo) error { - time.Sleep(6 * time.Second) - return nil - }, - } - dispatcher.Add(workItems.NewItemRounds(elasticProc, []*data.RoundInfo{})) - wg.Wait() - - require.True(t, called) - - dispatcher.Add(workItems.NewItemAccounts(elasticProc, 0, nil, 0)) - wg.Add(1) - dispatcher.Add(workItems.NewItemRounds(elasticProc, []*data.RoundInfo{})) - dispatcher.Add(workItems.NewItemRating(elasticProc, "", nil)) - wg.Add(1) - dispatcher.Add(workItems.NewItemRounds(elasticProc, []*data.RoundInfo{})) - err = dispatcher.Close() - require.NoError(t, err) -} - -func TestDataDispatcher_Add(t *testing.T) { - t.Parallel() - - dispatcher, err := NewDataDispatcher(100) - require.NoError(t, err) - dispatcher.StartIndexData() - - calledCount := uint32(0) - wg := sync.WaitGroup{} - wg.Add(1) - elasticProc := &mock.ElasticProcessorStub{ - SaveRoundsInfoCalled: func(infos []*data.RoundInfo) error { - if calledCount < 2 { - atomic.AddUint32(&calledCount, 1) - return fmt.Errorf("%w: wrapped error", ErrBackOff) - } - - atomic.AddUint32(&calledCount, 1) - wg.Done() - return nil - }, - } - - start := time.Now() - 
dispatcher.Add(workItems.NewItemRounds(elasticProc, []*data.RoundInfo{})) - wg.Wait() - - timePassed := time.Since(start) - require.Greater(t, 2*int64(timePassed), int64(backOffTime)) - - require.Equal(t, uint32(3), atomic.LoadUint32(&calledCount)) - - err = dispatcher.Close() - require.NoError(t, err) -} - -func TestDataDispatcher_AddWithErrorShouldRetryTheReprocessing(t *testing.T) { - t.Parallel() - - dispatcher, err := NewDataDispatcher(100) - require.NoError(t, err) - dispatcher.StartIndexData() - - calledCount := uint32(0) - wg := sync.WaitGroup{} - wg.Add(1) - elasticProc := &mock.ElasticProcessorStub{ - SaveRoundsInfoCalled: func(infos []*data.RoundInfo) error { - if calledCount < 2 { - atomic.AddUint32(&calledCount, 1) - return errors.New("generic error") - } - - atomic.AddUint32(&calledCount, 1) - wg.Done() - return nil - }, - } - - start := time.Now() - dispatcher.Add(workItems.NewItemRounds(elasticProc, []*data.RoundInfo{})) - wg.Wait() - - timePassed := time.Since(start) - require.Greater(t, int64(timePassed), int64(2*durationBetweenErrorRetry)) - - require.Equal(t, uint32(3), atomic.LoadUint32(&calledCount)) - - err = dispatcher.Close() - require.NoError(t, err) -} - -func TestDataDispatcher_Close(t *testing.T) { - t.Parallel() - - dispatcher, err := NewDataDispatcher(100) - require.NoError(t, err) - dispatcher.StartIndexData() - - elasticProc := &mock.ElasticProcessorStub{ - SaveRoundsInfoCalled: func(infos []*data.RoundInfo) error { - time.Sleep(1000*time.Millisecond + 200*time.Microsecond) - return nil - }, - } - - wg := &sync.WaitGroup{} - wg.Add(1) - ctx, cancelFunc := context.WithCancel(context.Background()) - go func(c context.Context, w *sync.WaitGroup) { - count := 0 - for { - select { - case <-c.Done(): - return - default: - count++ - if count == 105 { - w.Done() - } - dispatcher.Add(workItems.NewItemRounds(elasticProc, []*data.RoundInfo{})) - time.Sleep(50 * time.Millisecond) - } - } - }(ctx, wg) - - wg.Wait() - - err = dispatcher.Close() - 
require.NoError(t, err) - - cancelFunc() -} - -func TestDataDispatcher_RecoverPanic(t *testing.T) { - t.Parallel() - - defer func() { - r := recover() - require.NotNil(t, r) - }() - - dispatcher, err := NewDataDispatcher(100) - require.NoError(t, err) - - elasticProc := &mock.ElasticProcessorStub{ - SaveRoundsInfoCalled: func(infos []*data.RoundInfo) error { - panic(1) - }, - } - - dispatcher.Add(workItems.NewItemRounds(elasticProc, []*data.RoundInfo{})) - dispatcher.doDataDispatch(context.Background()) -} diff --git a/process/dataindexer/dataIndexer.go b/process/dataindexer/dataIndexer.go index 435ce566..9f75e343 100644 --- a/process/dataindexer/dataIndexer.go +++ b/process/dataindexer/dataIndexer.go @@ -1,26 +1,32 @@ package dataindexer import ( + "encoding/hex" + "fmt" + "time" + + "github.com/multiversx/mx-chain-core-go/core" "github.com/multiversx/mx-chain-core-go/core/check" - coreData "github.com/multiversx/mx-chain-core-go/data" + "github.com/multiversx/mx-chain-core-go/data" + "github.com/multiversx/mx-chain-core-go/data/block" "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-core-go/marshal" - "github.com/multiversx/mx-chain-es-indexer-go/data" - "github.com/multiversx/mx-chain-es-indexer-go/process/dataindexer/workItems" + logger "github.com/multiversx/mx-chain-logger-go" ) +var log = logger.GetOrCreate("dataindexer") + // ArgDataIndexer is a structure that is used to store all the components that are needed to create an indexer type ArgDataIndexer struct { - Marshalizer marshal.Marshalizer - DataDispatcher DispatcherHandler + HeaderMarshaller marshal.Marshalizer ElasticProcessor ElasticProcessor + BlockContainer BlockContainerHandler } type dataIndexer struct { - isNilIndexer bool - dispatcher DispatcherHandler elasticProcessor ElasticProcessor - marshalizer marshal.Marshalizer + headerMarshaller marshal.Marshalizer + blockContainer BlockContainerHandler } // NewDataIndexer will create a new data indexer @@ -31,126 
+37,169 @@ func NewDataIndexer(arguments ArgDataIndexer) (*dataIndexer, error) { } dataIndexerObj := &dataIndexer{ - isNilIndexer: false, - dispatcher: arguments.DataDispatcher, elasticProcessor: arguments.ElasticProcessor, - marshalizer: arguments.Marshalizer, + headerMarshaller: arguments.HeaderMarshaller, + blockContainer: arguments.BlockContainer, } return dataIndexerObj, nil } func checkIndexerArgs(arguments ArgDataIndexer) error { - if check.IfNil(arguments.DataDispatcher) { - return ErrNilDataDispatcher - } if check.IfNil(arguments.ElasticProcessor) { return ErrNilElasticProcessor } - if check.IfNil(arguments.Marshalizer) { + if check.IfNil(arguments.HeaderMarshaller) { return ErrNilMarshalizer } + if check.IfNilReflect(arguments.BlockContainer) { + return ErrNilBlockContainerHandler + } return nil } +func (di *dataIndexer) getHeaderFromBytes(headerType core.HeaderType, headerBytes []byte) (header data.HeaderHandler, err error) { + creator, err := di.blockContainer.Get(headerType) + if err != nil { + return nil, err + } + + return block.GetHeaderFromBytes(di.headerMarshaller, creator, headerBytes) +} + // SaveBlock saves the block info in the queue to be sent to elastic -func (di *dataIndexer) SaveBlock(args *outport.ArgsSaveBlockData) error { - wi := workItems.NewItemBlock( - di.elasticProcessor, - di.marshalizer, - args, - ) - di.dispatcher.Add(wi) +func (di *dataIndexer) SaveBlock(outportBlock *outport.OutportBlock) error { + header, err := di.getHeaderFromBytes(core.HeaderType(outportBlock.BlockData.HeaderType), outportBlock.BlockData.HeaderBytes) + if err != nil { + return err + } + + headerHash := outportBlock.BlockData.HeaderHash + shardID := header.GetShardID() + headerNonce := header.GetNonce() + startTime := time.Now() + defer func() { + log.Debug("di.SaveBlockData", + "duration", time.Since(startTime), + "shardID", shardID, + "nonce", headerNonce, + "hash", headerHash, + ) + }() + log.Debug("indexer: starting indexing block", "hash", headerHash, 
"nonce", headerNonce) + + if outportBlock.TransactionPool == nil { + outportBlock.TransactionPool = &outport.TransactionPool{} + } + + return di.saveBlockData(outportBlock, header) +} + +func (di *dataIndexer) saveBlockData(outportBlock *outport.OutportBlock, header data.HeaderHandler) error { + outportBlockWithHeader := &outport.OutportBlockWithHeader{ + OutportBlock: outportBlock, + Header: header, + } + + headerHash := outportBlock.BlockData.HeaderHash + headerNonce := header.GetNonce() + err := di.elasticProcessor.SaveHeader(outportBlockWithHeader) + if err != nil { + return fmt.Errorf("%w when saving header block, hash %s, nonce %d", + err, hex.EncodeToString(headerHash), headerNonce) + } + + if len(outportBlock.BlockData.Body.MiniBlocks) == 0 { + return nil + } + + miniBlocks := append(outportBlock.BlockData.Body.MiniBlocks, outportBlock.BlockData.IntraShardMiniBlocks...) + err = di.elasticProcessor.SaveMiniblocks(header, miniBlocks) + if err != nil { + return fmt.Errorf("%w when saving miniblocks, block hash %s, nonce %d", + err, hex.EncodeToString(headerHash), headerNonce) + } + + err = di.elasticProcessor.SaveTransactions(outportBlockWithHeader) + if err != nil { + return fmt.Errorf("%w when saving transactions, block hash %s, nonce %d", + err, hex.EncodeToString(headerHash), headerNonce) + } return nil } // Close will stop goroutine that index data in database func (di *dataIndexer) Close() error { - return di.dispatcher.Close() + return nil } // RevertIndexedBlock will remove from database block and miniblocks -func (di *dataIndexer) RevertIndexedBlock(header coreData.HeaderHandler, body coreData.BodyHandler) error { - wi := workItems.NewItemRemoveBlock( - di.elasticProcessor, - body, - header, - ) - di.dispatcher.Add(wi) +func (di *dataIndexer) RevertIndexedBlock(blockData *outport.BlockData) error { + header, err := di.getHeaderFromBytes(core.HeaderType(blockData.HeaderType), blockData.HeaderBytes) + if err != nil { + return err + } - return nil + err 
= di.elasticProcessor.RemoveHeader(header) + if err != nil { + return err + } + + err = di.elasticProcessor.RemoveMiniblocks(header, blockData.Body) + if err != nil { + return err + } + + err = di.elasticProcessor.RemoveTransactions(header, blockData.Body) + if err != nil { + return err + } + + return di.elasticProcessor.RemoveAccountsESDT(header.GetTimeStamp(), header.GetShardID()) } // SaveRoundsInfo will save data about a slice of rounds in elasticsearch -func (di *dataIndexer) SaveRoundsInfo(rf []*outport.RoundInfo) error { - roundsInfo := make([]*data.RoundInfo, 0) - for _, info := range rf { - roundsInfo = append(roundsInfo, &data.RoundInfo{ - Index: info.Index, - SignersIndexes: info.SignersIndexes, - BlockWasProposed: info.BlockWasProposed, - ShardId: info.ShardId, - Epoch: info.Epoch, - Timestamp: info.Timestamp, - }) - } - - wi := workItems.NewItemRounds(di.elasticProcessor, roundsInfo) - di.dispatcher.Add(wi) - - return nil +func (di *dataIndexer) SaveRoundsInfo(rounds *outport.RoundsInfo) error { + return di.elasticProcessor.SaveRoundsInfo(rounds) } // SaveValidatorsRating will save all validators rating info to elasticsearch -func (di *dataIndexer) SaveValidatorsRating(indexID string, validatorsRatingInfo []*outport.ValidatorRatingInfo) error { - valRatingInfo := make([]*data.ValidatorRatingInfo, 0) - for _, info := range validatorsRatingInfo { - valRatingInfo = append(valRatingInfo, &data.ValidatorRatingInfo{ - PublicKey: info.PublicKey, - Rating: info.Rating, - }) - } - - wi := workItems.NewItemRating( - di.elasticProcessor, - indexID, - valRatingInfo, - ) - di.dispatcher.Add(wi) - - return nil +func (di *dataIndexer) SaveValidatorsRating(ratingData *outport.ValidatorsRating) error { + return di.elasticProcessor.SaveValidatorsRating(ratingData) } // SaveValidatorsPubKeys will save all validators public keys to elasticsearch -func (di *dataIndexer) SaveValidatorsPubKeys(validatorsPubKeys map[uint32][][]byte, epoch uint32) error { - wi := 
workItems.NewItemValidators( - di.elasticProcessor, - epoch, - validatorsPubKeys, - ) - di.dispatcher.Add(wi) - - return nil +func (di *dataIndexer) SaveValidatorsPubKeys(validatorsPubKeys *outport.ValidatorsPubKeys) error { + return di.elasticProcessor.SaveShardValidatorsPubKeys(validatorsPubKeys) } // SaveAccounts will save the provided accounts -func (di *dataIndexer) SaveAccounts(timestamp uint64, accounts map[string]*outport.AlteredAccount, shardID uint32) error { - wi := workItems.NewItemAccounts(di.elasticProcessor, timestamp, accounts, shardID) - di.dispatcher.Add(wi) +func (di *dataIndexer) SaveAccounts(accounts *outport.Accounts) error { + return di.elasticProcessor.SaveAccounts(accounts) +} +// FinalizedBlock returns nil +func (di *dataIndexer) FinalizedBlock(_ *outport.FinalizedBlock) error { return nil } -// FinalizedBlock returns nil -func (di *dataIndexer) FinalizedBlock(_ []byte) error { +// GetMarshaller return the marshaller +func (di *dataIndexer) GetMarshaller() marshal.Marshalizer { + return di.headerMarshaller +} + +// RegisterHandler will do nothing +func (di *dataIndexer) RegisterHandler(_ func() error, _ string) error { return nil } -// IsNilIndexer will return a bool value that signals if the indexer's implementation is a NilIndexer -func (di *dataIndexer) IsNilIndexer() bool { - return di.isNilIndexer +// SetCurrentSettings will set the provided settings +func (di *dataIndexer) SetCurrentSettings(cfg outport.OutportConfig) error { + log.Debug("dataIndexer.SetCurrentSettings", "importDBMode", cfg.IsInImportDBMode) + + return di.elasticProcessor.SetOutportConfig(cfg) } // IsInterfaceNil returns true if there is no value under the interface diff --git a/process/dataindexer/dataIndexer_test.go b/process/dataindexer/dataIndexer_test.go index 5938110c..88970177 100644 --- a/process/dataindexer/dataIndexer_test.go +++ b/process/dataindexer/dataIndexer_test.go @@ -5,30 +5,21 @@ import ( "github.com/multiversx/mx-chain-core-go/core" 
"github.com/multiversx/mx-chain-core-go/core/check" + coreData "github.com/multiversx/mx-chain-core-go/data" dataBlock "github.com/multiversx/mx-chain-core-go/data/block" "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-es-indexer-go/mock" - "github.com/multiversx/mx-chain-es-indexer-go/process/dataindexer/workItems" "github.com/stretchr/testify/require" ) func NewDataIndexerArguments() ArgDataIndexer { return ArgDataIndexer{ - Marshalizer: &mock.MarshalizerMock{}, - DataDispatcher: &mock.DispatcherMock{}, ElasticProcessor: &mock.ElasticProcessorStub{}, + HeaderMarshaller: &mock.MarshalizerMock{}, + BlockContainer: &mock.BlockContainerStub{}, } } -func TestDataIndexer_NewIndexerWithNilDataDispatcherShouldErr(t *testing.T) { - arguments := NewDataIndexerArguments() - arguments.DataDispatcher = nil - ei, err := NewDataIndexer(arguments) - - require.Nil(t, ei) - require.Equal(t, ErrNilDataDispatcher, err) -} - func TestDataIndexer_NewIndexerWithNilElasticProcessorShouldErr(t *testing.T) { arguments := NewDataIndexerArguments() arguments.ElasticProcessor = nil @@ -40,7 +31,7 @@ func TestDataIndexer_NewIndexerWithNilElasticProcessorShouldErr(t *testing.T) { func TestDataIndexer_NewIndexerWithNilMarshalizerShouldErr(t *testing.T) { arguments := NewDataIndexerArguments() - arguments.Marshalizer = nil + arguments.HeaderMarshaller = nil ei, err := NewDataIndexer(arguments) require.Nil(t, ei) @@ -54,48 +45,64 @@ func TestDataIndexer_NewIndexerWithCorrectParamsShouldWork(t *testing.T) { require.Nil(t, err) require.False(t, check.IfNil(ei)) - require.False(t, ei.IsNilIndexer()) } func TestDataIndexer_SaveBlock(t *testing.T) { - called := false + countMap := map[int]int{} arguments := NewDataIndexerArguments() - arguments.DataDispatcher = &mock.DispatcherMock{ - AddCalled: func(item workItems.WorkItemHandler) { - called = true + arguments.BlockContainer = &mock.BlockContainerStub{ + GetCalled: func(headerType core.HeaderType) 
(dataBlock.EmptyBlockCreator, error) { + return dataBlock.NewEmptyHeaderV2Creator(), nil + }, + } + + arguments.ElasticProcessor = &mock.ElasticProcessorStub{ + SaveHeaderCalled: func(outportBlockWithHeader *outport.OutportBlockWithHeader) error { + countMap[0]++ + return nil + }, + SaveMiniblocksCalled: func(header coreData.HeaderHandler, miniBlocks []*dataBlock.MiniBlock) error { + countMap[1]++ + return nil + }, + SaveTransactionsCalled: func(outportBlockWithHeader *outport.OutportBlockWithHeader) error { + countMap[2]++ + return nil }, } ei, _ := NewDataIndexer(arguments) - args := &outport.ArgsSaveBlockData{ - HeaderHash: []byte("hash"), - Body: &dataBlock.Body{MiniBlocks: []*dataBlock.MiniBlock{}}, - Header: nil, - SignersIndexes: nil, - NotarizedHeadersHashes: nil, - TransactionsPool: nil, + args := &outport.OutportBlock{ + BlockData: &outport.BlockData{ + HeaderType: string(core.ShardHeaderV2), + Body: &dataBlock.Body{MiniBlocks: []*dataBlock.MiniBlock{{}}}, + HeaderBytes: []byte("{}"), + }, } err := ei.SaveBlock(args) - require.True(t, called) require.Nil(t, err) + require.Equal(t, 1, countMap[0]) + require.Equal(t, 1, countMap[1]) + require.Equal(t, 1, countMap[2]) } func TestDataIndexer_SaveRoundInfo(t *testing.T) { called := false arguments := NewDataIndexerArguments() - arguments.DataDispatcher = &mock.DispatcherMock{ - AddCalled: func(item workItems.WorkItemHandler) { + + arguments.HeaderMarshaller = &mock.MarshalizerMock{Fail: true} + arguments.ElasticProcessor = &mock.ElasticProcessorStub{ + SaveRoundsInfoCalled: func(infos *outport.RoundsInfo) error { called = true + return nil }, } - - arguments.Marshalizer = &mock.MarshalizerMock{Fail: true} ei, _ := NewDataIndexer(arguments) _ = ei.Close() - err := ei.SaveRoundsInfo([]*outport.RoundInfo{}) + err := ei.SaveRoundsInfo(&outport.RoundsInfo{}) require.True(t, called) require.Nil(t, err) } @@ -104,9 +111,10 @@ func TestDataIndexer_SaveValidatorsPubKeys(t *testing.T) { called := false arguments := 
NewDataIndexerArguments() - arguments.DataDispatcher = &mock.DispatcherMock{ - AddCalled: func(item workItems.WorkItemHandler) { + arguments.ElasticProcessor = &mock.ElasticProcessorStub{ + SaveShardValidatorsPubKeysCalled: func(validators *outport.ValidatorsPubKeys) error { called = true + return nil }, } ei, _ := NewDataIndexer(arguments) @@ -115,9 +123,8 @@ func TestDataIndexer_SaveValidatorsPubKeys(t *testing.T) { keys := [][]byte{[]byte("key")} valPubKey[0] = keys - epoch := uint32(0) - err := ei.SaveValidatorsPubKeys(valPubKey, epoch) + err := ei.SaveValidatorsPubKeys(&outport.ValidatorsPubKeys{}) require.True(t, called) require.Nil(t, err) } @@ -126,32 +133,55 @@ func TestDataIndexer_SaveValidatorsRating(t *testing.T) { called := false arguments := NewDataIndexerArguments() - arguments.DataDispatcher = &mock.DispatcherMock{ - AddCalled: func(item workItems.WorkItemHandler) { + arguments.ElasticProcessor = &mock.ElasticProcessorStub{ + SaveValidatorsRatingCalled: func(validatorsRating *outport.ValidatorsRating) error { called = true + return nil }, } ei, _ := NewDataIndexer(arguments) - err := ei.SaveValidatorsRating("ID", []*outport.ValidatorRatingInfo{ - {Rating: 1}, {Rating: 2}, - }) + err := ei.SaveValidatorsRating(&outport.ValidatorsRating{}) require.True(t, called) require.Nil(t, err) } func TestDataIndexer_RevertIndexedBlock(t *testing.T) { - called := false + countMap := map[int]int{} arguments := NewDataIndexerArguments() - arguments.DataDispatcher = &mock.DispatcherMock{ - AddCalled: func(item workItems.WorkItemHandler) { - called = true + arguments.BlockContainer = &mock.BlockContainerStub{ + GetCalled: func(headerType core.HeaderType) (dataBlock.EmptyBlockCreator, error) { + return dataBlock.NewEmptyHeaderV2Creator(), nil + }} + arguments.ElasticProcessor = &mock.ElasticProcessorStub{ + RemoveHeaderCalled: func(header coreData.HeaderHandler) error { + countMap[0]++ + return nil + }, + RemoveMiniblocksCalled: func(header coreData.HeaderHandler, 
body *dataBlock.Body) error { + countMap[1]++ + return nil + }, + RemoveTransactionsCalled: func(header coreData.HeaderHandler, body *dataBlock.Body) error { + countMap[2]++ + return nil + }, + RemoveAccountsESDTCalled: func(headerTimestamp uint64) error { + countMap[3]++ + return nil }, } ei, _ := NewDataIndexer(arguments) - err := ei.RevertIndexedBlock(&dataBlock.Header{}, &dataBlock.Body{}) - require.True(t, called) + err := ei.RevertIndexedBlock(&outport.BlockData{ + HeaderType: string(core.ShardHeaderV2), + Body: &dataBlock.Body{MiniBlocks: []*dataBlock.MiniBlock{{}}}, + HeaderBytes: []byte("{}"), + }) require.Nil(t, err) + require.Equal(t, 1, countMap[0]) + require.Equal(t, 1, countMap[1]) + require.Equal(t, 1, countMap[2]) + require.Equal(t, 1, countMap[3]) } diff --git a/process/dataindexer/errors.go b/process/dataindexer/errors.go index 0e2a33a7..9b4fbd0f 100644 --- a/process/dataindexer/errors.go +++ b/process/dataindexer/errors.go @@ -11,15 +11,9 @@ var ErrNilPubkeyConverter = errors.New("nil pubkey converter") // ErrNegativeDenominationValue signals that a negative denomination value has been provided var ErrNegativeDenominationValue = errors.New("negative denomination value") -// ErrNilDataDispatcher signals that an operation has been attempted to or with a nil data dispatcher implementation -var ErrNilDataDispatcher = errors.New("nil data dispatcher") - // ErrNilElasticProcessor signals that an operation has been attempted to or with a nil elastic processor implementation var ErrNilElasticProcessor = errors.New("nil elastic processor") -// ErrNegativeCacheSize signals that an invalid cache size has been provided -var ErrNegativeCacheSize = errors.New("negative cache size") - // ErrEmptyEnabledIndexes signals that an empty slice of enables indexes has been provided var ErrEmptyEnabledIndexes = errors.New("empty enabled indexes slice") @@ -91,3 +85,6 @@ var ErrNilBalanceConverter = errors.New("nil balance converter") // ErrNilOperationsHandler signals 
that a nil operations handler has been provided var ErrNilOperationsHandler = errors.New("nil operations handler") + +// ErrNilBlockContainerHandler signals that a nil block container handler has been provided +var ErrNilBlockContainerHandler = errors.New("nil block container handler") diff --git a/process/dataindexer/interface.go b/process/dataindexer/interface.go index 242b5afd..cb99ab00 100644 --- a/process/dataindexer/interface.go +++ b/process/dataindexer/interface.go @@ -3,43 +3,27 @@ package dataindexer import ( "math/big" + "github.com/multiversx/mx-chain-core-go/core" coreData "github.com/multiversx/mx-chain-core-go/data" "github.com/multiversx/mx-chain-core-go/data/block" "github.com/multiversx/mx-chain-core-go/data/outport" - "github.com/multiversx/mx-chain-es-indexer-go/data" - "github.com/multiversx/mx-chain-es-indexer-go/process/dataindexer/workItems" + "github.com/multiversx/mx-chain-core-go/marshal" ) -// DispatcherHandler defines the interface for the dispatcher that will manage when items are saved in elasticsearch database -type DispatcherHandler interface { - StartIndexData() - Close() error - Add(item workItems.WorkItemHandler) - IsInterfaceNil() bool -} - // ElasticProcessor defines the interface for the elastic search indexer type ElasticProcessor interface { - SaveHeader( - headerHash []byte, - header coreData.HeaderHandler, - signersIndexes []uint64, - body *block.Body, - notarizedHeadersHashes []string, - gasConsumptionData outport.HeaderGasConsumption, - txsSize int, - pool *outport.Pool, - ) error + SaveHeader(outportBlockWithHeader *outport.OutportBlockWithHeader) error RemoveHeader(header coreData.HeaderHandler) error RemoveMiniblocks(header coreData.HeaderHandler, body *block.Body) error RemoveTransactions(header coreData.HeaderHandler, body *block.Body) error RemoveAccountsESDT(headerTimestamp uint64, shardID uint32) error - SaveMiniblocks(header coreData.HeaderHandler, body *block.Body) error - SaveTransactions(body *block.Body, 
header coreData.HeaderHandler, pool *outport.Pool, coreAlteredAccounts map[string]*outport.AlteredAccount, isImportDB bool, numOfShards uint32) error - SaveValidatorsRating(index string, validatorsRatingInfo []*data.ValidatorRatingInfo) error - SaveRoundsInfo(infos []*data.RoundInfo) error - SaveShardValidatorsPubKeys(shardID, epoch uint32, shardValidatorsPubKeys [][]byte) error - SaveAccounts(blockTimestamp uint64, accounts []*data.Account, shardID uint32) error + SaveMiniblocks(header coreData.HeaderHandler, miniBlocks []*block.MiniBlock) error + SaveTransactions(outportBlockWithHeader *outport.OutportBlockWithHeader) error + SaveValidatorsRating(ratingData *outport.ValidatorsRating) error + SaveRoundsInfo(rounds *outport.RoundsInfo) error + SaveShardValidatorsPubKeys(validatorsPubKeys *outport.ValidatorsPubKeys) error + SaveAccounts(accounts *outport.Accounts) error + SetOutportConfig(cfg outport.OutportConfig) error IsInterfaceNil() bool } @@ -64,22 +48,29 @@ type ShardCoordinator interface { // Indexer is an interface for saving node specific data to other storage. // This could be an elastic search index, a MySql database or any other external services. 
type Indexer interface { - SaveBlock(args *outport.ArgsSaveBlockData) error - RevertIndexedBlock(header coreData.HeaderHandler, body coreData.BodyHandler) error - SaveRoundsInfo(roundsInfos []*outport.RoundInfo) error - SaveValidatorsPubKeys(validatorsPubKeys map[uint32][][]byte, epoch uint32) error - SaveValidatorsRating(indexID string, infoRating []*outport.ValidatorRatingInfo) error - SaveAccounts(blockTimestamp uint64, acc map[string]*outport.AlteredAccount, shardID uint32) error - FinalizedBlock(headerHash []byte) error + SaveBlock(outportBlock *outport.OutportBlock) error + RevertIndexedBlock(blockData *outport.BlockData) error + SaveRoundsInfo(roundsInfos *outport.RoundsInfo) error + SaveValidatorsPubKeys(validatorsPubKeys *outport.ValidatorsPubKeys) error + SaveValidatorsRating(ratingData *outport.ValidatorsRating) error + SaveAccounts(accountsData *outport.Accounts) error + FinalizedBlock(finalizedBlock *outport.FinalizedBlock) error + GetMarshaller() marshal.Marshalizer + RegisterHandler(handler func() error, topic string) error + SetCurrentSettings(cfg outport.OutportConfig) error Close() error IsInterfaceNil() bool - IsNilIndexer() bool } // BalanceConverter defines what a balance converter should be able to do type BalanceConverter interface { ComputeBalanceAsFloat(balance *big.Int) (float64, error) - ComputeESDTBalanceAsFloat(balance *big.Int) (float64, error) + ConvertBigValueToFloat(value *big.Int) (float64, error) ComputeSliceOfStringsAsFloat(values []string) ([]float64, error) IsInterfaceNil() bool } + +// BlockContainerHandler defines what a block container should be able to do +type BlockContainerHandler interface { + Get(headerType core.HeaderType) (block.EmptyBlockCreator, error) +} diff --git a/process/dataindexer/nilIndexer.go b/process/dataindexer/nilIndexer.go deleted file mode 100644 index e4009687..00000000 --- a/process/dataindexer/nilIndexer.go +++ /dev/null @@ -1,65 +0,0 @@ -package dataindexer - -import ( - 
"github.com/multiversx/mx-chain-core-go/data" - "github.com/multiversx/mx-chain-core-go/data/outport" -) - -// NilIndexer will be used when an Indexer is required, but another one isn't necessary or available -type NilIndexer struct { -} - -// NewNilIndexer will return a Nil indexer -func NewNilIndexer() *NilIndexer { - return new(NilIndexer) -} - -// SaveBlock returns nil -func (ni *NilIndexer) SaveBlock(_ *outport.ArgsSaveBlockData) error { - return nil -} - -// RevertIndexedBlock returns nil -func (ni *NilIndexer) RevertIndexedBlock(_ data.HeaderHandler, _ data.BodyHandler) error { - return nil -} - -// SaveRoundsInfo returns nil -func (ni *NilIndexer) SaveRoundsInfo(_ []*outport.RoundInfo) error { - return nil -} - -// SaveValidatorsRating returns nil -func (ni *NilIndexer) SaveValidatorsRating(_ string, _ []*outport.ValidatorRatingInfo) error { - return nil -} - -// SaveValidatorsPubKeys returns nil -func (ni *NilIndexer) SaveValidatorsPubKeys(_ map[uint32][][]byte, _ uint32) error { - return nil -} - -// SaveAccounts returns nil -func (ni *NilIndexer) SaveAccounts(_ uint64, _ map[string]*outport.AlteredAccount, _ uint32) error { - return nil -} - -// Close will do nothing -func (ni *NilIndexer) Close() error { - return nil -} - -// FinalizedBlock returns nil -func (ni *NilIndexer) FinalizedBlock(_ []byte) error { - return nil -} - -// IsInterfaceNil returns true if there is no value under the interface -func (ni *NilIndexer) IsInterfaceNil() bool { - return ni == nil -} - -// IsNilIndexer will return a bool value that signals if the indexer's implementation is a NilIndexer -func (ni *NilIndexer) IsNilIndexer() bool { - return true -} diff --git a/process/dataindexer/workItems/interface.go b/process/dataindexer/workItems/interface.go deleted file mode 100644 index b65e51ef..00000000 --- a/process/dataindexer/workItems/interface.go +++ /dev/null @@ -1,52 +0,0 @@ -package workItems - -import ( - coreData "github.com/multiversx/mx-chain-core-go/data" - 
"github.com/multiversx/mx-chain-core-go/data/block" - "github.com/multiversx/mx-chain-core-go/data/outport" - "github.com/multiversx/mx-chain-es-indexer-go/data" -) - -// WorkItemHandler defines the interface for item that needs to be saved in elasticsearch database -type WorkItemHandler interface { - Save() error - IsInterfaceNil() bool -} - -type saveBlockIndexer interface { - SaveHeader( - headerHash []byte, - header coreData.HeaderHandler, - signersIndexes []uint64, - body *block.Body, - notarizedHeadersHashes []string, - gasConsumptionData outport.HeaderGasConsumption, - txsSize int, - pool *outport.Pool, - ) error - SaveMiniblocks(header coreData.HeaderHandler, body *block.Body) error - SaveTransactions(body *block.Body, header coreData.HeaderHandler, pool *outport.Pool, coreAlteredAccounts map[string]*outport.AlteredAccount, isImportDB bool, numOfShards uint32) error -} - -type saveRatingIndexer interface { - SaveValidatorsRating(index string, validatorsRatingInfo []*data.ValidatorRatingInfo) error -} - -type removeIndexer interface { - RemoveHeader(header coreData.HeaderHandler) error - RemoveMiniblocks(header coreData.HeaderHandler, body *block.Body) error - RemoveTransactions(header coreData.HeaderHandler, body *block.Body) error - RemoveAccountsESDT(headerTimestamp uint64, shardID uint32) error -} - -type saveRounds interface { - SaveRoundsInfo(infos []*data.RoundInfo) error -} - -type saveValidatorsIndexer interface { - SaveShardValidatorsPubKeys(shardID, epoch uint32, shardValidatorsPubKeys [][]byte) error -} - -type saveAccountsIndexer interface { - SaveAccounts(blockTimestamp uint64, accounts []*data.Account, shardID uint32) error -} diff --git a/process/dataindexer/workItems/workItemAccounts.go b/process/dataindexer/workItems/workItemAccounts.go deleted file mode 100644 index 38852fab..00000000 --- a/process/dataindexer/workItems/workItemAccounts.go +++ /dev/null @@ -1,54 +0,0 @@ -package workItems - -import ( - 
"github.com/multiversx/mx-chain-core-go/data/outport" - "github.com/multiversx/mx-chain-es-indexer-go/data" -) - -type itemAccounts struct { - indexer saveAccountsIndexer - blockTimestamp uint64 - accounts map[string]*outport.AlteredAccount - shardID uint32 -} - -// NewItemAccounts will create a new instance of itemAccounts -func NewItemAccounts( - indexer saveAccountsIndexer, - blockTimestamp uint64, - accounts map[string]*outport.AlteredAccount, - shardID uint32, -) WorkItemHandler { - return &itemAccounts{ - indexer: indexer, - accounts: accounts, - blockTimestamp: blockTimestamp, - shardID: shardID, - } -} - -// Save will save information about an account -func (wiv *itemAccounts) Save() error { - accounts := make([]*data.Account, 0, len(wiv.accounts)) - for _, account := range wiv.accounts { - accounts = append(accounts, &data.Account{ - UserAccount: account, - IsSender: false, - }) - } - - err := wiv.indexer.SaveAccounts(wiv.blockTimestamp, accounts, wiv.shardID) - if err != nil { - log.Warn("itemAccounts.Save", - "could not index account", - "error", err.Error()) - return err - } - - return nil -} - -// IsInterfaceNil returns true if there is no value under the interface -func (wiv *itemAccounts) IsInterfaceNil() bool { - return wiv == nil -} diff --git a/process/dataindexer/workItems/workItemAccounts_test.go b/process/dataindexer/workItems/workItemAccounts_test.go deleted file mode 100644 index c4d85077..00000000 --- a/process/dataindexer/workItems/workItemAccounts_test.go +++ /dev/null @@ -1,48 +0,0 @@ -package workItems_test - -import ( - "errors" - "testing" - - "github.com/multiversx/mx-chain-core-go/data/outport" - "github.com/multiversx/mx-chain-es-indexer-go/data" - "github.com/multiversx/mx-chain-es-indexer-go/mock" - "github.com/multiversx/mx-chain-es-indexer-go/process/dataindexer/workItems" - "github.com/stretchr/testify/require" -) - -func TestItemAccounts_Save(t *testing.T) { - called := false - itemAccounts := workItems.NewItemAccounts( - 
&mock.ElasticProcessorStub{ - SaveAccountsCalled: func(_ uint64, _ []*data.Account) error { - called = true - return nil - }, - }, - 0, - make(map[string]*outport.AlteredAccount), 0, - ) - require.False(t, itemAccounts.IsInterfaceNil()) - - err := itemAccounts.Save() - require.NoError(t, err) - require.True(t, called) -} - -func TestItemAccounts_SaveAccountsShouldErr(t *testing.T) { - localErr := errors.New("local err") - itemAccounts := workItems.NewItemAccounts( - &mock.ElasticProcessorStub{ - SaveAccountsCalled: func(_ uint64, _ []*data.Account) error { - return localErr - }, - }, - 0, - make(map[string]*outport.AlteredAccount), 0, - ) - require.False(t, itemAccounts.IsInterfaceNil()) - - err := itemAccounts.Save() - require.Equal(t, localErr, err) -} diff --git a/process/dataindexer/workItems/workItemBlock.go b/process/dataindexer/workItems/workItemBlock.go deleted file mode 100644 index 52864da0..00000000 --- a/process/dataindexer/workItems/workItemBlock.go +++ /dev/null @@ -1,135 +0,0 @@ -package workItems - -import ( - "encoding/hex" - "errors" - "fmt" - "time" - - "github.com/multiversx/mx-chain-core-go/core/check" - "github.com/multiversx/mx-chain-core-go/data" - "github.com/multiversx/mx-chain-core-go/data/block" - "github.com/multiversx/mx-chain-core-go/data/outport" - "github.com/multiversx/mx-chain-core-go/marshal" - logger "github.com/multiversx/mx-chain-logger-go" -) - -// ErrBodyTypeAssertion signals that body type assertion failed -var ErrBodyTypeAssertion = errors.New("elasticsearch - body type assertion failed") - -var log = logger.GetOrCreate("indexer/workItems") - -type itemBlock struct { - indexer saveBlockIndexer - marshalizer marshal.Marshalizer - argsSaveBlock *outport.ArgsSaveBlockData -} - -// NewItemBlock will create a new instance of ItemBlock -func NewItemBlock( - indexer saveBlockIndexer, - marshalizer marshal.Marshalizer, - args *outport.ArgsSaveBlockData, -) WorkItemHandler { - return &itemBlock{ - indexer: indexer, - marshalizer: 
marshalizer, - argsSaveBlock: args, - } -} - -// Save will prepare and save a block item in elasticsearch database -func (wib *itemBlock) Save() error { - if check.IfNil(wib.argsSaveBlock.Header) { - log.Warn("nil header provided when trying to index block, will skip") - return nil - } - - defer func(startTime time.Time) { - log.Debug("wib.SaveBlockData duration", "time", time.Since(startTime)) - }(time.Now()) - - log.Debug("indexer: starting indexing block", - "hash", wib.argsSaveBlock.HeaderHash, - "nonce", wib.argsSaveBlock.Header.GetNonce()) - - body, ok := wib.argsSaveBlock.Body.(*block.Body) - if !ok { - return fmt.Errorf("%w when trying body assertion, block hash %s, nonce %d", - ErrBodyTypeAssertion, wib.argsSaveBlock.HeaderHash, wib.argsSaveBlock.Header.GetNonce()) - } - - if wib.argsSaveBlock.TransactionsPool == nil { - wib.argsSaveBlock.TransactionsPool = &outport.Pool{} - } - - txsSizeInBytes := ComputeSizeOfTxs(wib.marshalizer, wib.argsSaveBlock.TransactionsPool) - err := wib.indexer.SaveHeader( - wib.argsSaveBlock.HeaderHash, - wib.argsSaveBlock.Header, - wib.argsSaveBlock.SignersIndexes, - body, - wib.argsSaveBlock.NotarizedHeadersHashes, - wib.argsSaveBlock.HeaderGasConsumption, - txsSizeInBytes, - wib.argsSaveBlock.TransactionsPool, - ) - if err != nil { - return fmt.Errorf("%w when saving header block, hash %s, nonce %d", - err, hex.EncodeToString(wib.argsSaveBlock.HeaderHash), wib.argsSaveBlock.Header.GetNonce()) - } - - if len(body.MiniBlocks) == 0 { - return nil - } - - err = wib.indexer.SaveMiniblocks(wib.argsSaveBlock.Header, body) - if err != nil { - return fmt.Errorf("%w when saving miniblocks, block hash %s, nonce %d", - err, hex.EncodeToString(wib.argsSaveBlock.HeaderHash), wib.argsSaveBlock.Header.GetNonce()) - } - - err = wib.indexer.SaveTransactions(body, wib.argsSaveBlock.Header, wib.argsSaveBlock.TransactionsPool, wib.argsSaveBlock.AlteredAccounts, wib.argsSaveBlock.IsImportDB, wib.argsSaveBlock.NumberOfShards) - if err != nil { - 
return fmt.Errorf("%w when saving transactions, block hash %s, nonce %d", - err, hex.EncodeToString(wib.argsSaveBlock.HeaderHash), wib.argsSaveBlock.Header.GetNonce()) - } - - return nil -} - -// IsInterfaceNil returns true if there is no value under the interface -func (wib *itemBlock) IsInterfaceNil() bool { - return wib == nil -} - -// ComputeSizeOfTxs will compute size of transactions in bytes -func ComputeSizeOfTxs(marshalizer marshal.Marshalizer, pool *outport.Pool) int { - sizeTxs := 0 - sizeTxs += computeSizeOfMapTxs(marshalizer, pool.Txs) - sizeTxs += computeSizeOfMapTxs(marshalizer, pool.Scrs) - sizeTxs += computeSizeOfMapTxs(marshalizer, pool.Invalid) - sizeTxs += computeSizeOfMapTxs(marshalizer, pool.Rewards) - sizeTxs += computeSizeOfMapTxs(marshalizer, pool.Receipts) - - return sizeTxs -} - -func computeSizeOfMapTxs(marshalizer marshal.Marshalizer, mapTxs map[string]data.TransactionHandlerWithGasUsedAndFee) int { - txsSize := 0 - for _, tx := range mapTxs { - txsSize += computeTxSize(marshalizer, tx.GetTxHandler()) - } - - return txsSize -} - -func computeTxSize(marshalizer marshal.Marshalizer, tx data.TransactionHandler) int { - txBytes, err := marshalizer.Marshal(tx) - if err != nil { - log.Debug("itemBlock.computeTxSize", "error", err) - return 0 - } - - return len(txBytes) -} diff --git a/process/dataindexer/workItems/workItemBlock_test.go b/process/dataindexer/workItems/workItemBlock_test.go deleted file mode 100644 index d17dd917..00000000 --- a/process/dataindexer/workItems/workItemBlock_test.go +++ /dev/null @@ -1,212 +0,0 @@ -package workItems_test - -import ( - "errors" - "fmt" - "math/big" - "reflect" - "testing" - "time" - - "github.com/multiversx/mx-chain-core-go/data" - dataBlock "github.com/multiversx/mx-chain-core-go/data/block" - "github.com/multiversx/mx-chain-core-go/data/outport" - "github.com/multiversx/mx-chain-core-go/data/transaction" - "github.com/multiversx/mx-chain-core-go/marshal" - 
"github.com/multiversx/mx-chain-es-indexer-go/mock" - "github.com/multiversx/mx-chain-es-indexer-go/process/dataindexer/workItems" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" -) - -func generateTxs(numTxs int) map[string]data.TransactionHandlerWithGasUsedAndFee { - txs := make(map[string]data.TransactionHandlerWithGasUsedAndFee, numTxs) - for i := 0; i < numTxs; i++ { - tx := &transaction.Transaction{ - Nonce: uint64(i), - Value: big.NewInt(int64(i)), - RcvAddr: []byte("443e79a8d99ba093262c1db48c58ab3d59bcfeb313ca5cddf2a9d1d06f9894ec"), - SndAddr: []byte("443e79a8d99ba093262c1db48c58ab3d59bcfeb313ca5cddf2a9d1d06f9894ec"), - GasPrice: 10000000, - GasLimit: 1000, - Data: []byte("dasjdksakjdksajdjksajkdjkasjdksajkdasjdksakjdksajdjksajkdjkasjdksajkdasjdksakjdksajdjksajkdjkasjdksajk"), - Signature: []byte("randomSignatureasdasldkasdsahjgdlhjaskldsjkaldjklasjkdjskladjkl;sajkl"), - } - txs[fmt.Sprintf("%d", i)] = outport.NewTransactionHandlerWithGasAndFee(tx, 0, big.NewInt(0)) - } - - return txs -} - -func TestItemBlock_SaveNilHeaderShouldRetNil(t *testing.T) { - itemBlock := workItems.NewItemBlock( - &mock.ElasticProcessorStub{}, - &mock.MarshalizerMock{}, - &outport.ArgsSaveBlockData{}, - ) - require.False(t, itemBlock.IsInterfaceNil()) - - err := itemBlock.Save() - assert.Nil(t, err) -} - -func TestItemBlock_SaveHeaderShouldErr(t *testing.T) { - localErr := errors.New("local err") - itemBlock := workItems.NewItemBlock( - &mock.ElasticProcessorStub{ - SaveHeaderCalled: func(headerHash []byte, header data.HeaderHandler, signersIndexes []uint64, body *dataBlock.Body, notarizedHeadersHashes []string, gasConsumptionData outport.HeaderGasConsumption, txsSize int, _ *outport.Pool) error { - return localErr - }, - }, - &mock.MarshalizerMock{}, - &outport.ArgsSaveBlockData{ - Header: &dataBlock.Header{}, - Body: &dataBlock.Body{MiniBlocks: []*dataBlock.MiniBlock{{}}}, - TransactionsPool: &outport.Pool{}, - }, - ) - require.False(t, 
itemBlock.IsInterfaceNil()) - - err := itemBlock.Save() - require.True(t, errors.Is(err, localErr)) -} - -func TestItemBlock_SaveNoMiniblocksShoulCallSaveHeader(t *testing.T) { - countCalled := 0 - itemBlock := workItems.NewItemBlock( - &mock.ElasticProcessorStub{ - SaveHeaderCalled: func(headerHash []byte, header data.HeaderHandler, signersIndexes []uint64, body *dataBlock.Body, notarizedHeadersHashes []string, gasConsumptionData outport.HeaderGasConsumption, txsSize int, _ *outport.Pool) error { - countCalled++ - return nil - }, - SaveMiniblocksCalled: func(header data.HeaderHandler, body *dataBlock.Body) error { - countCalled++ - return nil - }, - SaveTransactionsCalled: func(body *dataBlock.Body, header data.HeaderHandler, pool *outport.Pool, coreAlteredAccounts map[string]*outport.AlteredAccount) error { - countCalled++ - return nil - }, - }, - &mock.MarshalizerMock{}, - &outport.ArgsSaveBlockData{ - Body: &dataBlock.Body{}, - Header: &dataBlock.Header{}, - TransactionsPool: &outport.Pool{}, - }, - ) - require.False(t, itemBlock.IsInterfaceNil()) - - err := itemBlock.Save() - require.NoError(t, err) - require.Equal(t, 1, countCalled) -} - -func TestItemBlock_SaveMiniblocksShouldErr(t *testing.T) { - localErr := errors.New("local err") - itemBlock := workItems.NewItemBlock( - &mock.ElasticProcessorStub{ - SaveMiniblocksCalled: func(header data.HeaderHandler, body *dataBlock.Body) error { - return localErr - }, - }, - &mock.MarshalizerMock{}, - &outport.ArgsSaveBlockData{ - Header: &dataBlock.Header{}, - Body: &dataBlock.Body{MiniBlocks: []*dataBlock.MiniBlock{{}}}, - TransactionsPool: &outport.Pool{}, - }, - ) - require.False(t, itemBlock.IsInterfaceNil()) - - err := itemBlock.Save() - require.True(t, errors.Is(err, localErr)) -} - -func TestItemBlock_SaveTransactionsShouldErr(t *testing.T) { - localErr := errors.New("local err") - itemBlock := workItems.NewItemBlock( - &mock.ElasticProcessorStub{ - SaveTransactionsCalled: func(body *dataBlock.Body, header 
data.HeaderHandler, pool *outport.Pool, coreAlteredAccounts map[string]*outport.AlteredAccount) error { - return localErr - }, - }, - &mock.MarshalizerMock{}, - &outport.ArgsSaveBlockData{ - Header: &dataBlock.Header{}, - Body: &dataBlock.Body{MiniBlocks: []*dataBlock.MiniBlock{{}}}, - TransactionsPool: &outport.Pool{}, - }, - ) - require.False(t, itemBlock.IsInterfaceNil()) - - err := itemBlock.Save() - require.True(t, errors.Is(err, localErr)) -} - -func TestItemBlock_SaveShouldWork(t *testing.T) { - countCalled := 0 - itemBlock := workItems.NewItemBlock( - &mock.ElasticProcessorStub{ - SaveHeaderCalled: func(headerHash []byte, header data.HeaderHandler, signersIndexes []uint64, body *dataBlock.Body, notarizedHeadersHashes []string, gasConsumptionData outport.HeaderGasConsumption, txsSize int, _ *outport.Pool) error { - countCalled++ - return nil - }, - SaveMiniblocksCalled: func(header data.HeaderHandler, body *dataBlock.Body) error { - countCalled++ - return nil - }, - SaveTransactionsCalled: func(body *dataBlock.Body, header data.HeaderHandler, pool *outport.Pool, coreAlteredAccounts map[string]*outport.AlteredAccount) error { - countCalled++ - return nil - }, - }, - &mock.MarshalizerMock{}, - &outport.ArgsSaveBlockData{ - Header: &dataBlock.Header{}, - Body: &dataBlock.Body{MiniBlocks: []*dataBlock.MiniBlock{{}}}, - TransactionsPool: &outport.Pool{}, - }, - ) - require.False(t, itemBlock.IsInterfaceNil()) - - err := itemBlock.Save() - require.NoError(t, err) - require.Equal(t, 3, countCalled) -} - -func TestComputeSizeOfTxsDuration(t *testing.T) { - res := testing.Benchmark(benchmarkComputeSizeOfTxsDuration) - - fmt.Println("Time to calculate size of txs :", time.Duration(res.NsPerOp())) -} - -func benchmarkComputeSizeOfTxsDuration(b *testing.B) { - numTxs := 20000 - txs := generateTxs(numTxs) - gogoMarsh := &marshal.GogoProtoMarshalizer{} - - for i := 0; i < b.N; i++ { - workItems.ComputeSizeOfTxs(gogoMarsh, &outport.Pool{Txs: txs}) - } -} - -func 
TestComputeSizeOfTxs(t *testing.T) { - const kb = 1024 - numTxs := 20000 - - txs := generateTxs(numTxs) - gogoMarsh := &marshal.GogoProtoMarshalizer{} - lenTxs := workItems.ComputeSizeOfTxs(gogoMarsh, &outport.Pool{Txs: txs}) - - keys := reflect.ValueOf(txs).MapKeys() - oneTxBytes, _ := gogoMarsh.Marshal(txs[keys[0].String()].GetTxHandler()) - oneTxSize := len(oneTxBytes) - expectedSize := numTxs * oneTxSize - expectedSizeDeltaPlus := expectedSize + int(0.01*float64(expectedSize)) - expectedSizeDeltaMinus := expectedSize - int(0.01*float64(expectedSize)) - - require.Greater(t, lenTxs, expectedSizeDeltaMinus) - require.Less(t, lenTxs, expectedSizeDeltaPlus) - fmt.Printf("Size of %d transactions : %d Kbs \n", numTxs, lenTxs/kb) -} diff --git a/process/dataindexer/workItems/workItemRating.go b/process/dataindexer/workItems/workItemRating.go deleted file mode 100644 index 8cac1ce8..00000000 --- a/process/dataindexer/workItems/workItemRating.go +++ /dev/null @@ -1,34 +0,0 @@ -package workItems - -import "github.com/multiversx/mx-chain-es-indexer-go/data" - -type itemRating struct { - indexer saveRatingIndexer - indexID string - infoRating []*data.ValidatorRatingInfo -} - -// NewItemRating will create a new instance of itemRating -func NewItemRating(indexer saveRatingIndexer, indexID string, infoRating []*data.ValidatorRatingInfo) WorkItemHandler { - return &itemRating{ - indexer: indexer, - indexID: indexID, - infoRating: infoRating, - } -} - -// IsInterfaceNil returns true if there is no value under the interface -func (wir *itemRating) IsInterfaceNil() bool { - return wir == nil -} - -// Save will save validators rating in elasticsearch database -func (wir *itemRating) Save() error { - err := wir.indexer.SaveValidatorsRating(wir.indexID, wir.infoRating) - if err != nil { - log.Warn("itemRating.Save", "could not index validators rating", err.Error()) - return err - } - - return nil -} diff --git a/process/dataindexer/workItems/workItemRating_test.go 
b/process/dataindexer/workItems/workItemRating_test.go deleted file mode 100644 index 502e84ec..00000000 --- a/process/dataindexer/workItems/workItemRating_test.go +++ /dev/null @@ -1,54 +0,0 @@ -package workItems_test - -import ( - "errors" - "testing" - - "github.com/multiversx/mx-chain-es-indexer-go/data" - "github.com/multiversx/mx-chain-es-indexer-go/mock" - "github.com/multiversx/mx-chain-es-indexer-go/process/dataindexer/workItems" - "github.com/stretchr/testify/require" -) - -func TestItemRating_Save(t *testing.T) { - id := "0_1" - called := false - itemRating := workItems.NewItemRating( - &mock.ElasticProcessorStub{ - SaveValidatorsRatingCalled: func(index string, validatorsRatingInfo []*data.ValidatorRatingInfo) error { - require.Equal(t, id, index) - called = true - return nil - }, - }, - id, - []*data.ValidatorRatingInfo{ - {PublicKey: "pub-key", Rating: 100}, - }, - ) - require.False(t, itemRating.IsInterfaceNil()) - - err := itemRating.Save() - require.NoError(t, err) - require.True(t, called) -} - -func TestItemRating_SaveShouldErr(t *testing.T) { - id := "0_1" - localErr := errors.New("local err") - itemRating := workItems.NewItemRating( - &mock.ElasticProcessorStub{ - SaveValidatorsRatingCalled: func(index string, validatorsRatingInfo []*data.ValidatorRatingInfo) error { - return localErr - }, - }, - id, - []*data.ValidatorRatingInfo{ - {PublicKey: "pub-key", Rating: 100}, - }, - ) - require.False(t, itemRating.IsInterfaceNil()) - - err := itemRating.Save() - require.Equal(t, localErr, err) -} diff --git a/process/dataindexer/workItems/workItemRemoveBlock.go b/process/dataindexer/workItems/workItemRemoveBlock.go deleted file mode 100644 index 1e0497f8..00000000 --- a/process/dataindexer/workItems/workItemRemoveBlock.go +++ /dev/null @@ -1,55 +0,0 @@ -package workItems - -import ( - "github.com/multiversx/mx-chain-core-go/data" - "github.com/multiversx/mx-chain-core-go/data/block" -) - -type itemRemoveBlock struct { - indexer removeIndexer - 
bodyHandler data.BodyHandler - headerHandler data.HeaderHandler -} - -// NewItemRemoveBlock will create a new instance of itemRemoveBlock -func NewItemRemoveBlock( - indexer removeIndexer, - bodyHandler data.BodyHandler, - headerHandler data.HeaderHandler, -) WorkItemHandler { - return &itemRemoveBlock{ - indexer: indexer, - bodyHandler: bodyHandler, - headerHandler: headerHandler, - } -} - -// IsInterfaceNil returns true if there is no value under the interface -func (wirb *itemRemoveBlock) IsInterfaceNil() bool { - return wirb == nil -} - -// Save will remove a block and miniblocks from elasticsearch database -func (wirb *itemRemoveBlock) Save() error { - err := wirb.indexer.RemoveHeader(wirb.headerHandler) - if err != nil { - return err - } - - body, ok := wirb.bodyHandler.(*block.Body) - if !ok { - return ErrBodyTypeAssertion - } - - err = wirb.indexer.RemoveMiniblocks(wirb.headerHandler, body) - if err != nil { - return err - } - - err = wirb.indexer.RemoveTransactions(wirb.headerHandler, body) - if err != nil { - return err - } - - return wirb.indexer.RemoveAccountsESDT(wirb.headerHandler.GetTimeStamp(), wirb.headerHandler.GetShardID()) -} diff --git a/process/dataindexer/workItems/workItemRemoveBlock_test.go b/process/dataindexer/workItems/workItemRemoveBlock_test.go deleted file mode 100644 index 2648fca0..00000000 --- a/process/dataindexer/workItems/workItemRemoveBlock_test.go +++ /dev/null @@ -1,73 +0,0 @@ -package workItems_test - -import ( - "errors" - "testing" - - "github.com/multiversx/mx-chain-core-go/data" - dataBlock "github.com/multiversx/mx-chain-core-go/data/block" - "github.com/multiversx/mx-chain-es-indexer-go/mock" - "github.com/multiversx/mx-chain-es-indexer-go/process/dataindexer/workItems" - "github.com/stretchr/testify/require" -) - -func TestItemRemoveBlock_Save(t *testing.T) { - countCalled := 0 - itemRemove := workItems.NewItemRemoveBlock( - &mock.ElasticProcessorStub{ - RemoveHeaderCalled: func(header data.HeaderHandler) error { - 
countCalled++ - return nil - }, - RemoveMiniblocksCalled: func(header data.HeaderHandler, body *dataBlock.Body) error { - countCalled++ - return nil - }, - RemoveTransactionsCalled: func(header data.HeaderHandler, body *dataBlock.Body) error { - countCalled++ - return nil - }, - }, - &dataBlock.Body{}, - &dataBlock.Header{}, - ) - require.False(t, itemRemove.IsInterfaceNil()) - - err := itemRemove.Save() - require.NoError(t, err) - require.Equal(t, 3, countCalled) -} - -func TestItemRemoveBlock_SaveRemoveHeaderShouldErr(t *testing.T) { - localErr := errors.New("local err") - itemRemove := workItems.NewItemRemoveBlock( - &mock.ElasticProcessorStub{ - RemoveHeaderCalled: func(header data.HeaderHandler) error { - return localErr - }, - }, - &dataBlock.Body{}, - &dataBlock.Header{}, - ) - require.False(t, itemRemove.IsInterfaceNil()) - - err := itemRemove.Save() - require.Equal(t, localErr, err) -} - -func TestItemRemoveBlock_SaveRemoveMiniblocksShouldErr(t *testing.T) { - localErr := errors.New("local err") - itemRemove := workItems.NewItemRemoveBlock( - &mock.ElasticProcessorStub{ - RemoveMiniblocksCalled: func(header data.HeaderHandler, body *dataBlock.Body) error { - return localErr - }, - }, - &dataBlock.Body{}, - &dataBlock.Header{}, - ) - require.False(t, itemRemove.IsInterfaceNil()) - - err := itemRemove.Save() - require.Equal(t, localErr, err) -} diff --git a/process/dataindexer/workItems/workItemRounds.go b/process/dataindexer/workItems/workItemRounds.go deleted file mode 100644 index 11349b2a..00000000 --- a/process/dataindexer/workItems/workItemRounds.go +++ /dev/null @@ -1,34 +0,0 @@ -package workItems - -import ( - "github.com/multiversx/mx-chain-es-indexer-go/data" -) - -type itemRounds struct { - indexer saveRounds - roundsInfo []*data.RoundInfo -} - -// NewItemRounds will create a new instance of itemRounds -func NewItemRounds(indexer saveRounds, roundsInfo []*data.RoundInfo) WorkItemHandler { - return &itemRounds{ - indexer: indexer, - roundsInfo: 
roundsInfo, - } -} - -// Save will save in elasticsearch database information about rounds -func (wir *itemRounds) Save() error { - err := wir.indexer.SaveRoundsInfo(wir.roundsInfo) - if err != nil { - log.Warn("itemRounds.Save", "could not index rounds info", err.Error()) - return err - } - - return nil -} - -// IsInterfaceNil returns true if there is no value under the interface -func (wir *itemRounds) IsInterfaceNil() bool { - return wir == nil -} diff --git a/process/dataindexer/workItems/workItemRounds_test.go b/process/dataindexer/workItems/workItemRounds_test.go deleted file mode 100644 index 6b377690..00000000 --- a/process/dataindexer/workItems/workItemRounds_test.go +++ /dev/null @@ -1,49 +0,0 @@ -package workItems_test - -import ( - "errors" - "testing" - - "github.com/multiversx/mx-chain-es-indexer-go/data" - "github.com/multiversx/mx-chain-es-indexer-go/mock" - "github.com/multiversx/mx-chain-es-indexer-go/process/dataindexer/workItems" - "github.com/stretchr/testify/require" -) - -func TestItemRounds_Save(t *testing.T) { - called := false - itemRounds := workItems.NewItemRounds( - &mock.ElasticProcessorStub{ - SaveRoundsInfoCalled: func(infos []*data.RoundInfo) error { - called = true - return nil - }, - }, - []*data.RoundInfo{ - {}, - }, - ) - require.False(t, itemRounds.IsInterfaceNil()) - - err := itemRounds.Save() - require.NoError(t, err) - require.True(t, called) -} - -func TestItemRounds_SaveRoundsShouldErr(t *testing.T) { - localErr := errors.New("local err") - itemRounds := workItems.NewItemRounds( - &mock.ElasticProcessorStub{ - SaveRoundsInfoCalled: func(infos []*data.RoundInfo) error { - return localErr - }, - }, - []*data.RoundInfo{ - {}, - }, - ) - require.False(t, itemRounds.IsInterfaceNil()) - - err := itemRounds.Save() - require.Equal(t, localErr, err) -} diff --git a/process/dataindexer/workItems/workItemValidators.go b/process/dataindexer/workItems/workItemValidators.go deleted file mode 100644 index c6f424f4..00000000 --- 
a/process/dataindexer/workItems/workItemValidators.go +++ /dev/null @@ -1,40 +0,0 @@ -package workItems - -type itemValidators struct { - indexer saveValidatorsIndexer - epoch uint32 - validatorsPubKeys map[uint32][][]byte -} - -// NewItemValidators will create a new instance of itemValidators -func NewItemValidators( - indexer saveValidatorsIndexer, - epoch uint32, - validatorsPubKeys map[uint32][][]byte, -) WorkItemHandler { - return &itemValidators{ - indexer: indexer, - epoch: epoch, - validatorsPubKeys: validatorsPubKeys, - } -} - -// Save will save information about validators -func (wiv *itemValidators) Save() error { - for shardID, shardPubKeys := range wiv.validatorsPubKeys { - err := wiv.indexer.SaveShardValidatorsPubKeys(shardID, wiv.epoch, shardPubKeys) - if err != nil { - log.Warn("itemValidators.Save could not index validators public keys", - "for shard", shardID, - "error", err.Error()) - return err - } - } - - return nil -} - -// IsInterfaceNil returns true if there is no value under the interface -func (wiv *itemValidators) IsInterfaceNil() bool { - return wiv == nil -} diff --git a/process/dataindexer/workItems/workItemValidators_test.go b/process/dataindexer/workItems/workItemValidators_test.go deleted file mode 100644 index 10ed89a0..00000000 --- a/process/dataindexer/workItems/workItemValidators_test.go +++ /dev/null @@ -1,52 +0,0 @@ -package workItems_test - -import ( - "errors" - "testing" - - "github.com/multiversx/mx-chain-es-indexer-go/mock" - "github.com/multiversx/mx-chain-es-indexer-go/process/dataindexer/workItems" - "github.com/stretchr/testify/require" -) - -func TestItemValidators_Save(t *testing.T) { - called := false - validators := map[uint32][][]byte{ - 0: {[]byte("val1"), []byte("val2")}, - } - itemValidators := workItems.NewItemValidators( - &mock.ElasticProcessorStub{ - SaveShardValidatorsPubKeysCalled: func(shardID, epoch uint32, shardValidatorsPubKeys [][]byte) error { - called = true - return nil - }, - }, - 1, - 
validators, - ) - require.False(t, itemValidators.IsInterfaceNil()) - - err := itemValidators.Save() - require.NoError(t, err) - require.True(t, called) -} - -func TestItemValidators_SaveValidatorsShouldErr(t *testing.T) { - localErr := errors.New("local err") - validators := map[uint32][][]byte{ - 0: {[]byte("val1"), []byte("val2")}, - } - itemValidators := workItems.NewItemValidators( - &mock.ElasticProcessorStub{ - SaveShardValidatorsPubKeysCalled: func(shardID, epoch uint32, shardValidatorsPubKeys [][]byte) error { - return localErr - }, - }, - 1, - validators, - ) - require.False(t, itemValidators.IsInterfaceNil()) - - err := itemValidators.Save() - require.Equal(t, localErr, err) -} diff --git a/process/elasticproc/accounts/accountsProcessor.go b/process/elasticproc/accounts/accountsProcessor.go index 935a855a..1c24bfea 100644 --- a/process/elasticproc/accounts/accountsProcessor.go +++ b/process/elasticproc/accounts/accountsProcessor.go @@ -8,7 +8,7 @@ import ( "github.com/multiversx/mx-chain-core-go/core" "github.com/multiversx/mx-chain-core-go/core/check" - "github.com/multiversx/mx-chain-core-go/data/outport" + "github.com/multiversx/mx-chain-core-go/data/alteredAccount" "github.com/multiversx/mx-chain-es-indexer-go/data" "github.com/multiversx/mx-chain-es-indexer-go/process/dataindexer" "github.com/multiversx/mx-chain-es-indexer-go/process/elasticproc/converters" @@ -42,7 +42,7 @@ func NewAccountsProcessor( } // GetAccounts will get accounts for regular operations and esdt operations -func (ap *accountsProcessor) GetAccounts(coreAlteredAccounts map[string]*outport.AlteredAccount) ([]*data.Account, []*data.AccountESDT) { +func (ap *accountsProcessor) GetAccounts(coreAlteredAccounts map[string]*alteredAccount.AlteredAccount) ([]*data.Account, []*data.AccountESDT) { regularAccountsToIndex := make([]*data.Account, 0) accountsToIndexESDT := make([]*data.AccountESDT, 0) @@ -57,7 +57,7 @@ func (ap *accountsProcessor) GetAccounts(coreAlteredAccounts 
map[string]*outport } func splitAlteredAccounts( - account *outport.AlteredAccount, + account *alteredAccount.AlteredAccount, ) ([]*data.Account, []*data.AccountESDT) { regularAccountsToIndex := make([]*data.Account, 0) accountsToIndexESDT := make([]*data.AccountESDT, 0) @@ -123,16 +123,14 @@ func (ap *accountsProcessor) PrepareRegularAccountsMap(timestamp uint64, account } acc := &data.AccountInfo{ - Address: address, - Nonce: userAccount.UserAccount.Nonce, - Balance: converters.BigIntToString(balance), - BalanceNum: balanceAsFloat, - IsSender: userAccount.IsSender, - IsSmartContract: core.IsSmartContractAddress(addressBytes), - TotalBalanceWithStake: converters.BigIntToString(balance), - TotalBalanceWithStakeNum: balanceAsFloat, - Timestamp: time.Duration(timestamp), - ShardID: shardID, + Address: address, + Nonce: userAccount.UserAccount.Nonce, + Balance: converters.BigIntToString(balance), + BalanceNum: balanceAsFloat, + IsSender: userAccount.IsSender, + IsSmartContract: core.IsSmartContractAddress(addressBytes), + Timestamp: time.Duration(timestamp), + ShardID: shardID, } ap.addAdditionalDataInAccount(userAccount.UserAccount.AdditionalData, acc) @@ -143,18 +141,21 @@ func (ap *accountsProcessor) PrepareRegularAccountsMap(timestamp uint64, account return accountsMap } -func (ap *accountsProcessor) addAdditionalDataInAccount(additionalData *outport.AdditionalAccountData, account *data.AccountInfo) { +func (ap *accountsProcessor) addAdditionalDataInAccount(additionalData *alteredAccount.AdditionalAccountData, account *data.AccountInfo) { if additionalData == nil { return } account.UserName = additionalData.UserName account.CurrentOwner = additionalData.CurrentOwner + account.RootHash = additionalData.RootHash + account.CodeHash = additionalData.CodeHash + account.CodeMetadata = additionalData.CodeMetadata ap.addDeveloperRewardsInAccount(additionalData, account) } -func (ap *accountsProcessor) addDeveloperRewardsInAccount(additionalData 
*outport.AdditionalAccountData, account *data.AccountInfo) { +func (ap *accountsProcessor) addDeveloperRewardsInAccount(additionalData *alteredAccount.AdditionalAccountData, account *data.AccountInfo) { if additionalData.DeveloperRewards == "" { return } @@ -205,7 +206,7 @@ func (ap *accountsProcessor) PrepareAccountsMapESDT( } tokenIdentifier := converters.ComputeTokenIdentifier(accountESDT.TokenIdentifier, accountESDT.NFTNonce) - balanceNum, err := ap.balanceConverter.ComputeESDTBalanceAsFloat(balance) + balanceNum, err := ap.balanceConverter.ConvertBigValueToFloat(balance) if err != nil { log.Warn("accountsProcessor.PrepareAccountsMapESDT: cannot compute esdt balance as num", "balance", balance, "address", address, "error", err, "token", tokenIdentifier) @@ -281,7 +282,7 @@ func (ap *accountsProcessor) getESDTInfo(accountESDT *data.AccountESDT) (*big.In return big.NewInt(0), "", nil, nil } - accountTokenData := &outport.AccountTokenData{} + accountTokenData := &alteredAccount.AccountTokenData{} for _, tokenData := range accountESDT.Account.Tokens { if tokenData.Identifier == accountESDT.TokenIdentifier && tokenData.Nonce == accountESDT.NFTNonce { accountTokenData = tokenData @@ -299,7 +300,7 @@ func (ap *accountsProcessor) getESDTInfo(accountESDT *data.AccountESDT) (*big.In } // PutTokenMedataDataInTokens will put the TokenMedata in provided tokens data -func (ap *accountsProcessor) PutTokenMedataDataInTokens(tokensData []*data.TokenInfo, coreAlteredAccounts map[string]*outport.AlteredAccount) { +func (ap *accountsProcessor) PutTokenMedataDataInTokens(tokensData []*data.TokenInfo, coreAlteredAccounts map[string]*alteredAccount.AlteredAccount) { for _, tokenData := range tokensData { if tokenData.Data != nil || tokenData.Nonce == 0 { continue @@ -320,8 +321,8 @@ func (ap *accountsProcessor) PutTokenMedataDataInTokens(tokensData []*data.Token func (ap *accountsProcessor) loadMetadataForToken( tokenData *data.TokenInfo, - coreAlteredAccounts 
map[string]*outport.AlteredAccount, -) (*outport.TokenMetaData, error) { + coreAlteredAccounts map[string]*alteredAccount.AlteredAccount, +) (*alteredAccount.TokenMetaData, error) { for _, account := range coreAlteredAccounts { for _, token := range account.Tokens { if tokenData.Token == token.Identifier && tokenData.Nonce == token.Nonce { diff --git a/process/elasticproc/accounts/accountsProcessor_test.go b/process/elasticproc/accounts/accountsProcessor_test.go index 7fe166a0..b9c8b7bd 100644 --- a/process/elasticproc/accounts/accountsProcessor_test.go +++ b/process/elasticproc/accounts/accountsProcessor_test.go @@ -9,7 +9,7 @@ import ( "time" "github.com/multiversx/mx-chain-core-go/core" - "github.com/multiversx/mx-chain-core-go/data/outport" + "github.com/multiversx/mx-chain-core-go/data/alteredAccount" "github.com/multiversx/mx-chain-es-indexer-go/data" "github.com/multiversx/mx-chain-es-indexer-go/mock" "github.com/multiversx/mx-chain-es-indexer-go/process/dataindexer" @@ -86,11 +86,11 @@ func TestGetESDTInfo(t *testing.T) { tokenIdentifier := "token-001" wrapAccount := &data.AccountESDT{ - Account: &outport.AlteredAccount{ + Account: &alteredAccount.AlteredAccount{ Address: "", Balance: "1000", Nonce: 0, - Tokens: []*outport.AccountTokenData{ + Tokens: []*alteredAccount.AccountTokenData{ { Identifier: tokenIdentifier, Balance: "1000", @@ -114,11 +114,11 @@ func TestGetESDTInfoNFT(t *testing.T) { tokenIdentifier := "token-001" wrapAccount := &data.AccountESDT{ - Account: &outport.AlteredAccount{ + Account: &alteredAccount.AlteredAccount{ Address: "", Balance: "1", Nonce: 10, - Tokens: []*outport.AccountTokenData{ + Tokens: []*alteredAccount.AccountTokenData{ { Identifier: tokenIdentifier, Balance: "1", @@ -149,17 +149,17 @@ func TestGetESDTInfoNFTWithMetaData(t *testing.T) { tokenIdentifier := "token-001" wrapAccount := &data.AccountESDT{ - Account: &outport.AlteredAccount{ + Account: &alteredAccount.AlteredAccount{ Address: "", Balance: "1", Nonce: 1, - 
Tokens: []*outport.AccountTokenData{ + Tokens: []*alteredAccount.AccountTokenData{ { Identifier: tokenIdentifier, Balance: "1", Properties: "6f6b", Nonce: 10, - MetaData: &outport.TokenMetaData{ + MetaData: &alteredAccount.TokenMetaData{ Nonce: 10, Name: nftName, Creator: creator, @@ -188,8 +188,8 @@ func TestAccountsProcessor_GetAccountsEGLDAccounts(t *testing.T) { t.Parallel() addr := "aaaabbbb" - acc := &outport.AlteredAccount{} - alteredAccountsMap := map[string]*outport.AlteredAccount{ + acc := &alteredAccount.AlteredAccount{} + alteredAccountsMap := map[string]*alteredAccount.AlteredAccount{ addr: acc, } ap, _ := NewAccountsProcessor(mock.NewPubkeyConverterMock(32), balanceConverter) @@ -208,16 +208,16 @@ func TestAccountsProcessor_GetAccountsESDTAccount(t *testing.T) { t.Parallel() addr := "aaaabbbb" - acc := &outport.AlteredAccount{ + acc := &alteredAccount.AlteredAccount{ Address: addr, Balance: "1", - Tokens: []*outport.AccountTokenData{ + Tokens: []*alteredAccount.AccountTokenData{ { Identifier: "token", }, }, } - alteredAccountsMap := map[string]*outport.AlteredAccount{ + alteredAccountsMap := map[string]*alteredAccount.AlteredAccount{ addr: acc, } ap, _ := NewAccountsProcessor(mock.NewPubkeyConverterMock(32), balanceConverter) @@ -234,14 +234,14 @@ func TestAccountsProcessor_GetAccountsESDTAccountNewAccountShouldBeInRegularAcco t.Parallel() addr := "aaaabbbb" - acc := &outport.AlteredAccount{ - Tokens: []*outport.AccountTokenData{ + acc := &alteredAccount.AlteredAccount{ + Tokens: []*alteredAccount.AccountTokenData{ { Identifier: "token", }, }, } - alteredAccountsMap := map[string]*outport.AlteredAccount{ + alteredAccountsMap := map[string]*alteredAccount.AlteredAccount{ addr: acc, } ap, _ := NewAccountsProcessor(mock.NewPubkeyConverterMock(32), balanceConverter) @@ -264,10 +264,15 @@ func TestAccountsProcessor_PrepareAccountsMapEGLD(t *testing.T) { addrBytes := bytes.Repeat([]byte{0}, 32) addr := hex.EncodeToString(addrBytes) - account := 
&outport.AlteredAccount{ + account := &alteredAccount.AlteredAccount{ Address: addr, Balance: "1000", Nonce: 1, + AdditionalData: &alteredAccount.AdditionalAccountData{ + CodeHash: []byte("code"), + CodeMetadata: []byte("metadata"), + RootHash: []byte("root"), + }, } egldAccount := &data.Account{ @@ -282,14 +287,15 @@ func TestAccountsProcessor_PrepareAccountsMapEGLD(t *testing.T) { res := ap.PrepareRegularAccountsMap(123, []*data.Account{egldAccount}, 0) require.Equal(t, &data.AccountInfo{ - Address: addr, - Nonce: 1, - Balance: "1000", - BalanceNum: balanceNum, - TotalBalanceWithStake: "1000", - TotalBalanceWithStakeNum: balanceNum, - IsSmartContract: true, - Timestamp: time.Duration(123), + Address: addr, + Nonce: 1, + Balance: "1000", + BalanceNum: balanceNum, + IsSmartContract: true, + Timestamp: time.Duration(123), + CodeHash: []byte("code"), + CodeMetadata: []byte("metadata"), + RootHash: []byte("root"), }, res[addr]) } @@ -299,15 +305,15 @@ func TestAccountsProcessor_PrepareAccountsMapESDT(t *testing.T) { addr := "aaaabbbb" - account := &outport.AlteredAccount{ + account := &alteredAccount.AlteredAccount{ Address: hex.EncodeToString([]byte(addr)), - Tokens: []*outport.AccountTokenData{ + Tokens: []*alteredAccount.AccountTokenData{ { Balance: "1000", Identifier: "token", Nonce: 15, Properties: "3032", - MetaData: &outport.TokenMetaData{ + MetaData: &alteredAccount.TokenMetaData{ Creator: "creator", }, }, @@ -316,7 +322,7 @@ func TestAccountsProcessor_PrepareAccountsMapESDT(t *testing.T) { Identifier: "token", Nonce: 16, Properties: "3032", - MetaData: &outport.TokenMetaData{ + MetaData: &alteredAccount.TokenMetaData{ Creator: "creator", }, }, @@ -406,7 +412,7 @@ func TestAccountsProcessor_PutTokenMedataDataInTokens(t *testing.T) { tokensInfo := []*data.TokenInfo{ {Data: nil}, {Nonce: 5, Data: &data.TokenMetaData{Creator: oldCreator}}, } - emptyAlteredAccounts := map[string]*outport.AlteredAccount{} + emptyAlteredAccounts := 
map[string]*alteredAccount.AlteredAccount{} ap.PutTokenMedataDataInTokens(tokensInfo, emptyAlteredAccounts) require.Empty(t, emptyAlteredAccounts) require.Empty(t, tokensInfo[0].Data) @@ -426,8 +432,8 @@ func TestAccountsProcessor_PutTokenMedataDataInTokens(t *testing.T) { }, } - alteredAccounts := map[string]*outport.AlteredAccount{ - "addr": {Tokens: []*outport.AccountTokenData{}}, + alteredAccounts := map[string]*alteredAccount.AlteredAccount{ + "addr": {Tokens: []*alteredAccount.AccountTokenData{}}, } ap.PutTokenMedataDataInTokens(tokensInfo, alteredAccounts) require.Empty(t, tokensInfo[0].Data) @@ -438,7 +444,7 @@ func TestAccountsProcessor_PutTokenMedataDataInTokens(t *testing.T) { ap, _ := NewAccountsProcessor(mock.NewPubkeyConverterMock(32), balanceConverter) - metadata0, metadata1 := &outport.TokenMetaData{Creator: "creator 0"}, &outport.TokenMetaData{Creator: "creator 1"} + metadata0, metadata1 := &alteredAccount.TokenMetaData{Creator: "creator 0"}, &alteredAccount.TokenMetaData{Creator: "creator 1"} tokensInfo := []*data.TokenInfo{ { Nonce: 5, @@ -452,9 +458,9 @@ func TestAccountsProcessor_PutTokenMedataDataInTokens(t *testing.T) { }, } - alteredAccounts := map[string]*outport.AlteredAccount{ + alteredAccounts := map[string]*alteredAccount.AlteredAccount{ "addr0": { - Tokens: []*outport.AccountTokenData{ + Tokens: []*alteredAccount.AccountTokenData{ { Identifier: "token0-5t6y7u", Nonce: 5, @@ -481,14 +487,14 @@ func TestAddAdditionalDataIntoAccounts(t *testing.T) { ap, _ := NewAccountsProcessor(mock.NewPubkeyConverterMock(32), balanceConverter) account := &data.AccountInfo{} - ap.addAdditionalDataInAccount(&outport.AdditionalAccountData{ + ap.addAdditionalDataInAccount(&alteredAccount.AdditionalAccountData{ DeveloperRewards: "10000", }, account) require.Equal(t, "10000", account.DeveloperRewards) require.Equal(t, 0.000001, account.DeveloperRewardsNum) account = &data.AccountInfo{} - ap.addAdditionalDataInAccount(&outport.AdditionalAccountData{ + 
ap.addAdditionalDataInAccount(&alteredAccount.AdditionalAccountData{ DeveloperRewards: "", }, account) require.Equal(t, "", account.DeveloperRewards) @@ -497,7 +503,7 @@ func TestAddAdditionalDataIntoAccounts(t *testing.T) { account = &data.AccountInfo{ Address: "addr", } - ap.addAdditionalDataInAccount(&outport.AdditionalAccountData{ + ap.addAdditionalDataInAccount(&alteredAccount.AdditionalAccountData{ DeveloperRewards: "wrong", }, account) require.Equal(t, "", account.DeveloperRewards) diff --git a/process/elasticproc/accounts/serialize_test.go b/process/elasticproc/accounts/serialize_test.go index 3a1eb965..52b0dbf4 100644 --- a/process/elasticproc/accounts/serialize_test.go +++ b/process/elasticproc/accounts/serialize_test.go @@ -39,14 +39,12 @@ func TestSerializeAccounts(t *testing.T) { accs := map[string]*data.AccountInfo{ "addr1": { - Address: "addr1", - Nonce: 1, - Balance: "50", - BalanceNum: 0.1, - TotalBalanceWithStake: "50", - TotalBalanceWithStakeNum: 0.1, - IsSmartContract: true, - IsSender: true, + Address: "addr1", + Nonce: 1, + Balance: "50", + BalanceNum: 0.1, + IsSmartContract: true, + IsSender: true, }, } @@ -56,7 +54,7 @@ func TestSerializeAccounts(t *testing.T) { require.Equal(t, 1, len(buffSlice.Buffers())) expectedRes := `{ "update" : {"_index": "accounts", "_id" : "addr1" } } -{"scripted_upsert": true, "script": {"source": "if ('create' == ctx.op) {ctx._source = params.account} else {if (ctx._source.containsKey('timestamp')) {if (ctx._source.timestamp <= params.account.timestamp) {ctx._source = params.account}} else {ctx._source = params.account}}","lang": "painless","params": { "account": {"address":"addr1","nonce":1,"balance":"50","balanceNum":0.1,"totalBalanceWithStake":"50","totalBalanceWithStakeNum":0.1,"shardID":0} }},"upsert": {}} +{"scripted_upsert": true, "script": {"source": "if ('create' == ctx.op) {ctx._source = params.account} else {if (ctx._source.containsKey('timestamp')) {if (ctx._source.timestamp <= 
params.account.timestamp) {ctx._source = params.account}} else {ctx._source = params.account}}","lang": "painless","params": { "account": {"address":"addr1","nonce":1,"balance":"50","balanceNum":0.1,"shardID":0} }},"upsert": {}} ` require.Equal(t, expectedRes, buffSlice.Buffers()[0].String()) } diff --git a/process/elasticproc/block/blockProcessor.go b/process/elasticproc/block/blockProcessor.go index f61477c6..c23e3e1b 100644 --- a/process/elasticproc/block/blockProcessor.go +++ b/process/elasticproc/block/blockProcessor.go @@ -2,6 +2,7 @@ package block import ( "encoding/hex" + "errors" "fmt" "strconv" "time" @@ -25,7 +26,11 @@ const ( notFound = -2 ) -var log = logger.GetOrCreate("indexer/process/block") +var ( + log = logger.GetOrCreate("indexer/process/block") + errNilBlockData = errors.New("nil block data") + errNilHeaderGasConsumed = errors.New("nil header gas consumed data") +) type blockProcessor struct { hasher hashing.Hasher @@ -48,62 +53,68 @@ func NewBlockProcessor(hasher hashing.Hasher, marshalizer marshal.Marshalizer) ( } // PrepareBlockForDB will prepare a database block and serialize it for database -func (bp *blockProcessor) PrepareBlockForDB( - headerHash []byte, - header coreData.HeaderHandler, - signersIndexes []uint64, - body *block.Body, - notarizedHeadersHashes []string, - gasConsumptionData outport.HeaderGasConsumption, - sizeTxs int, - pool *outport.Pool, -) (*data.Block, error) { - if check.IfNil(header) { +func (bp *blockProcessor) PrepareBlockForDB(obh *outport.OutportBlockWithHeader) (*data.Block, error) { + if check.IfNil(obh.Header) { return nil, indexer.ErrNilHeaderHandler } - if body == nil { + if obh.BlockData == nil { + return nil, errNilBlockData + } + if obh.BlockData.Body == nil { return nil, indexer.ErrNilBlockBody } + if obh.HeaderGasConsumption == nil { + return nil, errNilHeaderGasConsumed + } - blockSizeInBytes, err := bp.computeBlockSize(header, body) + blockSizeInBytes, err := 
bp.computeBlockSize(obh.BlockData.HeaderBytes, obh.BlockData.Body) if err != nil { return nil, err } - miniblocksHashes := bp.getEncodedMBSHashes(body) - leaderIndex := bp.getLeaderIndex(signersIndexes) + sizeTxs := computeSizeOfTransactions(obh.TransactionPool) + miniblocksHashes := bp.getEncodedMBSHashes(obh.BlockData.Body) + leaderIndex := bp.getLeaderIndex(obh.SignersIndexes) - numTxs, notarizedTxs := getTxsCount(header) + numTxs, notarizedTxs := getTxsCount(obh.Header) elasticBlock := &data.Block{ - Nonce: header.GetNonce(), - Round: header.GetRound(), - Epoch: header.GetEpoch(), - ShardID: header.GetShardID(), - Hash: hex.EncodeToString(headerHash), + Nonce: obh.Header.GetNonce(), + Round: obh.Header.GetRound(), + Epoch: obh.Header.GetEpoch(), + ShardID: obh.Header.GetShardID(), + Hash: hex.EncodeToString(obh.BlockData.HeaderHash), MiniBlocksHashes: miniblocksHashes, - NotarizedBlocksHashes: notarizedHeadersHashes, + NotarizedBlocksHashes: obh.NotarizedHeadersHashes, Proposer: leaderIndex, - Validators: signersIndexes, - PubKeyBitmap: hex.EncodeToString(header.GetPubKeysBitmap()), + Validators: obh.SignersIndexes, + PubKeyBitmap: hex.EncodeToString(obh.Header.GetPubKeysBitmap()), Size: int64(blockSizeInBytes), SizeTxs: int64(sizeTxs), - Timestamp: time.Duration(header.GetTimeStamp()), + Timestamp: time.Duration(obh.Header.GetTimeStamp()), TxCount: numTxs, NotarizedTxsCount: notarizedTxs, - StateRootHash: hex.EncodeToString(header.GetRootHash()), - PrevHash: hex.EncodeToString(header.GetPrevHash()), - SearchOrder: computeBlockSearchOrder(header), - EpochStartBlock: header.IsStartOfEpochBlock(), - GasProvided: gasConsumptionData.GasProvided, - GasRefunded: gasConsumptionData.GasRefunded, - GasPenalized: gasConsumptionData.GasPenalized, - MaxGasLimit: gasConsumptionData.MaxGasPerBlock, - AccumulatedFees: converters.BigIntToString(header.GetAccumulatedFees()), - DeveloperFees: converters.BigIntToString(header.GetDeveloperFees()), - } - - additionalData := 
header.GetAdditionalData() - if header.GetAdditionalData() != nil { + StateRootHash: hex.EncodeToString(obh.Header.GetRootHash()), + PrevHash: hex.EncodeToString(obh.Header.GetPrevHash()), + SearchOrder: computeBlockSearchOrder(obh.Header), + EpochStartBlock: obh.Header.IsStartOfEpochBlock(), + GasProvided: obh.HeaderGasConsumption.GasProvided, + GasRefunded: obh.HeaderGasConsumption.GasRefunded, + GasPenalized: obh.HeaderGasConsumption.GasPenalized, + MaxGasLimit: obh.HeaderGasConsumption.MaxGasPerBlock, + AccumulatedFees: converters.BigIntToString(obh.Header.GetAccumulatedFees()), + DeveloperFees: converters.BigIntToString(obh.Header.GetDeveloperFees()), + RandSeed: hex.EncodeToString(obh.Header.GetRandSeed()), + PrevRandSeed: hex.EncodeToString(obh.Header.GetPrevRandSeed()), + Signature: hex.EncodeToString(obh.Header.GetSignature()), + LeaderSignature: hex.EncodeToString(obh.Header.GetLeaderSignature()), + ChainID: string(obh.Header.GetChainID()), + SoftwareVersion: hex.EncodeToString(obh.Header.GetSoftwareVersion()), + ReceiptsHash: hex.EncodeToString(obh.Header.GetReceiptsHash()), + Reserved: obh.Header.GetReserved(), + } + + additionalData := obh.Header.GetAdditionalData() + if obh.Header.GetAdditionalData() != nil { elasticBlock.ScheduledData = &data.ScheduledData{ ScheduledRootHash: hex.EncodeToString(additionalData.GetScheduledRootHash()), ScheduledAccumulatedFees: converters.BigIntToString(additionalData.GetScheduledAccumulatedFees()), @@ -114,8 +125,8 @@ func (bp *blockProcessor) PrepareBlockForDB( } } - bp.addEpochStartInfoForMeta(header, elasticBlock) - putMiniblocksDetailsInBlock(header, elasticBlock, pool, body) + bp.addEpochStartInfoForMeta(obh.Header, elasticBlock) + putMiniblocksDetailsInBlock(obh.Header, elasticBlock, obh.TransactionPool, obh.BlockData.Body) return elasticBlock, nil } @@ -233,7 +244,7 @@ func (bp *blockProcessor) getEncodedMBSHashes(body *block.Body) []string { return miniblocksHashes } -func putMiniblocksDetailsInBlock(header 
coreData.HeaderHandler, block *data.Block, pool *outport.Pool, body *block.Body) { +func putMiniblocksDetailsInBlock(header coreData.HeaderHandler, block *data.Block, pool *outport.TransactionPool, body *block.Body) { mbHeaders := header.GetMiniBlockHeaderHandlers() for idx, mbHeader := range mbHeaders { @@ -257,8 +268,8 @@ func putMiniblocksDetailsInBlock(header coreData.HeaderHandler, block *data.Bloc } } -func extractExecutionOrderIndicesFromPool(mbHeader coreData.MiniBlockHeaderHandler, txsHashes [][]byte, pool *outport.Pool) []int { - txsMap := getTxsMap(nodeBlock.Type(mbHeader.GetTypeInt32()), pool) +func extractExecutionOrderIndicesFromPool(mbHeader coreData.MiniBlockHeaderHandler, txsHashes [][]byte, pool *outport.TransactionPool) []int { + mbType := mbHeader.GetTypeInt32() executionOrderTxsIndices := make([]int, len(txsHashes)) indexOfFirstTxProcessed, indexOfLastTxProcessed := mbHeader.GetIndexOfFirstTxProcessed(), mbHeader.GetIndexOfLastTxProcessed() for idx, txHash := range txsHashes { @@ -268,24 +279,47 @@ func extractExecutionOrderIndicesFromPool(mbHeader coreData.MiniBlockHeaderHandl continue } - tx, found := txsMap[string(txHash)] + executionOrder, found := getExecutionOrderForTx(txHash, mbType, pool) if !found { log.Warn("blockProcessor.extractExecutionOrderIndicesFromPool cannot find tx in pool", "txHash", hex.EncodeToString(txHash)) executionOrderTxsIndices[idx] = notFound continue } - executionOrderTxsIndices[idx] = tx.GetExecutionOrder() + executionOrderTxsIndices[idx] = int(executionOrder) } return executionOrderTxsIndices } -func (bp *blockProcessor) computeBlockSize(header coreData.HeaderHandler, body *block.Body) (int, error) { - headerBytes, err := bp.marshalizer.Marshal(header) - if err != nil { - return 0, err +type executionOrderHandler interface { + GetExecutionOrder() uint32 +} + +func getExecutionOrderForTx(txHash []byte, mbType int32, pool *outport.TransactionPool) (uint32, bool) { + var tx executionOrderHandler + var found bool + + 
switch nodeBlock.Type(mbType) { + case nodeBlock.TxBlock: + tx, found = pool.Transactions[hex.EncodeToString(txHash)] + case nodeBlock.InvalidBlock: + tx, found = pool.InvalidTxs[hex.EncodeToString(txHash)] + case nodeBlock.RewardsBlock: + tx, found = pool.Rewards[hex.EncodeToString(txHash)] + case nodeBlock.SmartContractResultBlock: + tx, found = pool.SmartContractResults[hex.EncodeToString(txHash)] + default: + return 0, false } + + if !found { + return 0, false + } + return tx.GetExecutionOrder(), true +} + +func (bp *blockProcessor) computeBlockSize(headerBytes []byte, body *block.Body) (int, error) { bodyBytes, err := bp.marshalizer.Marshal(body) if err != nil { return 0, err @@ -332,21 +366,6 @@ func (bp *blockProcessor) ComputeHeaderHash(header coreData.HeaderHandler) ([]by return core.CalculateHash(bp.marshalizer, bp.hasher, header) } -func getTxsMap(mbType nodeBlock.Type, pool *outport.Pool) map[string]coreData.TransactionHandlerWithGasUsedAndFee { - switch mbType { - case nodeBlock.TxBlock: - return pool.Txs - case nodeBlock.InvalidBlock: - return pool.Invalid - case nodeBlock.RewardsBlock: - return pool.Rewards - case nodeBlock.SmartContractResultBlock: - return pool.Scrs - default: - return make(map[string]coreData.TransactionHandlerWithGasUsedAndFee) - } -} - func hexEncodeSlice(slice [][]byte) []string { res := make([]string, 0, len(slice)) for _, s := range slice { @@ -354,3 +373,25 @@ func hexEncodeSlice(slice [][]byte) []string { } return res } + +func computeSizeOfTransactions(pool *outport.TransactionPool) int { + if pool == nil { + return 0 + } + + txsSize := 0 + for _, txInfo := range pool.Transactions { + txsSize += txInfo.Transaction.Size() + } + for _, rewardInfo := range pool.Rewards { + txsSize += rewardInfo.Reward.Size() + } + for _, invalidTxInfo := range pool.InvalidTxs { + txsSize += invalidTxInfo.Transaction.Size() + } + for _, scrInfo := range pool.SmartContractResults { + txsSize += scrInfo.SmartContractResult.Size() + } + + return 
txsSize +} diff --git a/process/elasticproc/block/blockProcessor_test.go b/process/elasticproc/block/blockProcessor_test.go index 14b05780..d727fc1f 100644 --- a/process/elasticproc/block/blockProcessor_test.go +++ b/process/elasticproc/block/blockProcessor_test.go @@ -7,9 +7,11 @@ import ( "testing" "github.com/multiversx/mx-chain-core-go/core" - coreData "github.com/multiversx/mx-chain-core-go/data" dataBlock "github.com/multiversx/mx-chain-core-go/data/block" "github.com/multiversx/mx-chain-core-go/data/outport" + "github.com/multiversx/mx-chain-core-go/data/rewardTx" + "github.com/multiversx/mx-chain-core-go/data/smartContractResult" + "github.com/multiversx/mx-chain-core-go/data/transaction" "github.com/multiversx/mx-chain-core-go/hashing" "github.com/multiversx/mx-chain-core-go/marshal" "github.com/multiversx/mx-chain-es-indexer-go/data" @@ -60,20 +62,37 @@ func TestBlockProcessor_PrepareBlockForDBShouldWork(t *testing.T) { bp, _ := NewBlockProcessor(&mock.HasherMock{}, &mock.MarshalizerMock{}) - dbBlock, err := bp.PrepareBlockForDB( - []byte("hash"), - &dataBlock.Header{}, - []uint64{0, 1, 2}, - &dataBlock.Body{ - MiniBlocks: dataBlock.MiniBlockSlice{ - { - ReceiverShardID: 1, - }, - { - ReceiverShardID: 2, + outportBlockWithHeader := &outport.OutportBlockWithHeader{ + Header: &dataBlock.Header{ + RandSeed: []byte("randSeed"), + PrevRandSeed: []byte("prevRandSeed"), + Signature: []byte("signature"), + LeaderSignature: []byte("leaderSignature"), + ChainID: []byte("1"), + SoftwareVersion: []byte("1"), + ReceiptsHash: []byte("hash"), + Reserved: []byte("reserved"), + }, + OutportBlock: &outport.OutportBlock{ + BlockData: &outport.BlockData{ + HeaderHash: []byte("hash"), + Body: &dataBlock.Body{ + MiniBlocks: dataBlock.MiniBlockSlice{ + { + ReceiverShardID: 1, + }, + { + ReceiverShardID: 2, + }, + }, }, }, - }, nil, outport.HeaderGasConsumption{}, 0, &outport.Pool{}) + SignersIndexes: []uint64{0, 1, 2}, + TransactionPool: &outport.TransactionPool{}, + 
HeaderGasConsumption: &outport.HeaderGasConsumption{}, + }, + } + dbBlock, err := bp.PrepareBlockForDB(outportBlockWithHeader) require.Nil(t, err) expectedBlock := &data.Block{ @@ -81,11 +100,19 @@ func TestBlockProcessor_PrepareBlockForDBShouldWork(t *testing.T) { Validators: []uint64{0x0, 0x1, 0x2}, EpochStartBlock: false, SearchOrder: 0x3fc, - MiniBlocksHashes: []string{"c57392e53257b4861f5e406349a8deb89c6dbc2127564ee891a41a188edbf01a", "28fda294dc987e5099d75e53cd6f87a9a42b96d55242a634385b5d41175c0c21"}, + MiniBlocksHashes: []string{"0796d34e8d443fd31bf4d9ec4051421b4d5d0e8c1db9ff942d6f4dc3a9ca2803", "4cc379ab1f0aef6602e85a0a7ffabb5bc9a2ba646dc0fd720028e06527bf873f"}, NotarizedBlocksHashes: []string(nil), - Size: 104, + Size: 114, AccumulatedFees: "0", DeveloperFees: "0", + RandSeed: "72616e6453656564", + PrevRandSeed: "7072657652616e6453656564", + Signature: "7369676e6174757265", + LeaderSignature: "6c65616465725369676e6174757265", + ChainID: "1", + SoftwareVersion: "31", + ReceiptsHash: "68617368", + Reserved: []byte("reserved"), } require.Equal(t, expectedBlock, dbBlock) } @@ -95,7 +122,8 @@ func TestBlockProcessor_PrepareBlockForDBNilHeader(t *testing.T) { bp, _ := NewBlockProcessor(&mock.HasherMock{}, &mock.MarshalizerMock{}) - dbBlock, err := bp.PrepareBlockForDB([]byte("hash"), nil, nil, &dataBlock.Body{}, nil, outport.HeaderGasConsumption{}, 0, &outport.Pool{}) + outportBlockWithHeader := &outport.OutportBlockWithHeader{} + dbBlock, err := bp.PrepareBlockForDB(outportBlockWithHeader) require.Equal(t, indexer.ErrNilHeaderHandler, err) require.Nil(t, dbBlock) } @@ -105,7 +133,13 @@ func TestBlockProcessor_PrepareBlockForDBNilBody(t *testing.T) { bp, _ := NewBlockProcessor(&mock.HasherMock{}, &mock.MarshalizerMock{}) - dbBlock, err := bp.PrepareBlockForDB([]byte("hash"), &dataBlock.MetaBlock{}, nil, nil, nil, outport.HeaderGasConsumption{}, 0, &outport.Pool{}) + outportBlockWithHeader := &outport.OutportBlockWithHeader{ + Header: &dataBlock.MetaBlock{}, + 
OutportBlock: &outport.OutportBlock{ + BlockData: &outport.BlockData{}, + }, + } + dbBlock, err := bp.PrepareBlockForDB(outportBlockWithHeader) require.Equal(t, indexer.ErrNilBlockBody, err) require.Nil(t, dbBlock) } @@ -120,7 +154,19 @@ func TestBlockProcessor_PrepareBlockForDBMarshalFailHeader(t *testing.T) { }, }) - dbBlock, err := bp.PrepareBlockForDB([]byte("hash"), &dataBlock.MetaBlock{}, nil, &dataBlock.Body{}, nil, outport.HeaderGasConsumption{}, 0, &outport.Pool{}) + outportBlockWithHeader := &outport.OutportBlockWithHeader{ + Header: &dataBlock.Header{}, + OutportBlock: &outport.OutportBlock{ + BlockData: &outport.BlockData{ + HeaderHash: []byte("hash"), + Body: &dataBlock.Body{}, + }, + SignersIndexes: []uint64{0, 1, 2}, + TransactionPool: &outport.TransactionPool{}, + HeaderGasConsumption: &outport.HeaderGasConsumption{}, + }, + } + dbBlock, err := bp.PrepareBlockForDB(outportBlockWithHeader) require.Equal(t, expectedErr, err) require.Nil(t, dbBlock) } @@ -128,21 +174,26 @@ func TestBlockProcessor_PrepareBlockForDBMarshalFailHeader(t *testing.T) { func TestBlockProcessor_PrepareBlockForDBMarshalFailBlock(t *testing.T) { t.Parallel() - count := 0 expectedErr := errors.New("local error") bp, _ := NewBlockProcessor(&mock.HasherMock{}, &mock.MarshalizerMock{ MarshalCalled: func(obj interface{}) ([]byte, error) { - defer func() { - count++ - }() - if count > 0 { - return nil, expectedErr - } - return nil, nil + return nil, expectedErr }, }) - dbBlock, err := bp.PrepareBlockForDB([]byte("hash"), &dataBlock.MetaBlock{}, nil, &dataBlock.Body{}, nil, outport.HeaderGasConsumption{}, 0, &outport.Pool{}) + outportBlockWithHeader := &outport.OutportBlockWithHeader{ + Header: &dataBlock.Header{}, + OutportBlock: &outport.OutportBlock{ + BlockData: &outport.BlockData{ + HeaderHash: []byte("hash"), + Body: &dataBlock.Body{}, + }, + SignersIndexes: []uint64{0, 1, 2}, + TransactionPool: &outport.TransactionPool{}, + HeaderGasConsumption: &outport.HeaderGasConsumption{}, 
+ }, + } + dbBlock, err := bp.PrepareBlockForDB(outportBlockWithHeader) require.Equal(t, expectedErr, err) require.Nil(t, dbBlock) } @@ -155,7 +206,7 @@ func TestBlockProcessor_ComputeHeaderHash(t *testing.T) { header := &dataBlock.Header{} hashBytes, err := bp.ComputeHeaderHash(header) require.Nil(t, err) - require.Equal(t, "c7c81a1b22b67680f35837b474387ddfe10f67e104034c80f94ab9e5a0a089fb", hex.EncodeToString(hashBytes)) + require.Equal(t, "96f7d09988eafbc99b45dfce0eaf9df1d02def2ae678d88bd154ebffa3247b2a", hex.EncodeToString(hashBytes)) } func TestBlockProcessor_PrepareBlockForDBEpochStartMeta(t *testing.T) { @@ -163,7 +214,7 @@ func TestBlockProcessor_PrepareBlockForDBEpochStartMeta(t *testing.T) { bp, _ := NewBlockProcessor(&mock.HasherMock{}, &mock.MarshalizerMock{}) - dbBlock, err := bp.PrepareBlockForDB([]byte("hash"), &dataBlock.MetaBlock{ + header := &dataBlock.MetaBlock{ TxCount: 1000, EpochStart: dataBlock.EpochStart{ LastFinalizedHeaders: []dataBlock.EpochStartShardData{{ @@ -205,24 +256,40 @@ func TestBlockProcessor_PrepareBlockForDBEpochStartMeta(t *testing.T) { TxCount: 120, }, }, - }, nil, &dataBlock.Body{ - MiniBlocks: []*dataBlock.MiniBlock{ - {}, - {}, + } + + headerBytes, _ := bp.marshalizer.Marshal(header) + outportBlockWithHeader := &outport.OutportBlockWithHeader{ + Header: header, + OutportBlock: &outport.OutportBlock{ + BlockData: &outport.BlockData{ + HeaderBytes: headerBytes, + HeaderHash: []byte("hash"), + Body: &dataBlock.Body{ + MiniBlocks: []*dataBlock.MiniBlock{ + {}, + {}, + }, + }, + }, + TransactionPool: &outport.TransactionPool{}, + HeaderGasConsumption: &outport.HeaderGasConsumption{}, }, - }, nil, outport.HeaderGasConsumption{}, 0, &outport.Pool{}) + } + + dbBlock, err := bp.PrepareBlockForDB(outportBlockWithHeader) require.Equal(t, nil, err) require.Equal(t, &data.Block{ Nonce: 0, Round: 0, Epoch: 0, Hash: "68617368", - MiniBlocksHashes: []string{"44136fa355b3678a1146ad16f7e8649e94fb4fc21fe77e8310c060f61caaff8a", 
"44136fa355b3678a1146ad16f7e8649e94fb4fc21fe77e8310c060f61caaff8a"}, + MiniBlocksHashes: []string{"8748c4677b01f7db984004fa8465afbf55feaab4b573174c8c0afa282941b9e4", "8748c4677b01f7db984004fa8465afbf55feaab4b573174c8c0afa282941b9e4"}, NotarizedBlocksHashes: nil, Proposer: 0, Validators: nil, PubKeyBitmap: "", - Size: 643, + Size: 898, SizeTxs: 0, Timestamp: 0, StateRootHash: "", @@ -302,7 +369,8 @@ func TestBlockProcessor_PrepareBlockForDBMiniBlocksDetails(t *testing.T) { mbhrBytes, _ := gogoMarshaller.Marshal(mbhr) txHash, notExecutedTxHash, notFoundTxHash, invalidTxHash, rewardsTxHash, scrHash := "tx", "notExecuted", "notFound", "invalid", "reward", "scr" - dbBlock, err := bp.PrepareBlockForDB([]byte("hash"), &dataBlock.Header{ + + header := &dataBlock.Header{ TxCount: 5, MiniBlockHeaders: []dataBlock.MiniBlockHeader{ { @@ -323,63 +391,83 @@ func TestBlockProcessor_PrepareBlockForDBMiniBlocksDetails(t *testing.T) { Type: dataBlock.SmartContractResultBlock, }, }, - }, nil, &dataBlock.Body{ - MiniBlocks: []*dataBlock.MiniBlock{ - { - Type: dataBlock.TxBlock, - TxHashes: [][]byte{[]byte(txHash), []byte(notFoundTxHash), []byte(notExecutedTxHash)}, - }, - { - Type: dataBlock.RewardsBlock, - TxHashes: [][]byte{[]byte(rewardsTxHash)}, - }, - { - Type: dataBlock.InvalidBlock, - TxHashes: [][]byte{[]byte(invalidTxHash)}, - }, - { - Type: dataBlock.SmartContractResultBlock, - TxHashes: [][]byte{[]byte(scrHash)}, - }, - }, - }, nil, outport.HeaderGasConsumption{}, 0, &outport.Pool{ - Txs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - txHash: &outport.TransactionHandlerWithGasAndFee{ - ExecutionOrder: 2, - }, - notExecutedTxHash: &outport.TransactionHandlerWithGasAndFee{ - ExecutionOrder: 0, - }, - }, - Rewards: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - rewardsTxHash: &outport.TransactionHandlerWithGasAndFee{ - ExecutionOrder: 3, + } + headerBytes, _ := bp.marshalizer.Marshal(header) + outportBlockWithHeader := &outport.OutportBlockWithHeader{ + 
Header: header, + OutportBlock: &outport.OutportBlock{ + BlockData: &outport.BlockData{ + HeaderBytes: headerBytes, + HeaderHash: []byte("hash"), + Body: &dataBlock.Body{ + MiniBlocks: []*dataBlock.MiniBlock{ + { + Type: dataBlock.TxBlock, + TxHashes: [][]byte{[]byte(txHash), []byte(notFoundTxHash), []byte(notExecutedTxHash)}, + }, + { + Type: dataBlock.RewardsBlock, + TxHashes: [][]byte{[]byte(rewardsTxHash)}, + }, + { + Type: dataBlock.InvalidBlock, + TxHashes: [][]byte{[]byte(invalidTxHash)}, + }, + { + Type: dataBlock.SmartContractResultBlock, + TxHashes: [][]byte{[]byte(scrHash)}, + }, + }, + }, }, - }, - Invalid: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - invalidTxHash: &outport.TransactionHandlerWithGasAndFee{ - ExecutionOrder: 1, - }}, - Scrs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - scrHash: &outport.TransactionHandlerWithGasAndFee{ - ExecutionOrder: 0, + TransactionPool: &outport.TransactionPool{ + Transactions: map[string]*outport.TxInfo{ + hex.EncodeToString([]byte(txHash)): { + Transaction: &transaction.Transaction{}, + ExecutionOrder: 2, + }, + hex.EncodeToString([]byte(notExecutedTxHash)): { + ExecutionOrder: 0, + }, + }, + Rewards: map[string]*outport.RewardInfo{ + hex.EncodeToString([]byte(rewardsTxHash)): { + Reward: &rewardTx.RewardTx{}, + ExecutionOrder: 3, + }, + }, + InvalidTxs: map[string]*outport.TxInfo{ + hex.EncodeToString([]byte(invalidTxHash)): { + Transaction: &transaction.Transaction{}, + ExecutionOrder: 1, + }}, + SmartContractResults: map[string]*outport.SCRInfo{ + hex.EncodeToString([]byte(scrHash)): { + SmartContractResult: &smartContractResult.SmartContractResult{}, + ExecutionOrder: 0, + }, + }, }, + HeaderGasConsumption: &outport.HeaderGasConsumption{}, }, - }) + } + + dbBlock, err := bp.PrepareBlockForDB(outportBlockWithHeader) require.Nil(t, err) require.Equal(t, &data.Block{ Hash: "68617368", - Size: int64(341), + Size: int64(723), + SizeTxs: 15, AccumulatedFees: "0", DeveloperFees: "0", 
TxCount: uint32(5), SearchOrder: uint64(1020), MiniBlocksHashes: []string{ - "ee29d9b4a5017b7351974110d6a3f28ce6612476582f16b7849e3e87c647fc2d", - "c067de5b3c0031a14578699b1c3cdb9a19039e4a7b3fae6a94932ad3f70cf375", - "758f925b254ea0a6ad1bcbe3ddfcc73418ed4c8712506aafddc4da703295ad63", - "28a96506c2999838923f5310b3bb1d6849b5a259b429790d9eeb21c2a1402f82", + "8987edec270eb942d8ea9051fe301673aea29890919f5849882617aabcc7a248", + "1183f422a5b76c3cb7b439334f1fe7235c8d09f577e0f1e15e62cd05b9a81950", + "b24e307f3917e84603d3ebfb9c03c8fc651b62cb68ca884c3ff015b66a610a79", + "c0a855563172b2f72be569963d26d4fae38d4371342e2bf3ded93466a72f36f3", }, MiniBlocksDetails: []*data.MiniBlocksDetails{ { diff --git a/process/elasticproc/converters/balanceToFloat.go b/process/elasticproc/converters/balanceToFloat.go index de69e450..e3ae8d6c 100644 --- a/process/elasticproc/converters/balanceToFloat.go +++ b/process/elasticproc/converters/balanceToFloat.go @@ -46,8 +46,8 @@ func (bc *balanceConverter) ComputeBalanceAsFloat(balance *big.Int) (float64, er return bc.computeBalanceAsFloat(balance, bc.balancePrecision) } -// ComputeESDTBalanceAsFloat will compute ESDT balance as float -func (bc *balanceConverter) ComputeESDTBalanceAsFloat(balance *big.Int) (float64, error) { +// ConvertBigValueToFloat will convert big value to float +func (bc *balanceConverter) ConvertBigValueToFloat(balance *big.Int) (float64, error) { return bc.computeBalanceAsFloat(balance, bc.balancePrecisionESDT) } @@ -61,7 +61,7 @@ func (bc *balanceConverter) ComputeSliceOfStringsAsFloat(values []string) ([]flo return nil, errCastStringToBigInt } - valueNum, err := bc.ComputeESDTBalanceAsFloat(valueBig) + valueNum, err := bc.ConvertBigValueToFloat(valueBig) if err != nil { return nil, err } diff --git a/process/elasticproc/converters/balanceToFloat_test.go b/process/elasticproc/converters/balanceToFloat_test.go index 8deb3375..96935dde 100644 --- a/process/elasticproc/converters/balanceToFloat_test.go +++ 
b/process/elasticproc/converters/balanceToFloat_test.go @@ -59,19 +59,19 @@ func TestComputeBalanceToFloat18Decimals(t *testing.T) { ap, _ := NewBalanceConverter(18) require.NotNil(t, ap) - valueNum, _ := ap.ComputeESDTBalanceAsFloat(big.NewInt(1)) + valueNum, _ := ap.ConvertBigValueToFloat(big.NewInt(1)) require.Equal(t, 1e-18, valueNum) - valueNum, _ = ap.ComputeESDTBalanceAsFloat(big.NewInt(10)) + valueNum, _ = ap.ConvertBigValueToFloat(big.NewInt(10)) require.Equal(t, 1e-17, valueNum) - valueNum, _ = ap.ComputeESDTBalanceAsFloat(big.NewInt(100)) + valueNum, _ = ap.ConvertBigValueToFloat(big.NewInt(100)) require.Equal(t, 1e-16, valueNum) - valueNum, _ = ap.ComputeESDTBalanceAsFloat(big.NewInt(1000)) + valueNum, _ = ap.ConvertBigValueToFloat(big.NewInt(1000)) require.Equal(t, 1e-15, valueNum) - valueNum, _ = ap.ComputeESDTBalanceAsFloat(big.NewInt(0)) + valueNum, _ = ap.ConvertBigValueToFloat(big.NewInt(0)) require.Equal(t, float64(0), valueNum) } @@ -83,14 +83,14 @@ func TestComputeBalanceToFloatInf(t *testing.T) { str := "erd1ahmy0yjhjg87n755yv99nzla22zzwfud55sa69gk3anyxyyucq9q2hgxwwerd1ahmy0yjhjg87n755yv99nzla22zzwfud55sa69gk3anyxyyucq9q2hgxwwerd1ahmy0yjhjg87n755yv99nzla22zzwfud55sa69gk3anyxyyucq9q2hgxwwerd1ahmy0yjhjg87n755yv99nzla22zzwfud55sa69gk3anyxyyucq9q2hgxww" bigValue := big.NewInt(0).SetBytes([]byte(str)) - valueNum, err := ap.ComputeESDTBalanceAsFloat(bigValue) + valueNum, err := ap.ConvertBigValueToFloat(bigValue) require.Equal(t, errValueTooBig, err) require.Equal(t, float64(0), valueNum) hexValueStr := "2642378914478872274757363306845016200438452904128227930177150600998175785079732885392662259024767727006622197340762976891962082611710440131598510606436851189901116516523843401702254087190199876126823217692111058487892984414016231313689031989" decoded, _ := hex.DecodeString(hexValueStr) bigValue = big.NewInt(0).SetBytes(decoded) - valueNum, err = ap.ComputeESDTBalanceAsFloat(bigValue) + valueNum, err = ap.ConvertBigValueToFloat(bigValue) 
require.Equal(t, errValueTooBig, err) require.Equal(t, float64(0), valueNum) } diff --git a/process/elasticproc/converters/tags.go b/process/elasticproc/converters/tags.go index 1bdb119b..3af2baa0 100644 --- a/process/elasticproc/converters/tags.go +++ b/process/elasticproc/converters/tags.go @@ -5,6 +5,9 @@ import ( ) const ( + // MaxIDSize is the maximum size of a document id + MaxIDSize = 512 + attributesSeparator = ";" keyValuesSeparator = ":" valuesSeparator = "," diff --git a/process/elasticproc/converters/tokenMetaData.go b/process/elasticproc/converters/tokenMetaData.go index f71dea1e..09f5f845 100644 --- a/process/elasticproc/converters/tokenMetaData.go +++ b/process/elasticproc/converters/tokenMetaData.go @@ -6,7 +6,7 @@ import ( "fmt" "strings" - "github.com/multiversx/mx-chain-core-go/data/outport" + "github.com/multiversx/mx-chain-core-go/data/alteredAccount" "github.com/multiversx/mx-chain-es-indexer-go/data" ) @@ -20,7 +20,7 @@ const ( ) // PrepareTokenMetaData will prepare the token metadata in a friendly format for database -func PrepareTokenMetaData(tokenMetadata *outport.TokenMetaData) *data.TokenMetaData { +func PrepareTokenMetaData(tokenMetadata *alteredAccount.TokenMetaData) *data.TokenMetaData { if tokenMetadata == nil { return nil } diff --git a/process/elasticproc/converters/tokenMetaData_test.go b/process/elasticproc/converters/tokenMetaData_test.go index 60bc95de..b68c6551 100644 --- a/process/elasticproc/converters/tokenMetaData_test.go +++ b/process/elasticproc/converters/tokenMetaData_test.go @@ -3,7 +3,7 @@ package converters import ( "testing" - "github.com/multiversx/mx-chain-core-go/data/outport" + "github.com/multiversx/mx-chain-core-go/data/alteredAccount" "github.com/multiversx/mx-chain-es-indexer-go/data" "github.com/stretchr/testify/require" ) @@ -26,7 +26,7 @@ func TestPrepareTokenMetaData(t *testing.T) { WhiteListedStorage: true, } - result := PrepareTokenMetaData(&outport.TokenMetaData{ + result := 
PrepareTokenMetaData(&alteredAccount.TokenMetaData{ Nonce: 2, Name: "token", Creator: "creator", diff --git a/process/elasticproc/elasticProcessor.go b/process/elasticproc/elasticProcessor.go index 291d6af7..6835a8df 100644 --- a/process/elasticproc/elasticProcessor.go +++ b/process/elasticproc/elasticProcessor.go @@ -2,15 +2,18 @@ package elasticproc import ( "bytes" + "context" "encoding/hex" "fmt" + "sync" - "github.com/elastic/go-elasticsearch/v7/esapi" "github.com/multiversx/mx-chain-core-go/core" "github.com/multiversx/mx-chain-core-go/core/check" coreData "github.com/multiversx/mx-chain-core-go/data" + "github.com/multiversx/mx-chain-core-go/data/alteredAccount" "github.com/multiversx/mx-chain-core-go/data/block" "github.com/multiversx/mx-chain-core-go/data/outport" + "github.com/multiversx/mx-chain-es-indexer-go/core/request" "github.com/multiversx/mx-chain-es-indexer-go/data" elasticIndexer "github.com/multiversx/mx-chain-es-indexer-go/process/dataindexer" "github.com/multiversx/mx-chain-es-indexer-go/process/elasticproc/converters" @@ -19,13 +22,6 @@ import ( logger "github.com/multiversx/mx-chain-logger-go" ) -const ( - docsKey = "docs" - errorKey = "error" - idKey = "_id" - foundKey = "found" -) - var ( log = logger.GetOrCreate("indexer/process") @@ -44,6 +40,7 @@ type objectsMap = map[string]interface{} type ArgElasticProcessor struct { BulkRequestMaxSize int UseKibana bool + ImportDB bool IndexTemplates map[string]*bytes.Buffer IndexPolicies map[string]*bytes.Buffer EnabledIndexes map[string]struct{} @@ -60,7 +57,9 @@ type ArgElasticProcessor struct { type elasticProcessor struct { bulkRequestMaxSize int + importDB bool enabledIndexes map[string]struct{} + mutex sync.RWMutex elasticClient DatabaseClientHandler accountsProc DBAccountHandler blockProc DBBlockHandler @@ -108,6 +107,7 @@ func (ei *elasticProcessor) init(useKibana bool, indexTemplates, _ map[string]*b return err } + //nolint if useKibana { // TODO: Re-activate after we think of a solid way 
to handle forks+rotating indexes // err = ei.createIndexPolicies(indexPolicies) @@ -200,39 +200,6 @@ func (ei *elasticProcessor) createAliases() error { return nil } -func (ei *elasticProcessor) getExistingObjMap(hashes []string, index string) (map[string]bool, error) { - if len(hashes) == 0 { - return make(map[string]bool), nil - } - - response := make(objectsMap) - err := ei.elasticClient.DoMultiGet(hashes, index, false, &response) - if err != nil { - return make(map[string]bool), err - } - - return getDecodedResponseMultiGet(response), nil -} - -func getDecodedResponseMultiGet(response objectsMap) map[string]bool { - founded := make(map[string]bool) - interfaceSlice, ok := response[docsKey].([]interface{}) - if !ok { - return founded - } - - for _, element := range interfaceSlice { - obj := element.(objectsMap) - _, ok = obj[errorKey] - if ok { - continue - } - founded[obj[idKey].(string)] = obj[foundKey].(bool) - } - - return founded -} - func getTemplateByName(templateName string, templateList map[string]*bytes.Buffer) *bytes.Buffer { if template, ok := templateList[templateName]; ok { return template @@ -243,21 +210,12 @@ func getTemplateByName(templateName string, templateList map[string]*bytes.Buffe } // SaveHeader will prepare and save information about a header in elasticsearch server -func (ei *elasticProcessor) SaveHeader( - headerHash []byte, - header coreData.HeaderHandler, - signersIndexes []uint64, - body *block.Body, - notarizedHeadersHashes []string, - gasConsumptionData outport.HeaderGasConsumption, - txsSize int, - pool *outport.Pool, -) error { +func (ei *elasticProcessor) SaveHeader(outportBlockWithHeader *outport.OutportBlockWithHeader) error { if !ei.isIndexEnabled(elasticIndexer.BlockIndex) { return nil } - elasticBlock, err := ei.blockProc.PrepareBlockForDB(headerHash, header, signersIndexes, body, notarizedHeadersHashes, gasConsumptionData, txsSize, pool) + elasticBlock, err := ei.blockProc.PrepareBlockForDB(outportBlockWithHeader) if err 
!= nil { return err } @@ -268,12 +226,12 @@ func (ei *elasticProcessor) SaveHeader( return err } - err = ei.indexEpochInfoData(header, buffSlice) + err = ei.indexEpochInfoData(outportBlockWithHeader.Header, buffSlice) if err != nil { return err } - return ei.doBulkRequests("", buffSlice.Buffers()) + return ei.doBulkRequests("", buffSlice.Buffers(), outportBlockWithHeader.ShardID) } func (ei *elasticProcessor) indexEpochInfoData(header coreData.HeaderHandler, buffSlice *data.BufferSlice) error { @@ -292,7 +250,9 @@ func (ei *elasticProcessor) RemoveHeader(header coreData.HeaderHandler) error { return err } + ctxWithValue := context.WithValue(context.Background(), request.ContextKey, request.ExtendTopicWithShardID(request.RemoveTopic, header.GetShardID())) return ei.elasticClient.DoQueryRemove( + ctxWithValue, elasticIndexer.BlockIndex, converters.PrepareHashesForQueryRemove([]string{hex.EncodeToString(headerHash)}), ) @@ -305,7 +265,9 @@ func (ei *elasticProcessor) RemoveMiniblocks(header coreData.HeaderHandler, body return nil } + ctxWithValue := context.WithValue(context.Background(), request.ContextKey, request.ExtendTopicWithShardID(request.RemoveTopic, header.GetShardID())) return ei.elasticClient.DoQueryRemove( + ctxWithValue, elasticIndexer.MiniblocksIndex, converters.PrepareHashesForQueryRemove(encodedMiniblocksHashes), ) @@ -314,23 +276,24 @@ func (ei *elasticProcessor) RemoveMiniblocks(header coreData.HeaderHandler, body // RemoveTransactions will remove transaction that are in miniblock from the elasticsearch server func (ei *elasticProcessor) RemoveTransactions(header coreData.HeaderHandler, body *block.Body) error { encodedTxsHashes, encodedScrsHashes := ei.transactionsProc.GetHexEncodedHashesForRemove(header, body) + shardID := header.GetShardID() - err := ei.removeIfHashesNotEmpty(elasticIndexer.TransactionsIndex, encodedTxsHashes) + err := ei.removeIfHashesNotEmpty(elasticIndexer.TransactionsIndex, encodedTxsHashes, shardID) if err != nil { return 
err } - err = ei.removeIfHashesNotEmpty(elasticIndexer.ScResultsIndex, encodedScrsHashes) + err = ei.removeIfHashesNotEmpty(elasticIndexer.ScResultsIndex, encodedScrsHashes, shardID) if err != nil { return err } - err = ei.removeIfHashesNotEmpty(elasticIndexer.OperationsIndex, append(encodedTxsHashes, encodedScrsHashes...)) + err = ei.removeIfHashesNotEmpty(elasticIndexer.OperationsIndex, append(encodedTxsHashes, encodedScrsHashes...), shardID) if err != nil { return err } - err = ei.removeIfHashesNotEmpty(elasticIndexer.LogsIndex, append(encodedTxsHashes, encodedScrsHashes...)) + err = ei.removeIfHashesNotEmpty(elasticIndexer.LogsIndex, append(encodedTxsHashes, encodedScrsHashes...), shardID) if err != nil { return err } @@ -347,16 +310,19 @@ func (ei *elasticProcessor) updateDelegatorsInCaseOfRevert(header coreData.Heade return nil } + ctxWithValue := context.WithValue(context.Background(), request.ContextKey, request.ExtendTopicWithShardID(request.UpdateTopic, header.GetShardID())) delegatorsQuery := ei.logsAndEventsProc.PrepareDelegatorsQueryInCaseOfRevert(header.GetTimeStamp()) - return ei.elasticClient.UpdateByQuery(elasticIndexer.DelegatorsIndex, delegatorsQuery) + return ei.elasticClient.UpdateByQuery(ctxWithValue, elasticIndexer.DelegatorsIndex, delegatorsQuery) } -func (ei *elasticProcessor) removeIfHashesNotEmpty(index string, hashes []string) error { +func (ei *elasticProcessor) removeIfHashesNotEmpty(index string, hashes []string, shardID uint32) error { if len(hashes) == 0 { return nil } + ctxWithValue := context.WithValue(context.Background(), request.ContextKey, request.ExtendTopicWithShardID(request.RemoveTopic, shardID)) return ei.elasticClient.DoQueryRemove( + ctxWithValue, index, converters.PrepareHashesForQueryRemove(hashes), ) @@ -364,8 +330,10 @@ func (ei *elasticProcessor) removeIfHashesNotEmpty(index string, hashes []string // RemoveAccountsESDT will remove data from accountsesdt index and accountsesdthistory func (ei *elasticProcessor) 
RemoveAccountsESDT(headerTimestamp uint64, shardID uint32) error { + ctxWithValue := context.WithValue(context.Background(), request.ContextKey, request.ExtendTopicWithShardID(request.RemoveTopic, shardID)) query := fmt.Sprintf(`{"query": {"bool": {"must": [{"match": {"shardID": {"query": %d,"operator": "AND"}}},{"match": {"timestamp": {"query": "%d","operator": "AND"}}}]}}}`, shardID, headerTimestamp) err := ei.elasticClient.DoQueryRemove( + ctxWithValue, elasticIndexer.AccountsESDTIndex, bytes.NewBuffer([]byte(query)), ) @@ -374,63 +342,44 @@ func (ei *elasticProcessor) RemoveAccountsESDT(headerTimestamp uint64, shardID u } return ei.elasticClient.DoQueryRemove( + ctxWithValue, elasticIndexer.AccountsESDTHistoryIndex, bytes.NewBuffer([]byte(query)), ) } // SaveMiniblocks will prepare and save information about miniblocks in elasticsearch server -func (ei *elasticProcessor) SaveMiniblocks(header coreData.HeaderHandler, body *block.Body) error { +func (ei *elasticProcessor) SaveMiniblocks(header coreData.HeaderHandler, miniBlocks []*block.MiniBlock) error { if !ei.isIndexEnabled(elasticIndexer.MiniblocksIndex) { return nil } - mbs := ei.miniblocksProc.PrepareDBMiniblocks(header, body) + mbs := ei.miniblocksProc.PrepareDBMiniblocks(header, miniBlocks) if len(mbs) == 0 { return nil } - miniblocksInDBMap, err := ei.miniblocksInDBMap(mbs) - if err != nil { - log.Warn("elasticProcessor.SaveMiniblocks cannot get indexed miniblocks", "error", err) - } - buffSlice := data.NewBufferSlice(ei.bulkRequestMaxSize) - ei.miniblocksProc.SerializeBulkMiniBlocks(mbs, miniblocksInDBMap, buffSlice, elasticIndexer.MiniblocksIndex, header.GetShardID()) - - return ei.doBulkRequests("", buffSlice.Buffers()) -} - -func (ei *elasticProcessor) miniblocksInDBMap(mbs []*data.Miniblock) (map[string]bool, error) { - mbsHashes := make([]string, len(mbs)) - for idx := range mbs { - mbsHashes[idx] = mbs[idx].Hash - } + ei.miniblocksProc.SerializeBulkMiniBlocks(mbs, buffSlice, 
elasticIndexer.MiniblocksIndex, header.GetShardID()) - return ei.getExistingObjMap(mbsHashes, elasticIndexer.MiniblocksIndex) + return ei.doBulkRequests("", buffSlice.Buffers(), header.GetShardID()) } // SaveTransactions will prepare and save information about a transactions in elasticsearch server -func (ei *elasticProcessor) SaveTransactions( - body *block.Body, - header coreData.HeaderHandler, - pool *outport.Pool, - coreAlteredAccounts map[string]*outport.AlteredAccount, - isImportDB bool, - numOfShards uint32, -) error { - headerTimestamp := header.GetTimeStamp() +func (ei *elasticProcessor) SaveTransactions(obh *outport.OutportBlockWithHeader) error { + headerTimestamp := obh.Header.GetTimeStamp() - preparedResults := ei.transactionsProc.PrepareTransactionsForDatabase(body, header, pool, isImportDB, numOfShards) - logsData := ei.logsAndEventsProc.ExtractDataFromLogs(pool.Logs, preparedResults, headerTimestamp, header.GetShardID(), numOfShards) + miniBlocks := append(obh.BlockData.Body.MiniBlocks, obh.BlockData.IntraShardMiniBlocks...) 
+ preparedResults := ei.transactionsProc.PrepareTransactionsForDatabase(miniBlocks, obh.Header, obh.TransactionPool, ei.isImportDB(), obh.NumberOfShards) + logsData := ei.logsAndEventsProc.ExtractDataFromLogs(obh.TransactionPool.Logs, preparedResults, headerTimestamp, obh.Header.GetShardID(), obh.NumberOfShards) buffers := data.NewBufferSlice(ei.bulkRequestMaxSize) - err := ei.indexTransactions(preparedResults.Transactions, preparedResults.TxHashStatus, header, buffers) + err := ei.indexTransactions(preparedResults.Transactions, logsData.TxHashStatusInfo, obh.Header, buffers) if err != nil { return err } - err = ei.prepareAndIndexOperations(preparedResults.Transactions, preparedResults.TxHashStatus, header, preparedResults.ScResults, buffers, isImportDB) + err = ei.prepareAndIndexOperations(preparedResults.Transactions, logsData.TxHashStatusInfo, obh.Header, preparedResults.ScResults, buffers, ei.isImportDB()) if err != nil { return err } @@ -440,12 +389,12 @@ func (ei *elasticProcessor) SaveTransactions( return err } - err = ei.indexNFTCreateInfo(logsData.Tokens, coreAlteredAccounts, buffers) + err = ei.indexNFTCreateInfo(logsData.Tokens, obh.AlteredAccounts, buffers, obh.ShardID) if err != nil { return err } - err = ei.prepareAndIndexLogs(pool.Logs, headerTimestamp, buffers) + err = ei.prepareAndIndexLogs(obh.TransactionPool.Logs, headerTimestamp, buffers) if err != nil { return err } @@ -461,7 +410,7 @@ func (ei *elasticProcessor) SaveTransactions( } tagsCount := tags.NewTagsCount() - err = ei.indexAlteredAccounts(headerTimestamp, logsData.NFTsDataUpdates, coreAlteredAccounts, buffers, tagsCount, header.GetShardID()) + err = ei.indexAlteredAccounts(headerTimestamp, logsData.NFTsDataUpdates, obh.AlteredAccounts, buffers, tagsCount, obh.Header.GetShardID()) if err != nil { return err } @@ -471,7 +420,7 @@ func (ei *elasticProcessor) SaveTransactions( return err } - err = ei.indexTokens(logsData.TokensInfo, logsData.NFTsDataUpdates, buffers) + err = 
ei.indexTokens(logsData.TokensInfo, logsData.NFTsDataUpdates, buffers, obh.ShardID) if err != nil { return err } @@ -481,7 +430,7 @@ func (ei *elasticProcessor) SaveTransactions( return err } - err = ei.indexNFTBurnInfo(logsData.TokensSupply, buffers) + err = ei.indexNFTBurnInfo(logsData.TokensSupply, buffers, obh.ShardID) if err != nil { return err } @@ -495,12 +444,12 @@ func (ei *elasticProcessor) SaveTransactions( return err } - err = ei.indexScDeploys(logsData.ScDeploys, buffers) + err = ei.indexScDeploys(logsData.ScDeploys, logsData.ChangeOwnerOperations, buffers) if err != nil { return err } - return ei.doBulkRequests("", buffers.Buffers()) + return ei.doBulkRequests("", buffers.Buffers(), obh.ShardID) } func (ei *elasticProcessor) prepareAndIndexRolesData(tokenRolesAndProperties *tokeninfo.TokenRolesAndProperties, buffSlice *data.BufferSlice, index string) error { @@ -532,7 +481,7 @@ func (ei *elasticProcessor) indexTransactionsFeeData(txsHashFeeData map[string]* return ei.transactionsProc.SerializeTransactionsFeeData(txsHashFeeData, buffSlice, elasticIndexer.OperationsIndex) } -func (ei *elasticProcessor) prepareAndIndexLogs(logsAndEvents []*coreData.LogData, timestamp uint64, buffSlice *data.BufferSlice) error { +func (ei *elasticProcessor) prepareAndIndexLogs(logsAndEvents []*outport.LogData, timestamp uint64, buffSlice *data.BufferSlice) error { if !ei.isIndexEnabled(elasticIndexer.LogsIndex) { return nil } @@ -542,25 +491,30 @@ func (ei *elasticProcessor) prepareAndIndexLogs(logsAndEvents []*coreData.LogDat return ei.logsAndEventsProc.SerializeLogs(logsDB, buffSlice, elasticIndexer.LogsIndex) } -func (ei *elasticProcessor) indexScDeploys(deployData map[string]*data.ScDeployInfo, buffSlice *data.BufferSlice) error { +func (ei *elasticProcessor) indexScDeploys(deployData map[string]*data.ScDeployInfo, changeOwnerOperation map[string]*data.OwnerData, buffSlice *data.BufferSlice) error { if !ei.isIndexEnabled(elasticIndexer.SCDeploysIndex) { return nil } - 
return ei.logsAndEventsProc.SerializeSCDeploys(deployData, buffSlice, elasticIndexer.SCDeploysIndex) + err := ei.logsAndEventsProc.SerializeSCDeploys(deployData, buffSlice, elasticIndexer.SCDeploysIndex) + if err != nil { + return err + } + + return ei.logsAndEventsProc.SerializeChangeOwnerOperations(changeOwnerOperation, buffSlice, elasticIndexer.SCDeploysIndex) } -func (ei *elasticProcessor) indexTransactions(txs []*data.Transaction, txHashStatus map[string]string, header coreData.HeaderHandler, bytesBuff *data.BufferSlice) error { +func (ei *elasticProcessor) indexTransactions(txs []*data.Transaction, txHashStatusInfo map[string]*outport.StatusInfo, header coreData.HeaderHandler, bytesBuff *data.BufferSlice) error { if !ei.isIndexEnabled(elasticIndexer.TransactionsIndex) { return nil } - return ei.transactionsProc.SerializeTransactions(txs, txHashStatus, header.GetShardID(), bytesBuff, elasticIndexer.TransactionsIndex) + return ei.transactionsProc.SerializeTransactions(txs, txHashStatusInfo, header.GetShardID(), bytesBuff, elasticIndexer.TransactionsIndex) } func (ei *elasticProcessor) prepareAndIndexOperations( txs []*data.Transaction, - txHashStatus map[string]string, + txHashStatusInfo map[string]*outport.StatusInfo, header coreData.HeaderHandler, scrs []*data.ScResult, buffSlice *data.BufferSlice, @@ -572,7 +526,7 @@ func (ei *elasticProcessor) prepareAndIndexOperations( processedTxs, processedSCRs := ei.operationsProc.ProcessTransactionsAndSCRs(txs, scrs, isImportDB, header.GetShardID()) - err := ei.transactionsProc.SerializeTransactions(processedTxs, txHashStatus, header.GetShardID(), buffSlice, elasticIndexer.OperationsIndex) + err := ei.transactionsProc.SerializeTransactions(processedTxs, txHashStatusInfo, header.GetShardID(), buffSlice, elasticIndexer.OperationsIndex) if err != nil { return err } @@ -581,55 +535,49 @@ func (ei *elasticProcessor) prepareAndIndexOperations( } // SaveValidatorsRating will save validators rating -func (ei *elasticProcessor) 
SaveValidatorsRating(index string, validatorsRatingInfo []*data.ValidatorRatingInfo) error { +func (ei *elasticProcessor) SaveValidatorsRating(ratingData *outport.ValidatorsRating) error { if !ei.isIndexEnabled(elasticIndexer.RatingIndex) { return nil } - buffSlice, err := ei.validatorsProc.SerializeValidatorsRating(index, validatorsRatingInfo) + buffSlice, err := ei.validatorsProc.SerializeValidatorsRating(ratingData) if err != nil { return err } - return ei.doBulkRequests(elasticIndexer.RatingIndex, buffSlice) + return ei.doBulkRequests(elasticIndexer.RatingIndex, buffSlice, ratingData.ShardID) } // SaveShardValidatorsPubKeys will prepare and save information about a shard validators public keys in elasticsearch server -func (ei *elasticProcessor) SaveShardValidatorsPubKeys(shardID, epoch uint32, shardValidatorsPubKeys [][]byte) error { +func (ei *elasticProcessor) SaveShardValidatorsPubKeys(validatorsPubKeys *outport.ValidatorsPubKeys) error { if !ei.isIndexEnabled(elasticIndexer.ValidatorsIndex) { return nil } - validatorsPubKeys := ei.validatorsProc.PrepareValidatorsPublicKeys(shardValidatorsPubKeys) - buff, err := ei.validatorsProc.SerializeValidatorsPubKeys(validatorsPubKeys) + buffSlice, err := ei.validatorsProc.PrepareAnSerializeValidatorsPubKeys(validatorsPubKeys) if err != nil { return err } - req := &esapi.IndexRequest{ - Index: elasticIndexer.ValidatorsIndex, - DocumentID: fmt.Sprintf("%d_%d", shardID, epoch), - Body: bytes.NewReader(buff.Bytes()), - } - - return ei.elasticClient.DoRequest(req) + return ei.doBulkRequests(elasticIndexer.ValidatorsIndex, buffSlice, validatorsPubKeys.ShardID) } // SaveRoundsInfo will prepare and save information about a slice of rounds in elasticsearch server -func (ei *elasticProcessor) SaveRoundsInfo(info []*data.RoundInfo) error { +func (ei *elasticProcessor) SaveRoundsInfo(rounds *outport.RoundsInfo) error { if !ei.isIndexEnabled(elasticIndexer.RoundsIndex) { return nil } - buff := 
ei.statisticsProc.SerializeRoundsInfo(info) + buff := ei.statisticsProc.SerializeRoundsInfo(rounds) - return ei.elasticClient.DoBulkRequest(buff, elasticIndexer.RoundsIndex) + ctxWithValue := context.WithValue(context.Background(), request.ContextKey, request.ExtendTopicWithShardID(request.BulkTopic, rounds.ShardID)) + return ei.elasticClient.DoBulkRequest(ctxWithValue, buff, elasticIndexer.RoundsIndex) } func (ei *elasticProcessor) indexAlteredAccounts( timestamp uint64, updatesNFTsData []*data.NFTDataUpdate, - coreAlteredAccounts map[string]*outport.AlteredAccount, + coreAlteredAccounts map[string]*alteredAccount.AlteredAccount, buffSlice *data.BufferSlice, tagsCount data.CountTags, shardID uint32, @@ -653,7 +601,7 @@ func (ei *elasticProcessor) saveAccountsESDT( shardID uint32, ) error { accountsESDTMap, tokensData := ei.accountsProc.PrepareAccountsMapESDT(timestamp, wrappedAccounts, tagsCount, shardID) - err := ei.addTokenTypeAndCurrentOwnerInAccountsESDT(tokensData, accountsESDTMap) + err := ei.addTokenTypeAndCurrentOwnerInAccountsESDT(tokensData, accountsESDTMap, shardID) if err != nil { return err } @@ -666,13 +614,14 @@ func (ei *elasticProcessor) saveAccountsESDT( return ei.saveAccountsESDTHistory(timestamp, accountsESDTMap, buffSlice, shardID) } -func (ei *elasticProcessor) addTokenTypeAndCurrentOwnerInAccountsESDT(tokensData data.TokensHandler, accountsESDTMap map[string]*data.AccountInfo) error { +func (ei *elasticProcessor) addTokenTypeAndCurrentOwnerInAccountsESDT(tokensData data.TokensHandler, accountsESDTMap map[string]*data.AccountInfo, shardID uint32) error { if check.IfNil(tokensData) || tokensData.Len() == 0 { return nil } responseTokens := &data.ResponseTokens{} - err := ei.elasticClient.DoMultiGet(tokensData.GetAllTokens(), elasticIndexer.TokensIndex, true, responseTokens) + ctxWithValue := context.WithValue(context.Background(), request.ContextKey, request.ExtendTopicWithShardID(request.GetTopic, shardID)) + err := 
ei.elasticClient.DoMultiGet(ctxWithValue, tokensData.GetAllTokens(), elasticIndexer.TokensIndex, true, responseTokens) if err != nil { return err } @@ -704,14 +653,15 @@ func (ei *elasticProcessor) indexAccountsESDT( return ei.accountsProc.SerializeAccountsESDT(accountsESDTMap, updatesNFTsData, buffSlice, elasticIndexer.AccountsESDTIndex) } -func (ei *elasticProcessor) indexNFTCreateInfo(tokensData data.TokensHandler, coreAlteredAccounts map[string]*outport.AlteredAccount, buffSlice *data.BufferSlice) error { +func (ei *elasticProcessor) indexNFTCreateInfo(tokensData data.TokensHandler, coreAlteredAccounts map[string]*alteredAccount.AlteredAccount, buffSlice *data.BufferSlice, shardID uint32) error { shouldSkipIndex := !ei.isIndexEnabled(elasticIndexer.TokensIndex) || tokensData.Len() == 0 if shouldSkipIndex { return nil } + ctxWithValue := context.WithValue(context.Background(), request.ContextKey, request.ExtendTopicWithShardID(request.GetTopic, shardID)) responseTokens := &data.ResponseTokens{} - err := ei.elasticClient.DoMultiGet(tokensData.GetAllTokens(), elasticIndexer.TokensIndex, true, responseTokens) + err := ei.elasticClient.DoMultiGet(ctxWithValue, tokensData.GetAllTokens(), elasticIndexer.TokensIndex, true, responseTokens) if err != nil { return err } @@ -724,14 +674,15 @@ func (ei *elasticProcessor) indexNFTCreateInfo(tokensData data.TokensHandler, co return ei.accountsProc.SerializeNFTCreateInfo(tokens, buffSlice, elasticIndexer.TokensIndex) } -func (ei *elasticProcessor) indexNFTBurnInfo(tokensData data.TokensHandler, buffSlice *data.BufferSlice) error { +func (ei *elasticProcessor) indexNFTBurnInfo(tokensData data.TokensHandler, buffSlice *data.BufferSlice, shardID uint32) error { shouldSkipIndex := !ei.isIndexEnabled(elasticIndexer.TokensIndex) || tokensData.Len() == 0 if shouldSkipIndex { return nil } + ctxWithValue := context.WithValue(context.Background(), request.ContextKey, request.ExtendTopicWithShardID(request.GetTopic, shardID)) 
responseTokens := &data.ResponseTokens{} - err := ei.elasticClient.DoMultiGet(tokensData.GetAllTokens(), elasticIndexer.TokensIndex, true, responseTokens) + err := ei.elasticClient.DoMultiGet(ctxWithValue, tokensData.GetAllTokens(), elasticIndexer.TokensIndex, true, responseTokens) if err != nil { return err } @@ -742,9 +693,18 @@ func (ei *elasticProcessor) indexNFTBurnInfo(tokensData data.TokensHandler, buff } // SaveAccounts will prepare and save information about provided accounts in elasticsearch server -func (ei *elasticProcessor) SaveAccounts(timestamp uint64, accts []*data.Account, shardID uint32) error { +func (ei *elasticProcessor) SaveAccounts(accountsData *outport.Accounts) error { buffSlice := data.NewBufferSlice(ei.bulkRequestMaxSize) - return ei.saveAccounts(timestamp, accts, buffSlice, shardID) + + accounts := make([]*data.Account, 0, len(accountsData.AlteredAccounts)) + for _, account := range accountsData.AlteredAccounts { + accounts = append(accounts, &data.Account{ + UserAccount: account, + IsSender: false, + }) + } + + return ei.saveAccounts(accountsData.BlockTimestamp, accounts, buffSlice, accountsData.ShardID) } func (ei *elasticProcessor) saveAccounts(timestamp uint64, accts []*data.Account, buffSlice *data.BufferSlice, shardID uint32) error { @@ -814,10 +774,11 @@ func (ei *elasticProcessor) isIndexEnabled(index string) bool { return isEnabled } -func (ei *elasticProcessor) doBulkRequests(index string, buffSlice []*bytes.Buffer) error { +func (ei *elasticProcessor) doBulkRequests(index string, buffSlice []*bytes.Buffer, shardID uint32) error { var err error for idx := range buffSlice { - err = ei.elasticClient.DoBulkRequest(buffSlice[idx], index) + ctxWithValue := context.WithValue(context.Background(), request.ContextKey, request.ExtendTopicWithShardID(request.BulkTopic, shardID)) + err = ei.elasticClient.DoBulkRequest(ctxWithValue, buffSlice[idx], index) if err != nil { return err } @@ -826,6 +787,23 @@ func (ei *elasticProcessor) 
doBulkRequests(index string, buffSlice []*bytes.Buff return nil } +// SetOutportConfig will set the outport config +func (ei *elasticProcessor) SetOutportConfig(cfg outport.OutportConfig) error { + ei.mutex.Lock() + defer ei.mutex.Unlock() + + ei.importDB = cfg.IsInImportDBMode + + return nil +} + +func (ei *elasticProcessor) isImportDB() bool { + ei.mutex.RLock() + defer ei.mutex.RUnlock() + + return ei.importDB +} + // IsInterfaceNil returns true if there is no value under the interface func (ei *elasticProcessor) IsInterfaceNil() bool { return ei == nil diff --git a/process/elasticproc/elasticProcessor_test.go b/process/elasticproc/elasticProcessor_test.go index 427922c2..381db5db 100644 --- a/process/elasticproc/elasticProcessor_test.go +++ b/process/elasticproc/elasticProcessor_test.go @@ -3,16 +3,10 @@ package elasticproc import ( "bytes" "encoding/hex" - "encoding/json" "errors" - "fmt" - "io/ioutil" - "math/big" - "strconv" "strings" "testing" - "github.com/elastic/go-elasticsearch/v7/esapi" "github.com/multiversx/mx-chain-core-go/core" coreData "github.com/multiversx/mx-chain-core-go/data" dataBlock "github.com/multiversx/mx-chain-core-go/data/block" @@ -48,6 +42,22 @@ func newElasticsearchProcessor(elasticsearchWriter DatabaseClientHandler, argume } } +func createEmptyOutportBlockWithHeader() *outport.OutportBlockWithHeader { + signerIndexes := []uint64{0, 1} + header := &dataBlock.Header{Nonce: 1} + return &outport.OutportBlockWithHeader{ + Header: header, + OutportBlock: &outport.OutportBlock{ + BlockData: &outport.BlockData{ + Body: &dataBlock.Body{}, + }, + SignersIndexes: signerIndexes, + HeaderGasConsumption: &outport.HeaderGasConsumption{}, + TransactionPool: &outport.TransactionPool{}, + }, + } +} + func createMockElasticProcessorArgs() *ArgElasticProcessor { balanceConverter, _ := converters.NewBalanceConverter(10) @@ -80,43 +90,6 @@ func createMockElasticProcessorArgs() *ArgElasticProcessor { } } -func newTestTxPool() 
map[string]coreData.TransactionHandlerWithGasUsedAndFee { - txPool := map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - "tx1": outport.NewTransactionHandlerWithGasAndFee(&transaction.Transaction{ - Nonce: uint64(1), - Value: big.NewInt(1), - RcvAddr: []byte("receiver_address1"), - SndAddr: []byte("sender_address1"), - GasPrice: uint64(10000), - GasLimit: uint64(1000), - Data: []byte("tx_data1"), - Signature: []byte("signature1"), - }, 0, big.NewInt(0)), - "tx2": outport.NewTransactionHandlerWithGasAndFee(&transaction.Transaction{ - Nonce: uint64(2), - Value: big.NewInt(2), - RcvAddr: []byte("receiver_address2"), - SndAddr: []byte("sender_address2"), - GasPrice: uint64(10000), - GasLimit: uint64(1000), - Data: []byte("tx_data2"), - Signature: []byte("signature2"), - }, 0, big.NewInt(0)), - "tx3": outport.NewTransactionHandlerWithGasAndFee(&transaction.Transaction{ - Nonce: uint64(3), - Value: big.NewInt(3), - RcvAddr: []byte("receiver_address3"), - SndAddr: []byte("sender_address3"), - GasPrice: uint64(10000), - GasLimit: uint64(1000), - Data: []byte("tx_data3"), - Signature: []byte("signature3"), - }, 0, big.NewInt(0)), - } - - return txPool -} - func newTestBlockBody() *dataBlock.Body { return &dataBlock.Body{ MiniBlocks: []*dataBlock.MiniBlock{ @@ -349,8 +322,7 @@ func TestElasticProcessor_RemoveMiniblocks(t *testing.T) { func TestElasticseachDatabaseSaveHeader_RequestError(t *testing.T) { localErr := errors.New("localErr") - header := &dataBlock.Header{Nonce: 1} - signerIndexes := []uint64{0, 1} + arguments := createMockElasticProcessorArgs() dbWriter := &mock.DatabaseWriterStub{ DoBulkRequestCalled: func(buff *bytes.Buffer, index string) error { @@ -360,51 +332,10 @@ func TestElasticseachDatabaseSaveHeader_RequestError(t *testing.T) { arguments.BlockProc, _ = block.NewBlockProcessor(&mock.HasherMock{}, &mock.MarshalizerMock{}) elasticDatabase := newElasticsearchProcessor(dbWriter, arguments) - err := elasticDatabase.SaveHeader([]byte("hh"), header, 
signerIndexes, &dataBlock.Body{}, nil, outport.HeaderGasConsumption{}, 1, &outport.Pool{}) + err := elasticDatabase.SaveHeader(createEmptyOutportBlockWithHeader()) require.Equal(t, localErr, err) } -func TestElasticseachDatabaseSaveHeader_CheckRequestBody(t *testing.T) { - header := &dataBlock.Header{ - Nonce: 1, - } - signerIndexes := []uint64{0, 1} - - miniBlock := &dataBlock.MiniBlock{ - Type: dataBlock.TxBlock, - } - blockBody := &dataBlock.Body{ - MiniBlocks: []*dataBlock.MiniBlock{ - miniBlock, - }, - } - - arguments := createMockElasticProcessorArgs() - - mbHash, _ := core.CalculateHash(&mock.MarshalizerMock{}, &mock.HasherMock{}, miniBlock) - hexEncodedHash := hex.EncodeToString(mbHash) - - dbWriter := &mock.DatabaseWriterStub{ - DoRequestCalled: func(req *esapi.IndexRequest) error { - require.Equal(t, dataindexer.BlockIndex, req.Index) - - var bl data.Block - blockBytes, _ := ioutil.ReadAll(req.Body) - _ = json.Unmarshal(blockBytes, &bl) - require.Equal(t, header.Nonce, bl.Nonce) - require.Equal(t, hexEncodedHash, bl.MiniBlocksHashes[0]) - require.Equal(t, signerIndexes, bl.Validators) - - return nil - }, - } - - arguments.BlockProc, _ = block.NewBlockProcessor(&mock.HasherMock{}, &mock.MarshalizerMock{}) - elasticDatabase := newElasticsearchProcessor(dbWriter, arguments) - err := elasticDatabase.SaveHeader([]byte("hh"), header, signerIndexes, blockBody, nil, outport.HeaderGasConsumption{}, 1, &outport.Pool{}) - require.Nil(t, err) -} - func TestElasticseachSaveTransactions(t *testing.T) { localErr := errors.New("localErr") arguments := createMockElasticProcessorArgs() @@ -416,7 +347,6 @@ func TestElasticseachSaveTransactions(t *testing.T) { body := newTestBlockBody() header := &dataBlock.Header{Nonce: 1, TxCount: 2} - txPool := newTestTxPool() bc, _ := converters.NewBalanceConverter(18) args := &transactions.ArgsTransactionProcessor{ @@ -428,18 +358,21 @@ func TestElasticseachSaveTransactions(t *testing.T) { txDbProc, _ := 
transactions.NewTransactionsProcessor(args) arguments.TransactionsProc = txDbProc + outportBlock := createEmptyOutportBlockWithHeader() + outportBlock.Header = header + outportBlock.BlockData.Body = body + outportBlock.TransactionPool.Transactions = map[string]*outport.TxInfo{ + hex.EncodeToString([]byte("tx1")): {Transaction: &transaction.Transaction{}, FeeInfo: &outport.FeeInfo{}}, + } + elasticDatabase := newElasticsearchProcessor(dbWriter, arguments) - pool := &outport.Pool{Txs: txPool} - err := elasticDatabase.SaveTransactions(body, header, pool, nil, false, 3) + err := elasticDatabase.SaveTransactions(outportBlock) require.Equal(t, localErr, err) } func TestElasticProcessor_SaveValidatorsRating(t *testing.T) { - docID := "0_1" localErr := errors.New("localErr") - blsKey := "bls" - arguments := createMockElasticProcessorArgs() arguments.DBClient = &mock.DatabaseWriterStub{ DoBulkRequestCalled: func(buff *bytes.Buffer, index string) error { @@ -450,15 +383,11 @@ func TestElasticProcessor_SaveValidatorsRating(t *testing.T) { arguments.ValidatorsProc, _ = validators.NewValidatorsProcessor(mock.NewPubkeyConverterMock(32), 0) elasticProc, _ := NewElasticProcessor(arguments) - err := elasticProc.SaveValidatorsRating( - docID, - []*data.ValidatorRatingInfo{ - { - PublicKey: blsKey, - Rating: 100, - }, - }, - ) + err := elasticProc.SaveValidatorsRating(&outport.ValidatorsRating{ + ShardID: 0, + Epoch: 1, + ValidatorsRatingInfo: []*outport.ValidatorRatingInfo{{}}, + }) require.Equal(t, localErr, err) } @@ -482,7 +411,7 @@ func TestElasticProcessor_SaveMiniblocks(t *testing.T) { body := &dataBlock.Body{MiniBlocks: dataBlock.MiniBlockSlice{ {SenderShardID: 0, ReceiverShardID: 1}, }} - err := elasticProc.SaveMiniblocks(header, body) + err := elasticProc.SaveMiniblocks(header, body.MiniBlocks) require.Equal(t, localErr, err) } @@ -493,53 +422,24 @@ func TestElasticsearch_saveShardValidatorsPubKeys_RequestError(t *testing.T) { localErr := errors.New("localErr") arguments := 
createMockElasticProcessorArgs() dbWriter := &mock.DatabaseWriterStub{ - DoRequestCalled: func(req *esapi.IndexRequest) error { + DoBulkRequestCalled: func(buff *bytes.Buffer, index string) error { return localErr }, } arguments.ValidatorsProc, _ = validators.NewValidatorsProcessor(mock.NewPubkeyConverterMock(32), 0) elasticDatabase := newElasticsearchProcessor(dbWriter, arguments) - err := elasticDatabase.SaveShardValidatorsPubKeys(shardID, epoch, valPubKeys) - require.Equal(t, localErr, err) -} - -func TestElasticsearch_saveShardValidatorsPubKeys(t *testing.T) { - shardID := uint32(0) - epoch := uint32(0) - valPubKeys := [][]byte{[]byte("key1"), []byte("key2")} - arguments := createMockElasticProcessorArgs() - dbWriter := &mock.DatabaseWriterStub{ - DoRequestCalled: func(req *esapi.IndexRequest) error { - require.Equal(t, fmt.Sprintf("%d_%d", shardID, epoch), req.DocumentID) - return nil + err := elasticDatabase.SaveShardValidatorsPubKeys(&outport.ValidatorsPubKeys{ + Epoch: epoch, + ShardValidatorsPubKeys: map[uint32]*outport.PubKeys{ + shardID: {Keys: valPubKeys}, }, - } - elasticDatabase := newElasticsearchProcessor(dbWriter, arguments) - - err := elasticDatabase.SaveShardValidatorsPubKeys(shardID, epoch, valPubKeys) - require.Nil(t, err) -} - -func TestElasticsearch_saveRoundInfo(t *testing.T) { - roundInfo := &data.RoundInfo{ - Index: 1, ShardId: 0, BlockWasProposed: true, - } - arguments := createMockElasticProcessorArgs() - dbWriter := &mock.DatabaseWriterStub{ - DoRequestCalled: func(req *esapi.IndexRequest) error { - require.Equal(t, strconv.FormatUint(uint64(roundInfo.ShardId), 10)+"_"+strconv.FormatUint(roundInfo.Index, 10), req.DocumentID) - return nil - }, - } - elasticDatabase := newElasticsearchProcessor(dbWriter, arguments) - - err := elasticDatabase.SaveRoundsInfo([]*data.RoundInfo{roundInfo}) - require.Nil(t, err) + }) + require.Equal(t, localErr, err) } func TestElasticsearch_saveRoundInfoRequestError(t *testing.T) { - roundInfo := 
&data.RoundInfo{} + roundInfo := &outport.RoundInfo{} localError := errors.New("local err") arguments := createMockElasticProcessorArgs() dbWriter := &mock.DatabaseWriterStub{ @@ -549,7 +449,7 @@ func TestElasticsearch_saveRoundInfoRequestError(t *testing.T) { } elasticDatabase := newElasticsearchProcessor(dbWriter, arguments) - err := elasticDatabase.SaveRoundsInfo([]*data.RoundInfo{roundInfo}) + err := elasticDatabase.SaveRoundsInfo(&outport.RoundsInfo{RoundsInfo: []*outport.RoundInfo{roundInfo}}) require.Equal(t, localError, err) } @@ -622,10 +522,7 @@ func TestElasticProcessor_IndexEpochInfoData(t *testing.T) { err := elasticSearchProc.indexEpochInfoData(shardHeader, buffSlice) require.True(t, errors.Is(err, dataindexer.ErrHeaderTypeAssertion)) - body := &dataBlock.Body{} - metaHeader := &dataBlock.MetaBlock{} - - err = elasticSearchProc.SaveHeader([]byte("hh"), metaHeader, nil, body, nil, outport.HeaderGasConsumption{}, 0, &outport.Pool{}) + err = elasticSearchProc.SaveHeader(createEmptyOutportBlockWithHeader()) require.Nil(t, err) require.True(t, called) } @@ -634,7 +531,7 @@ func TestElasticProcessor_SaveTransactionNoDataShouldNotDoRequest(t *testing.T) called := false arguments := createMockElasticProcessorArgs() arguments.TransactionsProc = &mock.DBTransactionProcessorStub{ - PrepareTransactionsForDatabaseCalled: func(body *dataBlock.Body, header coreData.HeaderHandler, pool *outport.Pool) *data.PreparedResults { + PrepareTransactionsForDatabaseCalled: func(mbs []*dataBlock.MiniBlock, header coreData.HeaderHandler, pool *outport.TransactionPool) *data.PreparedResults { return &data.PreparedResults{ Transactions: nil, ScResults: nil, @@ -655,7 +552,7 @@ func TestElasticProcessor_SaveTransactionNoDataShouldNotDoRequest(t *testing.T) elasticSearchProc := newElasticsearchProcessor(dbWriter, arguments) elasticSearchProc.enabledIndexes[dataindexer.ScResultsIndex] = struct{}{} - err := elasticSearchProc.SaveTransactions(&dataBlock.Body{}, &dataBlock.Header{}, 
&outport.Pool{}, nil, false, 3) + err := elasticSearchProc.SaveTransactions(createEmptyOutportBlockWithHeader()) require.Nil(t, err) require.False(t, called) } diff --git a/process/elasticproc/factory/elasticProcessorFactory.go b/process/elasticproc/factory/elasticProcessorFactory.go index dc5033bc..f10ed48f 100644 --- a/process/elasticproc/factory/elasticProcessorFactory.go +++ b/process/elasticproc/factory/elasticProcessorFactory.go @@ -29,6 +29,7 @@ type ArgElasticProcessorFactory struct { Denomination int BulkRequestMaxSize int UseKibana bool + ImportDB bool } // CreateElasticProcessor will create a new instance of ElasticProcessor @@ -118,6 +119,7 @@ func CreateElasticProcessor(arguments ArgElasticProcessorFactory) (dataindexer.E IndexTemplates: indexTemplates, IndexPolicies: indexPolicies, OperationsProc: operationsProc, + ImportDB: arguments.ImportDB, } return elasticproc.NewElasticProcessor(args) diff --git a/process/elasticproc/interface.go b/process/elasticproc/interface.go index 4cfcb78a..c059e6cb 100644 --- a/process/elasticproc/interface.go +++ b/process/elasticproc/interface.go @@ -2,9 +2,10 @@ package elasticproc import ( "bytes" + "context" - "github.com/elastic/go-elasticsearch/v7/esapi" coreData "github.com/multiversx/mx-chain-core-go/data" + "github.com/multiversx/mx-chain-core-go/data/alteredAccount" "github.com/multiversx/mx-chain-core-go/data/block" "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-es-indexer-go/data" @@ -13,13 +14,12 @@ import ( // DatabaseClientHandler defines the actions that a component that handles requests should do type DatabaseClientHandler interface { - DoRequest(req *esapi.IndexRequest) error - DoBulkRequest(buff *bytes.Buffer, index string) error - DoQueryRemove(index string, buff *bytes.Buffer) error - DoMultiGet(ids []string, index string, withSource bool, res interface{}) error - DoScrollRequest(index string, body []byte, withSource bool, handlerFunc func(responseBytes []byte) 
error) error - DoCountRequest(index string, body []byte) (uint64, error) - UpdateByQuery(index string, buff *bytes.Buffer) error + DoBulkRequest(ctx context.Context, buff *bytes.Buffer, index string) error + DoQueryRemove(ctx context.Context, index string, buff *bytes.Buffer) error + DoMultiGet(ctx context.Context, ids []string, index string, withSource bool, res interface{}) error + DoScrollRequest(ctx context.Context, index string, body []byte, withSource bool, handlerFunc func(responseBytes []byte) error) error + DoCountRequest(ctx context.Context, index string, body []byte) (uint64, error) + UpdateByQuery(ctx context.Context, index string, buff *bytes.Buffer) error CheckAndCreateIndex(index string) error CheckAndCreateAlias(alias string, index string) error @@ -31,11 +31,11 @@ type DatabaseClientHandler interface { // DBAccountHandler defines the actions that an accounts' handler should do type DBAccountHandler interface { - GetAccounts(coreAlteredAccounts map[string]*outport.AlteredAccount) ([]*data.Account, []*data.AccountESDT) + GetAccounts(coreAlteredAccounts map[string]*alteredAccount.AlteredAccount) ([]*data.Account, []*data.AccountESDT) PrepareRegularAccountsMap(timestamp uint64, accounts []*data.Account, shardID uint32) map[string]*data.AccountInfo PrepareAccountsMapESDT(timestamp uint64, accounts []*data.AccountESDT, tagsCount data.CountTags, shardID uint32) (map[string]*data.AccountInfo, data.TokensHandler) PrepareAccountsHistory(timestamp uint64, accounts map[string]*data.AccountInfo, shardID uint32) map[string]*data.AccountBalanceHistory - PutTokenMedataDataInTokens(tokensData []*data.TokenInfo, coreAlteredAccounts map[string]*outport.AlteredAccount) + PutTokenMedataDataInTokens(tokensData []*data.TokenInfo, coreAlteredAccounts map[string]*alteredAccount.AlteredAccount) SerializeAccountsHistory(accounts map[string]*data.AccountBalanceHistory, buffSlice *data.BufferSlice, index string) error SerializeAccounts(accounts map[string]*data.AccountInfo, 
buffSlice *data.BufferSlice, index string) error @@ -46,16 +46,7 @@ type DBAccountHandler interface { // DBBlockHandler defines the actions that a block handler should do type DBBlockHandler interface { - PrepareBlockForDB( - headerHash []byte, - header coreData.HeaderHandler, - signersIndexes []uint64, - body *block.Body, - notarizedHeadersHashes []string, - gasConsumptionData outport.HeaderGasConsumption, - sizeTxs int, - pool *outport.Pool, - ) (*data.Block, error) + PrepareBlockForDB(obh *outport.OutportBlockWithHeader) (*data.Block, error) ComputeHeaderHash(header coreData.HeaderHandler) ([]byte, error) SerializeEpochInfoData(header coreData.HeaderHandler, buffSlice *data.BufferSlice, index string) error @@ -65,45 +56,44 @@ type DBBlockHandler interface { // DBTransactionsHandler defines the actions that a transactions handler should do type DBTransactionsHandler interface { PrepareTransactionsForDatabase( - body *block.Body, + miniBlocks []*block.MiniBlock, header coreData.HeaderHandler, - pool *outport.Pool, + pool *outport.TransactionPool, isImportDB bool, numOfShards uint32, ) *data.PreparedResults GetHexEncodedHashesForRemove(header coreData.HeaderHandler, body *block.Body) ([]string, []string) SerializeReceipts(receipts []*data.Receipt, buffSlice *data.BufferSlice, index string) error - SerializeTransactions(transactions []*data.Transaction, txHashStatus map[string]string, selfShardID uint32, buffSlice *data.BufferSlice, index string) error + SerializeTransactions(transactions []*data.Transaction, txHashStatusInfo map[string]*outport.StatusInfo, selfShardID uint32, buffSlice *data.BufferSlice, index string) error SerializeTransactionsFeeData(txHashRefund map[string]*data.FeeData, buffSlice *data.BufferSlice, index string) error SerializeScResults(scResults []*data.ScResult, buffSlice *data.BufferSlice, index string) error } // DBMiniblocksHandler defines the actions that a miniblocks handler should do type DBMiniblocksHandler interface { - 
PrepareDBMiniblocks(header coreData.HeaderHandler, body *block.Body) []*data.Miniblock + PrepareDBMiniblocks(header coreData.HeaderHandler, miniBlocks []*block.MiniBlock) []*data.Miniblock GetMiniblocksHashesHexEncoded(header coreData.HeaderHandler, body *block.Body) []string - SerializeBulkMiniBlocks(bulkMbs []*data.Miniblock, mbsInDB map[string]bool, buffSlice *data.BufferSlice, index string, shardID uint32) + SerializeBulkMiniBlocks(bulkMbs []*data.Miniblock, buffSlice *data.BufferSlice, index string, shardID uint32) } // DBStatisticsHandler defines the actions that a database statistics handler should do type DBStatisticsHandler interface { - SerializeRoundsInfo(roundsInfo []*data.RoundInfo) *bytes.Buffer + SerializeRoundsInfo(rounds *outport.RoundsInfo) *bytes.Buffer } // DBValidatorsHandler defines the actions that a validators handler should do type DBValidatorsHandler interface { - PrepareValidatorsPublicKeys(shardValidatorsPubKeys [][]byte) *data.ValidatorsPublicKeys - SerializeValidatorsPubKeys(validatorsPubKeys *data.ValidatorsPublicKeys) (*bytes.Buffer, error) - SerializeValidatorsRating(index string, validatorsRatingInfo []*data.ValidatorRatingInfo) ([]*bytes.Buffer, error) + PrepareAnSerializeValidatorsPubKeys(validatorsPubKeys *outport.ValidatorsPubKeys) ([]*bytes.Buffer, error) + SerializeValidatorsRating(ratingData *outport.ValidatorsRating) ([]*bytes.Buffer, error) } // DBLogsAndEventsHandler defines the actions that a logs and events handler should do type DBLogsAndEventsHandler interface { - PrepareLogsForDB(logsAndEvents []*coreData.LogData, timestamp uint64) []*data.Logs + PrepareLogsForDB(logsAndEvents []*outport.LogData, timestamp uint64) []*data.Logs ExtractDataFromLogs( - logsAndEvents []*coreData.LogData, + logsAndEvents []*outport.LogData, preparedResults *data.PreparedResults, timestamp uint64, shardID uint32, @@ -112,6 +102,7 @@ type DBLogsAndEventsHandler interface { SerializeLogs(logs []*data.Logs, buffSlice *data.BufferSlice, index 
string) error SerializeSCDeploys(deploysInfo map[string]*data.ScDeployInfo, buffSlice *data.BufferSlice, index string) error + SerializeChangeOwnerOperations(changeOwnerOperations map[string]*data.OwnerData, buffSlice *data.BufferSlice, index string) error SerializeTokens(tokens []*data.TokenInfo, updateNFTData []*data.NFTDataUpdate, buffSlice *data.BufferSlice, index string) error SerializeDelegators(delegators map[string]*data.Delegator, buffSlice *data.BufferSlice, index string) error SerializeSupplyData(tokensSupply data.TokensHandler, buffSlice *data.BufferSlice, index string) error diff --git a/process/elasticproc/logsevents/delegatorsProcessor.go b/process/elasticproc/logsevents/delegatorsProcessor.go index 953c7d4b..26fd3a82 100644 --- a/process/elasticproc/logsevents/delegatorsProcessor.go +++ b/process/elasticproc/logsevents/delegatorsProcessor.go @@ -80,11 +80,13 @@ func (dp *delegatorsProc) processEvent(args *argsProcessEvent) argOutputProcessE // topics[5:] = unDelegate fund keys in case of withdrawal activeStake := big.NewInt(0).SetBytes(topics[1]) - contractAddr := dp.pubkeyConverter.Encode(args.logAddress) + contractAddr := dp.pubkeyConverter.SilentEncode(args.logAddress, log) if len(topics) >= minNumTopicsDelegators+1 && eventIdentifierStr == delegateFunc { - contractAddr = dp.pubkeyConverter.Encode(topics[4]) + contractAddr = dp.pubkeyConverter.SilentEncode(topics[4], log) } + encodedAddr := dp.pubkeyConverter.SilentEncode(args.event.GetAddress(), log) + activeStakeNum, err := dp.balanceConverter.ComputeBalanceAsFloat(activeStake) if err != nil { log.Warn("delegatorsProc.processEvent cannot compute active stake as num", "active stake", activeStake, @@ -92,7 +94,7 @@ func (dp *delegatorsProc) processEvent(args *argsProcessEvent) argOutputProcessE } delegator := &data.Delegator{ - Address: dp.pubkeyConverter.Encode(args.event.GetAddress()), + Address: encodedAddr, Contract: contractAddr, ActiveStake: activeStake.String(), ActiveStakeNum: 
activeStakeNum, @@ -145,9 +147,12 @@ func (dp *delegatorsProc) getDelegatorFromClaimRewardsEvent(args *argsProcessEve return nil } + encodedAddr := dp.pubkeyConverter.SilentEncode(args.event.GetAddress(), log) + encodedContractAddr := dp.pubkeyConverter.SilentEncode(args.logAddress, log) + return &data.Delegator{ - Address: dp.pubkeyConverter.Encode(args.event.GetAddress()), - Contract: dp.pubkeyConverter.Encode(args.logAddress), + Address: encodedAddr, + Contract: encodedContractAddr, ShouldDelete: shouldDelete, } } diff --git a/process/elasticproc/logsevents/esdtIssueProcessor.go b/process/elasticproc/logsevents/esdtIssueProcessor.go index c348c833..2af66180 100644 --- a/process/elasticproc/logsevents/esdtIssueProcessor.go +++ b/process/elasticproc/logsevents/esdtIssueProcessor.go @@ -75,7 +75,7 @@ func (eip *esdtIssueProcessor) processEvent(args *argsProcessEvent) argOutputPro numDecimals = big.NewInt(0).SetBytes(topics[4]).Uint64() } - encodedAddr := eip.pubkeyConverter.Encode(args.event.GetAddress()) + encodedAddr := eip.pubkeyConverter.SilentEncode(args.event.GetAddress(), log) tokenInfo := &data.TokenInfo{ Token: string(topics[0]), @@ -96,7 +96,7 @@ func (eip *esdtIssueProcessor) processEvent(args *argsProcessEvent) argOutputPro } if identifierStr == transferOwnershipFunc && len(topics) >= numIssueLogTopics+1 { - newOwner := eip.pubkeyConverter.Encode(topics[4]) + newOwner := eip.pubkeyConverter.SilentEncode(topics[4], log) tokenInfo.TransferOwnership = true tokenInfo.CurrentOwner = newOwner tokenInfo.OwnersHistory[0].Address = newOwner diff --git a/process/elasticproc/logsevents/esdtPropertiesProcessor.go b/process/elasticproc/logsevents/esdtPropertiesProcessor.go index 49f293b6..004f9790 100644 --- a/process/elasticproc/logsevents/esdtPropertiesProcessor.go +++ b/process/elasticproc/logsevents/esdtPropertiesProcessor.go @@ -4,6 +4,7 @@ import ( "unicode" "github.com/multiversx/mx-chain-core-go/core" + vmcommon "github.com/multiversx/mx-chain-vm-common-go" 
) const ( @@ -23,10 +24,12 @@ func newEsdtPropertiesProcessor(pubKeyConverter core.PubkeyConverter) *esdtPrope return &esdtPropertiesProc{ pubKeyConverter: pubKeyConverter, rolesOperationsIdentifiers: map[string]struct{}{ - core.BuiltInFunctionSetESDTRole: {}, - core.BuiltInFunctionUnSetESDTRole: {}, - core.BuiltInFunctionESDTNFTCreateRoleTransfer: {}, - upgradePropertiesEvent: {}, + core.BuiltInFunctionSetESDTRole: {}, + core.BuiltInFunctionUnSetESDTRole: {}, + core.BuiltInFunctionESDTNFTCreateRoleTransfer: {}, + upgradePropertiesEvent: {}, + vmcommon.BuiltInFunctionESDTUnSetBurnRoleForAll: {}, + vmcommon.BuiltInFunctionESDTSetBurnRoleForAll: {}, }, } } @@ -67,10 +70,16 @@ func (epp *esdtPropertiesProc) processEvent(args *argsProcessEvent) argOutputPro } } - shouldAddRole := identifier == core.BuiltInFunctionSetESDTRole - addrBech := epp.pubKeyConverter.Encode(args.event.GetAddress()) + shouldAddRole := identifier == core.BuiltInFunctionSetESDTRole || identifier == vmcommon.BuiltInFunctionESDTSetBurnRoleForAll + + addrBech := epp.pubKeyConverter.SilentEncode(args.event.GetAddress(), log) for _, roleBytes := range rolesBytes { - args.tokenRolesAndProperties.AddRole(string(topics[tokenTopicsIndex]), addrBech, string(roleBytes), shouldAddRole) + addr := addrBech + if string(roleBytes) == vmcommon.ESDTRoleBurnForAll { + addr = "" + } + + args.tokenRolesAndProperties.AddRole(string(topics[tokenTopicsIndex]), addr, string(roleBytes), shouldAddRole) } return argOutputProcessEvent{ @@ -81,7 +90,7 @@ func (epp *esdtPropertiesProc) processEvent(args *argsProcessEvent) argOutputPro func (epp *esdtPropertiesProc) extractDataNFTCreateRoleTransfer(args *argsProcessEvent) argOutputProcessEvent { topics := args.event.GetTopics() - addrBech := epp.pubKeyConverter.Encode(args.event.GetAddress()) + addrBech := epp.pubKeyConverter.SilentEncode(args.event.GetAddress(), log) shouldAddCreateRole := bytesToBool(topics[3]) 
args.tokenRolesAndProperties.AddRole(string(topics[tokenTopicsIndex]), addrBech, core.ESDTRoleNFTCreate, shouldAddCreateRole) diff --git a/process/elasticproc/logsevents/informativeLogsProcessor.go b/process/elasticproc/logsevents/informativeLogsProcessor.go index 1e64bf8b..3706f708 100644 --- a/process/elasticproc/logsevents/informativeLogsProcessor.go +++ b/process/elasticproc/logsevents/informativeLogsProcessor.go @@ -1,14 +1,11 @@ package logsevents import ( + "github.com/multiversx/mx-chain-core-go/core" + "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-core-go/data/transaction" ) -const ( - writeLogOperation = "writeLog" - signalErrorOperation = "signalError" -) - type informativeLogsProcessor struct { operations map[string]struct{} } @@ -16,8 +13,10 @@ type informativeLogsProcessor struct { func newInformativeLogsProcessor() *informativeLogsProcessor { return &informativeLogsProcessor{ operations: map[string]struct{}{ - writeLogOperation: {}, - signalErrorOperation: {}, + core.WriteLogIdentifier: {}, + core.SignalErrorOperation: {}, + core.CompletedTxEventIdentifier: {}, + core.InternalVMErrorsOperation: {}, }, } } @@ -31,19 +30,22 @@ func (ilp *informativeLogsProcessor) processEvent(args *argsProcessEvent) argOut tx, ok := args.txs[args.txHashHexEncoded] if !ok { - return argOutputProcessEvent{ - processed: true, - } + return processEventNoTx(args) } switch identifier { - case writeLogOperation: + case core.CompletedTxEventIdentifier: + { + tx.CompletedEvent = true + } + case core.WriteLogIdentifier: { tx.Status = transaction.TxStatusSuccess.String() } - case signalErrorOperation: + case core.SignalErrorOperation, core.InternalVMErrorsOperation: { tx.Status = transaction.TxStatusFail.String() + tx.ErrorEvent = true } } @@ -51,3 +53,31 @@ func (ilp *informativeLogsProcessor) processEvent(args *argsProcessEvent) argOut processed: true, } } + +func processEventNoTx(args *argsProcessEvent) argOutputProcessEvent { + scr, ok 
:= args.scrs[args.txHashHexEncoded] + if !ok { + return argOutputProcessEvent{ + processed: true, + } + } + + record := &outport.StatusInfo{} + switch string(args.event.GetIdentifier()) { + case core.CompletedTxEventIdentifier: + { + record.CompletedEvent = true + } + case core.SignalErrorOperation, core.InternalVMErrorsOperation: + { + record.Status = transaction.TxStatusFail.String() + record.ErrorEvent = true + } + } + + args.txHashStatusInfoProc.addRecord(scr.OriginalTxHash, record) + + return argOutputProcessEvent{ + processed: true, + } +} diff --git a/process/elasticproc/logsevents/informativeLogsProcessor_test.go b/process/elasticproc/logsevents/informativeLogsProcessor_test.go index 6e4e64c5..649bd96f 100644 --- a/process/elasticproc/logsevents/informativeLogsProcessor_test.go +++ b/process/elasticproc/logsevents/informativeLogsProcessor_test.go @@ -3,6 +3,8 @@ package logsevents import ( "testing" + "github.com/multiversx/mx-chain-core-go/core" + "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-core-go/data/transaction" "github.com/multiversx/mx-chain-es-indexer-go/data" "github.com/stretchr/testify/require" @@ -40,7 +42,7 @@ func TestInformativeLogsProcessorWriteLog(t *testing.T) { event := &transaction.Event{ Address: []byte("addr"), - Identifier: []byte(writeLogOperation), + Identifier: []byte(core.WriteLogIdentifier), } args := &argsProcessEvent{ timestamp: 1234, @@ -73,7 +75,7 @@ func TestInformativeLogsProcessorSignalError(t *testing.T) { event := &transaction.Event{ Address: []byte("addr"), - Identifier: []byte(signalErrorOperation), + Identifier: []byte(core.SignalErrorOperation), } args := &argsProcessEvent{ timestamp: 1234, @@ -88,5 +90,134 @@ func TestInformativeLogsProcessorSignalError(t *testing.T) { res := informativeLogsProc.processEvent(args) require.Equal(t, transaction.TxStatusFail.String(), tx.Status) + require.True(t, tx.ErrorEvent) require.Equal(t, true, res.processed) } + +func 
TestInformativeLogsProcessorCompletedEvent(t *testing.T) { + t.Parallel() + + tx := &data.Transaction{ + GasLimit: 200000, + GasPrice: 100000, + Data: []byte("callMe"), + } + + hexEncodedTxHash := "01020304" + txs := map[string]*data.Transaction{} + txs[hexEncodedTxHash] = tx + + event := &transaction.Event{ + Address: []byte("addr"), + Identifier: []byte(core.CompletedTxEventIdentifier), + } + args := &argsProcessEvent{ + timestamp: 1234, + event: event, + logAddress: []byte("contract"), + txs: txs, + txHashHexEncoded: hexEncodedTxHash, + } + + informativeLogsProc := newInformativeLogsProcessor() + + res := informativeLogsProc.processEvent(args) + + require.True(t, tx.CompletedEvent) + require.Equal(t, true, res.processed) +} + +func TestInformativeLogsProcessorLogsGeneratedByScrsSignalError(t *testing.T) { + t.Parallel() + + txHash := "txHash" + scrHash := "scrHash" + scr := &data.ScResult{ + OriginalTxHash: txHash, + } + scrs := make(map[string]*data.ScResult) + scrs[scrHash] = scr + + event := &transaction.Event{ + Address: []byte("addr"), + Identifier: []byte(core.SignalErrorOperation), + } + + txStatusProc := newTxHashStatusInfoProcessor() + args := &argsProcessEvent{ + timestamp: 1234, + event: event, + logAddress: []byte("contract"), + scrs: scrs, + txHashHexEncoded: scrHash, + txHashStatusInfoProc: txStatusProc, + } + + informativeLogsProc := newInformativeLogsProcessor() + res := informativeLogsProc.processEvent(args) + require.True(t, res.processed) + + require.Equal(t, &outport.StatusInfo{ + Status: transaction.TxStatusFail.String(), + ErrorEvent: true, + }, txStatusProc.getAllRecords()[txHash]) +} + +func TestInformativeLogsProcessorLogsGeneratedByScrsCompletedEvent(t *testing.T) { + t.Parallel() + + txHash := "txHash" + scrHash := "scrHash" + scr := &data.ScResult{ + OriginalTxHash: txHash, + } + scrs := make(map[string]*data.ScResult) + scrs[scrHash] = scr + + event := &transaction.Event{ + Address: []byte("addr"), + Identifier: 
[]byte(core.CompletedTxEventIdentifier), + } + + txStatusProc := newTxHashStatusInfoProcessor() + args := &argsProcessEvent{ + timestamp: 1234, + event: event, + logAddress: []byte("contract"), + scrs: scrs, + txHashHexEncoded: scrHash, + txHashStatusInfoProc: txStatusProc, + } + + informativeLogsProc := newInformativeLogsProcessor() + res := informativeLogsProc.processEvent(args) + require.True(t, res.processed) + + require.Equal(t, &outport.StatusInfo{ + CompletedEvent: true, + }, txStatusProc.getAllRecords()[txHash]) +} + +func TestInformativeLogsProcessorLogsGeneratedByScrNotFoundInMap(t *testing.T) { + t.Parallel() + + scrHash := "scrHash" + + event := &transaction.Event{ + Address: []byte("addr"), + Identifier: []byte(core.CompletedTxEventIdentifier), + } + + txStatusProc := newTxHashStatusInfoProcessor() + args := &argsProcessEvent{ + timestamp: 1234, + event: event, + logAddress: []byte("contract"), + txHashHexEncoded: scrHash, + txHashStatusInfoProc: txStatusProc, + } + + informativeLogsProc := newInformativeLogsProcessor() + res := informativeLogsProc.processEvent(args) + require.True(t, res.processed) +} diff --git a/process/elasticproc/logsevents/interface.go b/process/elasticproc/logsevents/interface.go index c6092353..4d434cef 100644 --- a/process/elasticproc/logsevents/interface.go +++ b/process/elasticproc/logsevents/interface.go @@ -2,6 +2,7 @@ package logsevents import ( coreData "github.com/multiversx/mx-chain-core-go/data" + "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-es-indexer-go/data" "github.com/multiversx/mx-chain-es-indexer-go/process/elasticproc/tokeninfo" ) @@ -9,11 +10,14 @@ import ( type argsProcessEvent struct { txHashHexEncoded string scDeploys map[string]*data.ScDeployInfo + changeOwnerOperations map[string]*data.OwnerData txs map[string]*data.Transaction + scrs map[string]*data.ScResult event coreData.EventHandler tokens data.TokensHandler tokensSupply data.TokensHandler 
tokenRolesAndProperties *tokeninfo.TokenRolesAndProperties + txHashStatusInfoProc txHashStatusInfoHandler timestamp uint64 logAddress []byte selfShardID uint32 @@ -30,3 +34,8 @@ type argOutputProcessEvent struct { type eventsProcessor interface { processEvent(args *argsProcessEvent) argOutputProcessEvent } + +type txHashStatusInfoHandler interface { + addRecord(hash string, statusInfo *outport.StatusInfo) + getAllRecords() map[string]*outport.StatusInfo +} diff --git a/process/elasticproc/logsevents/logsAndEventsProcessor.go b/process/elasticproc/logsevents/logsAndEventsProcessor.go index b7a82c73..f633de3c 100644 --- a/process/elasticproc/logsevents/logsAndEventsProcessor.go +++ b/process/elasticproc/logsevents/logsAndEventsProcessor.go @@ -1,12 +1,13 @@ package logsevents import ( - "encoding/hex" "time" "github.com/multiversx/mx-chain-core-go/core" "github.com/multiversx/mx-chain-core-go/core/check" coreData "github.com/multiversx/mx-chain-core-go/data" + "github.com/multiversx/mx-chain-core-go/data/outport" + "github.com/multiversx/mx-chain-core-go/data/transaction" "github.com/multiversx/mx-chain-core-go/hashing" "github.com/multiversx/mx-chain-core-go/marshal" "github.com/multiversx/mx-chain-es-indexer-go/data" @@ -86,7 +87,7 @@ func createEventsProcessors(args ArgsLogsAndEventsProcessor) []eventsProcessor { // ExtractDataFromLogs will extract data from the provided logs and events and put in altered addresses func (lep *logsAndEventsProcessor) ExtractDataFromLogs( - logsAndEvents []*coreData.LogData, + logsAndEvents []*outport.LogData, preparedResults *data.PreparedResults, timestamp uint64, shardID uint32, @@ -95,20 +96,19 @@ func (lep *logsAndEventsProcessor) ExtractDataFromLogs( lep.logsData = newLogsData(timestamp, preparedResults.Transactions, preparedResults.ScResults) for _, txLog := range logsAndEvents { - if txLog == nil || check.IfNil(txLog.LogHandler) { + if txLog == nil { continue } - events := txLog.LogHandler.GetLogEvents() - 
lep.processEvents(txLog.TxHash, txLog.LogHandler.GetAddress(), events, shardID, numOfShards) + events := txLog.Log.Events + lep.processEvents(txLog.TxHash, txLog.Log.Address, events, shardID, numOfShards) - txHashHexEncoded := hex.EncodeToString([]byte(txLog.TxHash)) - tx, ok := lep.logsData.txsMap[txHashHexEncoded] + tx, ok := lep.logsData.txsMap[txLog.TxHash] if ok { tx.HasLogs = true continue } - scr, ok := lep.logsData.scrsMap[txHashHexEncoded] + scr, ok := lep.logsData.scrsMap[txLog.TxHash] if ok { scr.HasLogs = true continue @@ -123,21 +123,22 @@ func (lep *logsAndEventsProcessor) ExtractDataFromLogs( Delegators: lep.logsData.delegators, NFTsDataUpdates: lep.logsData.nftsDataUpdates, TokenRolesAndProperties: lep.logsData.tokenRolesAndProperties, + TxHashStatusInfo: lep.logsData.txHashStatusInfoProc.getAllRecords(), + ChangeOwnerOperations: lep.logsData.changeOwnerOperations, } } -func (lep *logsAndEventsProcessor) processEvents(logHash string, logAddress []byte, events []coreData.EventHandler, shardID uint32, numOfShards uint32) { +func (lep *logsAndEventsProcessor) processEvents(logHashHexEncoded string, logAddress []byte, events []*transaction.Event, shardID uint32, numOfShards uint32) { for _, event := range events { if check.IfNil(event) { continue } - lep.processEvent(logHash, logAddress, event, shardID, numOfShards) + lep.processEvent(logHashHexEncoded, logAddress, event, shardID, numOfShards) } } -func (lep *logsAndEventsProcessor) processEvent(logHash string, logAddress []byte, event coreData.EventHandler, shardID uint32, numOfShards uint32) { - logHashHexEncoded := hex.EncodeToString([]byte(logHash)) +func (lep *logsAndEventsProcessor) processEvent(logHashHexEncoded string, logAddress []byte, event coreData.EventHandler, shardID uint32, numOfShards uint32) { for _, proc := range lep.eventsProcessors { res := proc.processEvent(&argsProcessEvent{ event: event, @@ -148,7 +149,10 @@ func (lep *logsAndEventsProcessor) processEvent(logHash string, 
logAddress []byt timestamp: lep.logsData.timestamp, scDeploys: lep.logsData.scDeploys, txs: lep.logsData.txsMap, + scrs: lep.logsData.scrsMap, tokenRolesAndProperties: lep.logsData.tokenRolesAndProperties, + txHashStatusInfoProc: lep.logsData.txHashStatusInfoProc, + changeOwnerOperations: lep.logsData.changeOwnerOperations, selfShardID: shardID, numOfShards: numOfShards, }) @@ -181,54 +185,56 @@ func (lep *logsAndEventsProcessor) processEvent(logHash string, logAddress []byt // PrepareLogsForDB will prepare logs for database func (lep *logsAndEventsProcessor) PrepareLogsForDB( - logsAndEvents []*coreData.LogData, + logsAndEvents []*outport.LogData, timestamp uint64, ) []*data.Logs { logs := make([]*data.Logs, 0, len(logsAndEvents)) for _, txLog := range logsAndEvents { - if txLog == nil || check.IfNil(txLog.LogHandler) { + if txLog == nil { continue } - logs = append(logs, lep.prepareLogsForDB(txLog.TxHash, txLog.LogHandler, timestamp)) + logs = append(logs, lep.prepareLogsForDB(txLog.TxHash, txLog.Log, timestamp)) } return logs } func (lep *logsAndEventsProcessor) prepareLogsForDB( - id string, - logHandler coreData.LogHandler, + logHashHex string, + eventLogs *transaction.Log, timestamp uint64, ) *data.Logs { - encodedID := hex.EncodeToString([]byte(id)) originalTxHash := "" - scr, ok := lep.logsData.scrsMap[encodedID] + scr, ok := lep.logsData.scrsMap[logHashHex] if ok { originalTxHash = scr.OriginalTxHash } - events := logHandler.GetLogEvents() + encodedAddr := lep.pubKeyConverter.SilentEncode(eventLogs.GetAddress(), log) logsDB := &data.Logs{ - ID: encodedID, + ID: logHashHex, OriginalTxHash: originalTxHash, - Address: lep.pubKeyConverter.Encode(logHandler.GetAddress()), + Address: encodedAddr, Timestamp: time.Duration(timestamp), - Events: make([]*data.Event, 0, len(events)), + Events: make([]*data.Event, 0, len(eventLogs.Events)), } - for idx, event := range events { + for idx, event := range eventLogs.Events { if check.IfNil(event) { continue } + 
encodedAddress := lep.pubKeyConverter.SilentEncode(event.GetAddress(), log) + logsDB.Events = append(logsDB.Events, &data.Event{ - Address: lep.pubKeyConverter.Encode(event.GetAddress()), - Identifier: string(event.GetIdentifier()), - Topics: event.GetTopics(), - Data: event.GetData(), - Order: idx, + Address: encodedAddress, + Identifier: string(event.GetIdentifier()), + Topics: event.GetTopics(), + Data: event.GetData(), + AdditionalData: event.GetAdditionalData(), + Order: idx, }) } diff --git a/process/elasticproc/logsevents/logsAndEventsProcessor_test.go b/process/elasticproc/logsevents/logsAndEventsProcessor_test.go index c2ef33fa..31e4ea90 100644 --- a/process/elasticproc/logsevents/logsAndEventsProcessor_test.go +++ b/process/elasticproc/logsevents/logsAndEventsProcessor_test.go @@ -1,12 +1,13 @@ package logsevents import ( + "encoding/hex" "math/big" "testing" "time" "github.com/multiversx/mx-chain-core-go/core" - coreData "github.com/multiversx/mx-chain-core-go/data" + "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-core-go/data/transaction" "github.com/multiversx/mx-chain-es-indexer-go/data" "github.com/multiversx/mx-chain-es-indexer-go/mock" @@ -57,79 +58,87 @@ func TestNewLogsAndEventsProcessor(t *testing.T) { func TestLogsAndEventsProcessor_ExtractDataFromLogsAndPutInAltered(t *testing.T) { t.Parallel() - logsAndEvents := map[string]coreData.LogHandler{ - "wrong": nil, - "h3": &transaction.Log{ - Events: []*transaction.Event{ - { - Address: []byte("addr"), - Identifier: []byte(core.SCDeployIdentifier), - Topics: [][]byte{[]byte("addr1"), []byte("addr2")}, + logsAndEvents := []*outport.LogData{ + nil, + { + TxHash: hex.EncodeToString([]byte("h3")), + Log: &transaction.Log{ + Events: []*transaction.Event{ + { + Address: []byte("addr"), + Identifier: []byte(core.SCDeployIdentifier), + Topics: [][]byte{[]byte("addr1"), []byte("addr2"), []byte("codeHash")}, + }, }, }, }, - - "h1": &transaction.Log{ - Address: 
[]byte("address"), - Events: []*transaction.Event{ - { - Address: []byte("addr"), - Identifier: []byte(core.BuiltInFunctionESDTNFTTransfer), - Topics: [][]byte{[]byte("my-token"), big.NewInt(0).SetUint64(1).Bytes(), big.NewInt(100).Bytes(), []byte("receiver")}, + { + TxHash: hex.EncodeToString([]byte("h1")), + Log: &transaction.Log{ + Address: []byte("address"), + Events: []*transaction.Event{ + { + Address: []byte("addr"), + Identifier: []byte(core.BuiltInFunctionESDTNFTTransfer), + Topics: [][]byte{[]byte("my-token"), big.NewInt(0).SetUint64(1).Bytes(), big.NewInt(100).Bytes(), []byte("receiver")}, + }, }, }, }, - - "h2": &transaction.Log{ - Events: []*transaction.Event{ - { - Address: []byte("addr"), - Identifier: []byte(core.BuiltInFunctionESDTTransfer), - Topics: [][]byte{[]byte("esdt"), big.NewInt(0).Bytes(), big.NewInt(0).SetUint64(100).Bytes(), []byte("receiver")}, + { + TxHash: hex.EncodeToString([]byte("h2")), + Log: &transaction.Log{ + Events: []*transaction.Event{ + { + Address: []byte("addr"), + Identifier: []byte(core.BuiltInFunctionESDTTransfer), + Topics: [][]byte{[]byte("esdt"), big.NewInt(0).Bytes(), big.NewInt(0).SetUint64(100).Bytes(), []byte("receiver")}, + }, + nil, }, - nil, }, }, - "h4": &transaction.Log{ - Events: []*transaction.Event{ - { - Address: []byte("addr"), - Identifier: []byte(issueSemiFungibleESDTFunc), - Topics: [][]byte{[]byte("SEMI-abcd"), []byte("semi-token"), []byte("SEMI"), []byte(core.SemiFungibleESDT)}, + { + TxHash: hex.EncodeToString([]byte("h4")), + Log: &transaction.Log{ + Events: []*transaction.Event{ + { + Address: []byte("addr"), + Identifier: []byte(issueSemiFungibleESDTFunc), + Topics: [][]byte{[]byte("SEMI-abcd"), []byte("semi-token"), []byte("SEMI"), []byte(core.SemiFungibleESDT)}, + }, + nil, }, - nil, }, }, - "h5": &transaction.Log{ - Address: []byte("contract"), - Events: []*transaction.Event{ - { - Address: []byte("addr"), - Identifier: []byte(delegateFunc), - Topics: [][]byte{big.NewInt(1000).Bytes(), 
big.NewInt(1000000000).Bytes(), big.NewInt(10).Bytes(), big.NewInt(1000000000).Bytes()}, + { + TxHash: hex.EncodeToString([]byte("h5")), + Log: &transaction.Log{ + Address: []byte("contract"), + Events: []*transaction.Event{ + { + Address: []byte("addr"), + Identifier: []byte(delegateFunc), + Topics: [][]byte{big.NewInt(1000).Bytes(), big.NewInt(1000000000).Bytes(), big.NewInt(10).Bytes(), big.NewInt(1000000000).Bytes()}, + }, }, }, }, - "h6": &transaction.Log{ - Address: []byte("contract-second"), - Events: []*transaction.Event{ - { - Address: []byte("addr"), - Identifier: []byte(delegateFunc), - Topics: [][]byte{big.NewInt(1000).Bytes(), big.NewInt(1000000000).Bytes(), big.NewInt(10).Bytes(), big.NewInt(1000000000).Bytes()}, + { + TxHash: hex.EncodeToString([]byte("h6")), + Log: &transaction.Log{ + Address: []byte("contract-second"), + Events: []*transaction.Event{ + { + Address: []byte("addr"), + Identifier: []byte(delegateFunc), + Topics: [][]byte{big.NewInt(1000).Bytes(), big.NewInt(1000000000).Bytes(), big.NewInt(10).Bytes(), big.NewInt(1000000000).Bytes()}, + }, }, }, }, } - logsAndEventsSlice := make([]*coreData.LogData, 0) - for hash, val := range logsAndEvents { - logsAndEventsSlice = append(logsAndEventsSlice, &coreData.LogData{ - TxHash: hash, - LogHandler: val, - }) - } - res := &data.PreparedResults{ Transactions: []*data.Transaction{ { @@ -148,7 +157,7 @@ func TestLogsAndEventsProcessor_ExtractDataFromLogsAndPutInAltered(t *testing.T) args.BalanceConverter = balanceConverter proc, _ := NewLogsAndEventsProcessor(args) - resLogs := proc.ExtractDataFromLogs(logsAndEventsSlice, res, 1000, core.MetachainShardId, 3) + resLogs := proc.ExtractDataFromLogs(logsAndEvents, res, 1000, core.MetachainShardId, 3) require.NotNil(t, resLogs.Tokens) require.True(t, res.Transactions[0].HasOperations) require.True(t, res.ScResults[0].HasOperations) @@ -156,9 +165,11 @@ func TestLogsAndEventsProcessor_ExtractDataFromLogsAndPutInAltered(t *testing.T) require.True(t, 
res.ScResults[0].HasLogs) require.Equal(t, &data.ScDeployInfo{ - TxHash: "6833", - Creator: "6164647232", - Timestamp: uint64(1000), + TxHash: "6833", + Creator: "6164647232", + CurrentOwner: "6164647232", + Timestamp: uint64(1000), + CodeHash: []byte("codeHash"), }, resLogs.ScDeploys["6164647231"]) require.Equal(t, &data.TokenInfo{ @@ -197,29 +208,24 @@ func TestLogsAndEventsProcessor_ExtractDataFromLogsAndPutInAltered(t *testing.T) func TestLogsAndEventsProcessor_PrepareLogsForDB(t *testing.T) { t.Parallel() - logsAndEvents := map[string]coreData.LogHandler{ - "wrong": nil, - - "txHash": &transaction.Log{ - Address: []byte("address"), - Events: []*transaction.Event{ - { - Address: []byte("addr"), - Identifier: []byte(core.BuiltInFunctionESDTNFTTransfer), - Topics: [][]byte{[]byte("my-token"), big.NewInt(0).SetUint64(1).Bytes(), []byte("receiver")}, + logsAndEvents := []*outport.LogData{ + nil, + { + TxHash: hex.EncodeToString([]byte("txHash")), + Log: &transaction.Log{ + Address: []byte("address"), + Events: []*transaction.Event{ + { + Address: []byte("addr"), + Identifier: []byte(core.BuiltInFunctionESDTNFTTransfer), + Topics: [][]byte{[]byte("my-token"), big.NewInt(0).SetUint64(1).Bytes(), []byte("receiver")}, + AdditionalData: [][]byte{[]byte("something")}, + }, }, }, }, } - logsAndEventsSlice := make([]*coreData.LogData, 0) - for hash, val := range logsAndEvents { - logsAndEventsSlice = append(logsAndEventsSlice, &coreData.LogData{ - TxHash: hash, - LogHandler: val, - }) - } - args := createMockArgs() proc, _ := NewLogsAndEventsProcessor(args) @@ -230,7 +236,7 @@ func TestLogsAndEventsProcessor_PrepareLogsForDB(t *testing.T) { }, }}, 1234, 0, 3) - logsDB := proc.PrepareLogsForDB(logsAndEventsSlice, 1234) + logsDB := proc.PrepareLogsForDB(logsAndEvents, 1234) require.Equal(t, &data.Logs{ ID: "747848617368", Address: "61646472657373", @@ -238,9 +244,10 @@ func TestLogsAndEventsProcessor_PrepareLogsForDB(t *testing.T) { Timestamp: time.Duration(1234), Events: 
[]*data.Event{ { - Address: "61646472", - Identifier: core.BuiltInFunctionESDTNFTTransfer, - Topics: [][]byte{[]byte("my-token"), big.NewInt(0).SetUint64(1).Bytes(), []byte("receiver")}, + Address: "61646472", + Identifier: core.BuiltInFunctionESDTNFTTransfer, + Topics: [][]byte{[]byte("my-token"), big.NewInt(0).SetUint64(1).Bytes(), []byte("receiver")}, + AdditionalData: [][]byte{[]byte("something")}, }, }, }, logsDB[0]) @@ -249,9 +256,10 @@ func TestLogsAndEventsProcessor_PrepareLogsForDB(t *testing.T) { func TestLogsAndEventsProcessor_ExtractDataFromLogsNFTBurn(t *testing.T) { t.Parallel() - logsAndEventsSlice := make([]*coreData.LogData, 1) - logsAndEventsSlice[0] = &coreData.LogData{ - LogHandler: &transaction.Log{ + logsAndEventsSlice := make([]*outport.LogData, 1) + logsAndEventsSlice[0] = &outport.LogData{ + TxHash: "h1", + Log: &transaction.Log{ Address: []byte("address"), Events: []*transaction.Event{ { @@ -261,7 +269,6 @@ func TestLogsAndEventsProcessor_ExtractDataFromLogsNFTBurn(t *testing.T) { }, }, }, - TxHash: "h1", } res := &data.PreparedResults{ diff --git a/process/elasticproc/logsevents/logsData.go b/process/elasticproc/logsevents/logsData.go index 0936e0f0..caacb880 100644 --- a/process/elasticproc/logsevents/logsData.go +++ b/process/elasticproc/logsevents/logsData.go @@ -8,11 +8,13 @@ import ( type logsData struct { timestamp uint64 + txHashStatusInfoProc txHashStatusInfoHandler tokens data.TokensHandler tokensSupply data.TokensHandler txsMap map[string]*data.Transaction scrsMap map[string]*data.ScResult scDeploys map[string]*data.ScDeployInfo + changeOwnerOperations map[string]*data.OwnerData delegators map[string]*data.Delegator tokensInfo []*data.TokenInfo nftsDataUpdates []*data.NFTDataUpdate @@ -34,8 +36,10 @@ func newLogsData( ld.scDeploys = make(map[string]*data.ScDeployInfo) ld.tokensInfo = make([]*data.TokenInfo, 0) ld.delegators = make(map[string]*data.Delegator) + ld.changeOwnerOperations = make(map[string]*data.OwnerData) 
ld.nftsDataUpdates = make([]*data.NFTDataUpdate, 0) ld.tokenRolesAndProperties = tokeninfo.NewTokenRolesAndProperties() + ld.txHashStatusInfoProc = newTxHashStatusInfoProcessor() return ld } diff --git a/process/elasticproc/logsevents/nftsProcessor.go b/process/elasticproc/logsevents/nftsProcessor.go index c516e523..95716f15 100644 --- a/process/elasticproc/logsevents/nftsProcessor.go +++ b/process/elasticproc/logsevents/nftsProcessor.go @@ -7,8 +7,8 @@ import ( "github.com/multiversx/mx-chain-core-go/core" "github.com/multiversx/mx-chain-core-go/core/sharding" coreData "github.com/multiversx/mx-chain-core-go/data" + "github.com/multiversx/mx-chain-core-go/data/alteredAccount" "github.com/multiversx/mx-chain-core-go/data/esdt" - "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-core-go/marshal" "github.com/multiversx/mx-chain-es-indexer-go/data" "github.com/multiversx/mx-chain-es-indexer-go/process/elasticproc/converters" @@ -154,15 +154,19 @@ func (np *nftsProcessor) processNFTEventOnSender( }) } -func (np *nftsProcessor) convertMetaData(metaData *esdt.MetaData) *outport.TokenMetaData { +func (np *nftsProcessor) convertMetaData(metaData *esdt.MetaData) *alteredAccount.TokenMetaData { if metaData == nil { return nil } + encodedCreatorAddr, err := np.pubKeyConverter.Encode(metaData.Creator) + if err != nil { + log.Warn("nftsProcessor.convertMetaData", "cannot encode creator address", "error", err, "address", metaData.Creator) + } - return &outport.TokenMetaData{ + return &alteredAccount.TokenMetaData{ Nonce: metaData.Nonce, Name: string(metaData.Name), - Creator: np.pubKeyConverter.Encode(metaData.Creator), + Creator: encodedCreatorAddr, Royalties: metaData.Royalties, Hash: metaData.Hash, URIs: metaData.URIs, diff --git a/process/elasticproc/logsevents/nftsPropertiesProcessor.go b/process/elasticproc/logsevents/nftsPropertiesProcessor.go index c21d819c..990e844e 100644 --- 
a/process/elasticproc/logsevents/nftsPropertiesProcessor.go +++ b/process/elasticproc/logsevents/nftsPropertiesProcessor.go @@ -30,13 +30,14 @@ func newNFTsPropertiesProcessor(pubKeyConverter core.PubkeyConverter) *nftsPrope } func (npp *nftsPropertiesProc) processEvent(args *argsProcessEvent) argOutputProcessEvent { + //nolint eventIdentifier := string(args.event.GetIdentifier()) _, ok := npp.propertiesChangeOperations[eventIdentifier] if !ok { return argOutputProcessEvent{} } - callerAddress := npp.pubKeyConverter.Encode(args.event.GetAddress()) + callerAddress := npp.pubKeyConverter.SilentEncode(args.event.GetAddress(), log) if callerAddress == "" { return argOutputProcessEvent{ processed: true, @@ -59,6 +60,13 @@ func (npp *nftsPropertiesProc) processEvent(args *argsProcessEvent) argOutputPro } } + callerAddress = npp.pubKeyConverter.SilentEncode(args.event.GetAddress(), log) + if callerAddress == "" { + return argOutputProcessEvent{ + processed: true, + } + } + nonceBig := big.NewInt(0).SetBytes(topics[1]) if nonceBig.Uint64() == 0 { // this is a fungible token so we should return diff --git a/process/elasticproc/logsevents/scDeploys.go b/process/elasticproc/logsevents/scDeploys.go index 95e2d76b..03c990ff 100644 --- a/process/elasticproc/logsevents/scDeploys.go +++ b/process/elasticproc/logsevents/scDeploys.go @@ -1,10 +1,17 @@ package logsevents import ( + "time" + "github.com/multiversx/mx-chain-core-go/core" "github.com/multiversx/mx-chain-es-indexer-go/data" ) +const ( + numTopicsChangeOwner = 1 + minTopicsContractEvent = 3 +) + type scDeploysProcessor struct { scDeploysIdentifiers map[string]struct{} pubKeyConverter core.PubkeyConverter @@ -14,8 +21,9 @@ func newSCDeploysProcessor(pubKeyConverter core.PubkeyConverter) *scDeploysProce return &scDeploysProcessor{ pubKeyConverter: pubKeyConverter, scDeploysIdentifiers: map[string]struct{}{ - core.SCDeployIdentifier: {}, - core.SCUpgradeIdentifier: {}, + core.SCDeployIdentifier: {}, + 
core.SCUpgradeIdentifier: {}, + core.BuiltInFunctionChangeOwnerAddress: {}, }, } } @@ -28,17 +36,40 @@ func (sdp *scDeploysProcessor) processEvent(args *argsProcessEvent) argOutputPro } topics := args.event.GetTopics() - if len(topics) < 2 { + isChangeOwnerEvent := len(topics) == numTopicsChangeOwner && eventIdentifier == core.BuiltInFunctionChangeOwnerAddress + if isChangeOwnerEvent { + return sdp.processChangeOwnerEvent(args) + } + + if len(topics) < minTopicsContractEvent { return argOutputProcessEvent{ processed: true, } } - scAddress := sdp.pubKeyConverter.Encode(topics[0]) + scAddress := sdp.pubKeyConverter.SilentEncode(topics[0], log) + creatorAddress := sdp.pubKeyConverter.SilentEncode(topics[1], log) + args.scDeploys[scAddress] = &data.ScDeployInfo{ + TxHash: args.txHashHexEncoded, + Creator: creatorAddress, + CurrentOwner: creatorAddress, + CodeHash: topics[2], + Timestamp: args.timestamp, + } + + return argOutputProcessEvent{ + processed: true, + } +} + +func (sdp *scDeploysProcessor) processChangeOwnerEvent(args *argsProcessEvent) argOutputProcessEvent { + scAddress := sdp.pubKeyConverter.SilentEncode(args.event.GetAddress(), log) + newOwner := sdp.pubKeyConverter.SilentEncode(args.event.GetTopics()[0], log) + args.changeOwnerOperations[scAddress] = &data.OwnerData{ TxHash: args.txHashHexEncoded, - Creator: sdp.pubKeyConverter.Encode(topics[1]), - Timestamp: args.timestamp, + Address: newOwner, + Timestamp: time.Duration(args.timestamp), } return argOutputProcessEvent{ diff --git a/process/elasticproc/logsevents/scDeploys_test.go b/process/elasticproc/logsevents/scDeploys_test.go index dc602cf0..a725fb4b 100644 --- a/process/elasticproc/logsevents/scDeploys_test.go +++ b/process/elasticproc/logsevents/scDeploys_test.go @@ -18,7 +18,7 @@ func TestScDeploysProcessor(t *testing.T) { event := &transaction.Event{ Address: []byte("addr"), Identifier: []byte(core.SCDeployIdentifier), - Topics: [][]byte{[]byte("addr1"), []byte("addr2")}, + Topics: 
[][]byte{[]byte("addr1"), []byte("addr2"), []byte("codeHash")}, } scDeploys := map[string]*data.ScDeployInfo{} @@ -31,8 +31,35 @@ func TestScDeploysProcessor(t *testing.T) { require.True(t, res.processed) require.Equal(t, &data.ScDeployInfo{ - TxHash: "01020304", - Creator: "6164647232", - Timestamp: uint64(1000), + TxHash: "01020304", + Creator: "6164647232", + Timestamp: uint64(1000), + CurrentOwner: "6164647232", + CodeHash: []byte("codeHash"), }, scDeploys["6164647231"]) } + +func TestScDeploysProcessorChangeOwner(t *testing.T) { + event := &transaction.Event{ + Address: []byte("contractAddr"), + Identifier: []byte(core.BuiltInFunctionChangeOwnerAddress), + Topics: [][]byte{[]byte("newOwner")}, + } + + scDeploysProc := newSCDeploysProcessor(&mock.PubkeyConverterMock{}) + + changeOwnerOperations := map[string]*data.OwnerData{} + res := scDeploysProc.processEvent(&argsProcessEvent{ + event: event, + changeOwnerOperations: changeOwnerOperations, + timestamp: 2000, + txHashHexEncoded: "01020304", + }) + require.True(t, res.processed) + + require.Equal(t, &data.OwnerData{ + TxHash: "01020304", + Address: "6e65774f776e6572", + Timestamp: 2000, + }, changeOwnerOperations["636f6e747261637441646472"]) +} diff --git a/process/elasticproc/logsevents/serialize.go b/process/elasticproc/logsevents/serialize.go index f519074d..488b1282 100644 --- a/process/elasticproc/logsevents/serialize.go +++ b/process/elasticproc/logsevents/serialize.go @@ -49,6 +49,44 @@ func (*logsAndEventsProcessor) SerializeLogs(logs []*data.Logs, buffSlice *data. 
return nil } +// SerializeChangeOwnerOperations will serialize the provided change owner operations in a way that Elasticsearch expects a bulk request +func (*logsAndEventsProcessor) SerializeChangeOwnerOperations(changeOwnerOperations map[string]*data.OwnerData, buffSlice *data.BufferSlice, index string) error { + for scAddr, ownerData := range changeOwnerOperations { + meta := []byte(fmt.Sprintf(`{ "update" : { "_index":"%s", "_id" : "%s" } }%s`, index, converters.JsonEscape(scAddr), "\n")) + ownerDataBytes, err := json.Marshal(ownerData) + if err != nil { + return err + } + + codeToExecute := ` + if ('create' == ctx.op) { + return + } else { + ctx._source.currentOwner = params.ownerData.address; + if (!ctx._source.containsKey('owners')) { + ctx._source.owners = [params.ownerData]; + } else { + ctx._source.owners.add(params.ownerData); + } + } +` + serializedDataStr := fmt.Sprintf(`{"scripted_upsert": true, "script": {`+ + `"source": "%s",`+ + `"lang": "painless",`+ + `"params": { "ownerData": %s }},`+ + `"upsert": {}}`, + converters.FormatPainlessSource(codeToExecute), ownerDataBytes, + ) + + err = buffSlice.PutData(meta, []byte(serializedDataStr)) + if err != nil { + return err + } + } + + return nil +} + // SerializeSCDeploys will serialize the provided smart contract deploys in a way that Elasticsearch expects a bulk request func (*logsAndEventsProcessor) SerializeSCDeploys(deploys map[string]*data.ScDeployInfo, buffSlice *data.BufferSlice, index string) error { for scAddr, deployInfo := range deploys { @@ -70,6 +108,7 @@ func (*logsAndEventsProcessor) SerializeSCDeploys(deploys map[string]*data.ScDep func serializeDeploy(deployInfo *data.ScDeployInfo) ([]byte, error) { deployInfo.Upgrades = make([]*data.Upgrade, 0) + deployInfo.OwnersHistory = make([]*data.OwnerData, 0) serializedData, errPrepareD := json.Marshal(deployInfo) if errPrepareD != nil { return nil, errPrepareD @@ -79,6 +118,7 @@ func serializeDeploy(deployInfo *data.ScDeployInfo) ([]byte, error) 
{ TxHash: deployInfo.TxHash, Upgrader: deployInfo.Creator, Timestamp: deployInfo.Timestamp, + CodeHash: deployInfo.CodeHash, } upgradeSerialized, errPrepareU := json.Marshal(upgradeData) if errPrepareU != nil { @@ -259,7 +299,10 @@ func serializeRoleData(buffSlice *data.BufferSlice, rd *tokeninfo.RoleData, role codeToExecute := ` if (ctx._source.containsKey('roles')) { if (ctx._source.roles.containsKey(params.role)) { - ctx._source.roles.get(params.role).removeIf(p -> p.equals(params.address)) + ctx._source.roles.get(params.role).removeIf(p -> p.equals(params.address)); + if (ctx._source.roles.get(params.role).length == 0) { + ctx._source.roles.remove(params.role) + } } } ` diff --git a/process/elasticproc/logsevents/serialize_test.go b/process/elasticproc/logsevents/serialize_test.go index ad01c59f..99d80304 100644 --- a/process/elasticproc/logsevents/serialize_test.go +++ b/process/elasticproc/logsevents/serialize_test.go @@ -57,7 +57,7 @@ func TestLogsAndEventsProcessor_SerializeSCDeploys(t *testing.T) { require.Nil(t, err) expectedRes := `{ "update" : { "_index":"scdeploys", "_id" : "scAddr" } } -{"script": {"source": "if (!ctx._source.containsKey('upgrades')) {ctx._source.upgrades = [params.elem];} else {ctx._source.upgrades.add(params.elem);}","lang": "painless","params": {"elem": {"upgradeTxHash":"hash","upgrader":"creator","timestamp":123}}},"upsert": {"deployTxHash":"hash","deployer":"creator","timestamp":123,"upgrades":[]}} +{"script": {"source": "if (!ctx._source.containsKey('upgrades')) {ctx._source.upgrades = [params.elem];} else {ctx._source.upgrades.add(params.elem);}","lang": "painless","params": {"elem": {"upgradeTxHash":"hash","upgrader":"creator","timestamp":123,"codeHash":null}}},"upsert": {"deployTxHash":"hash","deployer":"creator","currentOwner":"","initialCodeHash":null,"timestamp":123,"upgrades":[],"owners":[]}} ` require.Equal(t, expectedRes, buffSlice.Buffers()[0].String()) } diff --git a/process/elasticproc/logsevents/statusInfo.go 
b/process/elasticproc/logsevents/statusInfo.go new file mode 100644 index 00000000..80d89316 --- /dev/null +++ b/process/elasticproc/logsevents/statusInfo.go @@ -0,0 +1,38 @@ +package logsevents + +import ( + "github.com/multiversx/mx-chain-core-go/data/outport" + "github.com/multiversx/mx-chain-core-go/data/transaction" +) + +type txHashStatusInfoProc struct { + hashStatusInfo map[string]*outport.StatusInfo +} + +// newTxHashStatusInfoProcessor will create a new instance of txHashStatusInfoProc +func newTxHashStatusInfoProcessor() *txHashStatusInfoProc { + return &txHashStatusInfoProc{ + hashStatusInfo: make(map[string]*outport.StatusInfo), + } +} + +// addRecord will add a new record for the given hash +func (ths *txHashStatusInfoProc) addRecord(hash string, statusInfo *outport.StatusInfo) { + statusInfoFromMap, found := ths.hashStatusInfo[hash] + if !found { + ths.hashStatusInfo[hash] = statusInfo + return + } + + if statusInfoFromMap.Status != transaction.TxStatusFail.String() { + statusInfoFromMap.Status = statusInfo.Status + } + + statusInfoFromMap.ErrorEvent = statusInfoFromMap.ErrorEvent || statusInfo.ErrorEvent + statusInfoFromMap.CompletedEvent = statusInfoFromMap.CompletedEvent || statusInfo.CompletedEvent +} + +// getAllRecords will return all the records +func (ths *txHashStatusInfoProc) getAllRecords() map[string]*outport.StatusInfo { + return ths.hashStatusInfo +} diff --git a/process/elasticproc/logsevents/statusInfo_test.go b/process/elasticproc/logsevents/statusInfo_test.go new file mode 100644 index 00000000..280c8570 --- /dev/null +++ b/process/elasticproc/logsevents/statusInfo_test.go @@ -0,0 +1,47 @@ +package logsevents + +import ( + "testing" + + "github.com/multiversx/mx-chain-core-go/data/outport" + "github.com/multiversx/mx-chain-core-go/data/transaction" + "github.com/stretchr/testify/require" +) + +func TestStatusInfoAddRecord(t *testing.T) { + t.Parallel() + + statusInfoProc := newTxHashStatusInfoProcessor() + + txHash := "txHash1" + 
statusInfoProc.addRecord(txHash, &outport.StatusInfo{ + CompletedEvent: true, + ErrorEvent: false, + Status: transaction.TxStatusSuccess.String(), + }) + require.Equal(t, &outport.StatusInfo{ + CompletedEvent: true, + Status: "success", + }, statusInfoProc.getAllRecords()[txHash]) + + statusInfoProc.addRecord(txHash, &outport.StatusInfo{ + ErrorEvent: true, + Status: transaction.TxStatusFail.String(), + }) + require.Equal(t, &outport.StatusInfo{ + CompletedEvent: true, + ErrorEvent: true, + Status: "fail", + }, statusInfoProc.getAllRecords()[txHash]) + + statusInfoProc.addRecord(txHash, &outport.StatusInfo{ + ErrorEvent: false, + CompletedEvent: false, + Status: transaction.TxStatusSuccess.String(), + }) + require.Equal(t, &outport.StatusInfo{ + CompletedEvent: true, + ErrorEvent: true, + Status: "fail", + }, statusInfoProc.getAllRecords()[txHash]) +} diff --git a/process/elasticproc/miniblocks/miniblocksProcessor.go b/process/elasticproc/miniblocks/miniblocksProcessor.go index fd8067b9..c4c1c061 100644 --- a/process/elasticproc/miniblocks/miniblocksProcessor.go +++ b/process/elasticproc/miniblocks/miniblocksProcessor.go @@ -40,23 +40,29 @@ func NewMiniblocksProcessor( }, nil } -// PrepareDBMiniblocks will prepare miniblocks from body -func (mp *miniblocksProcessor) PrepareDBMiniblocks(header coreData.HeaderHandler, body *block.Body) []*data.Miniblock { +// PrepareDBMiniblocks will prepare miniblocks +func (mp *miniblocksProcessor) PrepareDBMiniblocks(header coreData.HeaderHandler, miniBlocks []*block.MiniBlock) []*data.Miniblock { headerHash, err := mp.calculateHash(header) if err != nil { log.Warn("indexer: could not calculate header hash", "error", err) return nil } + selfShard := header.GetShardID() dbMiniblocks := make([]*data.Miniblock, 0) - for mbIndex, miniblock := range body.MiniBlocks { - dbMiniblock, errPrepareMiniblock := mp.prepareMiniblockForDB(mbIndex, miniblock, header, headerHash) - if errPrepareMiniblock != nil { - 
log.Warn("miniblocksProcessor.PrepareDBMiniblocks cannot prepare miniblock", "error", errPrepareMiniblock) + for mbIndex, miniBlock := range miniBlocks { + if miniBlock.ReceiverShardID == core.AllShardId && selfShard != core.MetachainShardId { + // will not index the miniblock on the destination if is for all shards continue } - dbMiniblocks = append(dbMiniblocks, dbMiniblock) + dbMiniBlock, errPrepareMiniBlock := mp.prepareMiniblockForDB(mbIndex, miniBlock, header, headerHash) + if errPrepareMiniBlock != nil { + log.Warn("miniblocksProcessor.PrepareDBMiniBlocks cannot prepare miniblock", "error", errPrepareMiniBlock) + continue + } + + dbMiniblocks = append(dbMiniblocks, dbMiniBlock) } return dbMiniblocks @@ -88,7 +94,7 @@ func (mp *miniblocksProcessor) prepareMiniblockForDB( isIntraShard := dbMiniblock.SenderShardID == dbMiniblock.ReceiverShardID isCrossOnSource := !isIntraShard && dbMiniblock.SenderShardID == header.GetShardID() if isIntraShard || isCrossOnSource { - mp.setFieldsMBIntraShardAndCrossFromMe(mbIndex, header, encodedHeaderHash, dbMiniblock) + mp.setFieldsMBIntraShardAndCrossFromMe(mbIndex, header, encodedHeaderHash, dbMiniblock, isIntraShard) return dbMiniblock, nil } @@ -105,6 +111,7 @@ func (mp *miniblocksProcessor) setFieldsMBIntraShardAndCrossFromMe( header coreData.HeaderHandler, headerHash string, dbMiniblock *data.Miniblock, + isIntraShard bool, ) { processingType, constructionState := mp.computeProcessingTypeAndConstructionState(mbIndex, header) @@ -112,9 +119,11 @@ func (mp *miniblocksProcessor) setFieldsMBIntraShardAndCrossFromMe( switch { case constructionState == int32(block.Final) && processingType == block.Normal.String(): dbMiniblock.SenderBlockHash = headerHash - dbMiniblock.ReceiverBlockHash = headerHash dbMiniblock.ProcessingTypeOnSource = processingType - dbMiniblock.ProcessingTypeOnDestination = processingType + if isIntraShard { + dbMiniblock.ReceiverBlockHash = headerHash + dbMiniblock.ProcessingTypeOnDestination = 
processingType + } case constructionState == int32(block.Proposed) && processingType == block.Scheduled.String(): dbMiniblock.SenderBlockHash = headerHash dbMiniblock.ProcessingTypeOnSource = processingType @@ -127,7 +136,7 @@ func (mp *miniblocksProcessor) setFieldsMBIntraShardAndCrossFromMe( func (mp *miniblocksProcessor) computeProcessingTypeAndConstructionState(mbIndex int, header coreData.HeaderHandler) (string, int32) { miniblockHeaders := header.GetMiniBlockHeaderHandlers() if len(miniblockHeaders) <= mbIndex { - return "", 0 + return block.Normal.String(), int32(block.Final) } processingType := miniblockHeaders[mbIndex].GetProcessingType() diff --git a/process/elasticproc/miniblocks/miniblocksProcessor_test.go b/process/elasticproc/miniblocks/miniblocksProcessor_test.go index 3ca8f4b2..f9cf687b 100644 --- a/process/elasticproc/miniblocks/miniblocksProcessor_test.go +++ b/process/elasticproc/miniblocks/miniblocksProcessor_test.go @@ -6,6 +6,7 @@ import ( dataBlock "github.com/multiversx/mx-chain-core-go/data/block" "github.com/multiversx/mx-chain-core-go/hashing" "github.com/multiversx/mx-chain-core-go/marshal" + "github.com/multiversx/mx-chain-es-indexer-go/data" "github.com/multiversx/mx-chain-es-indexer-go/mock" "github.com/multiversx/mx-chain-es-indexer-go/process/dataindexer" "github.com/stretchr/testify/require" @@ -64,7 +65,7 @@ func TestMiniblocksProcessor_PrepareDBMiniblocks(t *testing.T) { }, } - miniblocks := mp.PrepareDBMiniblocks(header, body) + miniblocks := mp.PrepareDBMiniblocks(header, body.MiniBlocks) require.Len(t, miniblocks, 3) } @@ -90,22 +91,35 @@ func TestMiniblocksProcessor_PrepareScheduledMB(t *testing.T) { }, }, } - body := &dataBlock.Body{ - MiniBlocks: []*dataBlock.MiniBlock{ - { - SenderShardID: 0, - ReceiverShardID: 1, - }, - { - SenderShardID: 0, - ReceiverShardID: 1, - }, + miniBlocks := []*dataBlock.MiniBlock{ + { + SenderShardID: 0, + ReceiverShardID: 1, + }, + { + SenderShardID: 0, + ReceiverShardID: 1, + }, + { + 
SenderShardID: 0, + ReceiverShardID: 0, }, } - miniblocks := mp.PrepareDBMiniblocks(header, body) - require.Len(t, miniblocks, 2) + miniblocks := mp.PrepareDBMiniblocks(header, miniBlocks) + require.Len(t, miniblocks, 3) require.Equal(t, dataBlock.Scheduled.String(), miniblocks[1].ProcessingTypeOnSource) + + require.Equal(t, &data.Miniblock{ + Hash: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + SenderShardID: 0, + ReceiverShardID: 0, + SenderBlockHash: "84b80cbf612d067201b5260b4c6f90fa7b5c11e898fa9c1c65f4c75e61e41619", + ReceiverBlockHash: "84b80cbf612d067201b5260b4c6f90fa7b5c11e898fa9c1c65f4c75e61e41619", + ProcessingTypeOnSource: dataBlock.Normal.String(), + ProcessingTypeOnDestination: dataBlock.Normal.String(), + Type: dataBlock.TxBlock.String(), + }, miniblocks[2]) } func TestMiniblocksProcessor_GetMiniblocksHashesHexEncoded(t *testing.T) { @@ -136,9 +150,9 @@ func TestMiniblocksProcessor_GetMiniblocksHashesHexEncoded(t *testing.T) { } expectedHashes := []string{ - "c57392e53257b4861f5e406349a8deb89c6dbc2127564ee891a41a188edbf01a", - "28fda294dc987e5099d75e53cd6f87a9a42b96d55242a634385b5d41175c0c21", - "44136fa355b3678a1146ad16f7e8649e94fb4fc21fe77e8310c060f61caaff8a", + "0796d34e8d443fd31bf4d9ec4051421b4d5d0e8c1db9ff942d6f4dc3a9ca2803", + "4cc379ab1f0aef6602e85a0a7ffabb5bc9a2ba646dc0fd720028e06527bf873f", + "8748c4677b01f7db984004fa8465afbf55feaab4b573174c8c0afa282941b9e4", } miniblocksHashes := mp.GetMiniblocksHashesHexEncoded(header, body) require.Equal(t, expectedHashes, miniblocksHashes) @@ -177,9 +191,9 @@ func TestMiniblocksProcessor_GetMiniblocksHashesHexEncodedImportDBMode(t *testin } expectedHashes := []string{ - "3acf97c324f5e8cd1e2d87de862b3105a9f08262c7914be2e186ced2a1cf1124", - "40a551b2ebc5e4b5a55e73d49ec056c72af6314606850c4d54dadfad3a7e23e5", - "4a270e1ddac6b429c14c7ebccdcdd53e4f68aeebfc41552c775a7f5a5c35d06d", + "11a1bb4065e16a2e93b2b5ac5957b7b69f1cfba7579b170b24f30dab2d3162e0", + 
"68e9a4360489ab7a6e99f92e05d1a3f06a982b7b157ac23fdf39f2392bf88e15", + "d1fd2a5c95c8899ebbaad035b6b0f77c5103b3aacfe630b1a7c51468d682bb1b", } miniblocksHashes := mp.GetMiniblocksHashesHexEncoded(header, body) require.Equal(t, expectedHashes, miniblocksHashes) diff --git a/process/elasticproc/miniblocks/serialize.go b/process/elasticproc/miniblocks/serialize.go index 20844538..495ef22e 100644 --- a/process/elasticproc/miniblocks/serialize.go +++ b/process/elasticproc/miniblocks/serialize.go @@ -12,13 +12,12 @@ import ( // SerializeBulkMiniBlocks will serialize the provided miniblocks slice in a way that Elasticsearch expects a bulk request func (mp *miniblocksProcessor) SerializeBulkMiniBlocks( bulkMbs []*data.Miniblock, - existsInDb map[string]bool, buffSlice *data.BufferSlice, index string, shardID uint32, ) { for _, mb := range bulkMbs { - meta, serializedData, err := mp.prepareMiniblockData(mb, existsInDb[mb.Hash], index, shardID) + meta, serializedData, err := mp.prepareMiniblockData(mb, index, shardID) if err != nil { log.Warn("miniblocksProcessor.prepareMiniblockData cannot prepare miniblock data", "error", err) continue @@ -32,28 +31,40 @@ func (mp *miniblocksProcessor) SerializeBulkMiniBlocks( } } -func (mp *miniblocksProcessor) prepareMiniblockData(miniblockDB *data.Miniblock, isInDB bool, index string, shardID uint32) ([]byte, []byte, error) { +func (mp *miniblocksProcessor) prepareMiniblockData(miniblockDB *data.Miniblock, index string, shardID uint32) ([]byte, []byte, error) { mbHash := miniblockDB.Hash miniblockDB.Hash = "" - if !isInDB { - meta := []byte(fmt.Sprintf(`{ "index" : { "_index":"%s", "_id" : "%s"} }%s`, index, converters.JsonEscape(mbHash), "\n")) - serializedData, err := json.Marshal(miniblockDB) - - return meta, serializedData, err + mbBytes, errMarshal := json.Marshal(miniblockDB) + if errMarshal != nil { + return nil, nil, errMarshal } // prepare data for update operation meta := []byte(fmt.Sprintf(`{ "update" : {"_index":"%s", "_id" : 
"%s" } }%s`, index, converters.JsonEscape(mbHash), "\n")) - if shardID == miniblockDB.SenderShardID && miniblockDB.ProcessingTypeOnDestination != block.Processed.String() { - // prepare for update sender block hash - serializedData := []byte(fmt.Sprintf(`{ "doc" : { "senderBlockHash" : "%s", "procTypeS": "%s" } }`, converters.JsonEscape(miniblockDB.SenderBlockHash), converters.JsonEscape(miniblockDB.ProcessingTypeOnSource))) - return meta, serializedData, nil + notProcessedOnSource := shardID == miniblockDB.SenderShardID && miniblockDB.ProcessingTypeOnDestination != block.Processed.String() + codeToExecute := ` + if ('create' == ctx.op) { + ctx._source = params.mb + } else { + if (params.npos) { + ctx._source.senderBlockHash = params.mb.senderBlockHash; + ctx._source.procTypeS = params.mb.procTypeS; + } else { + ctx._source.receiverBlockHash = params.mb.receiverBlockHash; + ctx._source.procTypeD = params.mb.procTypeD; + } } +` - // prepare for update receiver block hash - serializedData := []byte(fmt.Sprintf(`{ "doc" : { "receiverBlockHash" : "%s", "procTypeD": "%s" } }`, converters.JsonEscape(miniblockDB.ReceiverBlockHash), converters.JsonEscape(miniblockDB.ProcessingTypeOnDestination))) + serializedDataStr := fmt.Sprintf(`{"scripted_upsert": true, "script": {`+ + `"source": "%s",`+ + `"lang": "painless",`+ + `"params": { "mb": %s, "npos": %t }},`+ + `"upsert": {}}`, + converters.FormatPainlessSource(codeToExecute), mbBytes, notProcessedOnSource, + ) - return meta, serializedData, nil + return meta, []byte(serializedDataStr), nil } diff --git a/process/elasticproc/miniblocks/serialize_test.go b/process/elasticproc/miniblocks/serialize_test.go index 5ce07e53..612f3ecd 100644 --- a/process/elasticproc/miniblocks/serialize_test.go +++ b/process/elasticproc/miniblocks/serialize_test.go @@ -20,12 +20,12 @@ func TestMiniblocksProcessor_SerializeBulkMiniBlocks(t *testing.T) { } buffSlice := data.NewBufferSlice(data.DefaultMaxBulkSize) - 
mp.SerializeBulkMiniBlocks(miniblocks, nil, buffSlice, "miniblocks", 0) + mp.SerializeBulkMiniBlocks(miniblocks, buffSlice, "miniblocks", 0) - expectedBuff := `{ "index" : { "_index":"miniblocks", "_id" : "h1"} } -{"senderShard":0,"receiverShard":1,"type":"","timestamp":0} -{ "index" : { "_index":"miniblocks", "_id" : "h2"} } -{"senderShard":0,"receiverShard":2,"type":"","timestamp":0} + expectedBuff := `{ "update" : {"_index":"miniblocks", "_id" : "h1" } } +{"scripted_upsert": true, "script": {"source": "if ('create' == ctx.op) {ctx._source = params.mb} else {if (params.npos) {ctx._source.senderBlockHash = params.mb.senderBlockHash;ctx._source.procTypeS = params.mb.procTypeS;} else {ctx._source.receiverBlockHash = params.mb.receiverBlockHash;ctx._source.procTypeD = params.mb.procTypeD;}}","lang": "painless","params": { "mb": {"senderShard":0,"receiverShard":1,"type":"","timestamp":0}, "npos": true }},"upsert": {}} +{ "update" : {"_index":"miniblocks", "_id" : "h2" } } +{"scripted_upsert": true, "script": {"source": "if ('create' == ctx.op) {ctx._source = params.mb} else {if (params.npos) {ctx._source.senderBlockHash = params.mb.senderBlockHash;ctx._source.procTypeS = params.mb.procTypeS;} else {ctx._source.receiverBlockHash = params.mb.receiverBlockHash;ctx._source.procTypeD = params.mb.procTypeD;}}","lang": "painless","params": { "mb": {"senderShard":0,"receiverShard":2,"type":"","timestamp":0}, "npos": true }},"upsert": {}} ` require.Equal(t, expectedBuff, buffSlice.Buffers()[0].String()) } @@ -41,14 +41,12 @@ func TestMiniblocksProcessor_SerializeBulkMiniBlocksInDB(t *testing.T) { } buffSlice := data.NewBufferSlice(data.DefaultMaxBulkSize) - mp.SerializeBulkMiniBlocks(miniblocks, map[string]bool{ - "h1": true, - }, buffSlice, "miniblocks", 0) + mp.SerializeBulkMiniBlocks(miniblocks, buffSlice, "miniblocks", 0) expectedBuff := `{ "update" : {"_index":"miniblocks", "_id" : "h1" } } -{ "doc" : { "senderBlockHash" : "", "procTypeS": "" } } -{ "index" : { 
"_index":"miniblocks", "_id" : "h2"} } -{"senderShard":0,"receiverShard":2,"type":"","timestamp":0} +{"scripted_upsert": true, "script": {"source": "if ('create' == ctx.op) {ctx._source = params.mb} else {if (params.npos) {ctx._source.senderBlockHash = params.mb.senderBlockHash;ctx._source.procTypeS = params.mb.procTypeS;} else {ctx._source.receiverBlockHash = params.mb.receiverBlockHash;ctx._source.procTypeD = params.mb.procTypeD;}}","lang": "painless","params": { "mb": {"senderShard":0,"receiverShard":1,"type":"","timestamp":0}, "npos": true }},"upsert": {}} +{ "update" : {"_index":"miniblocks", "_id" : "h2" } } +{"scripted_upsert": true, "script": {"source": "if ('create' == ctx.op) {ctx._source = params.mb} else {if (params.npos) {ctx._source.senderBlockHash = params.mb.senderBlockHash;ctx._source.procTypeS = params.mb.procTypeS;} else {ctx._source.receiverBlockHash = params.mb.receiverBlockHash;ctx._source.procTypeD = params.mb.procTypeD;}}","lang": "painless","params": { "mb": {"senderShard":0,"receiverShard":2,"type":"","timestamp":0}, "npos": true }},"upsert": {}} ` require.Equal(t, expectedBuff, buffSlice.Buffers()[0].String()) } @@ -61,12 +59,10 @@ func TestSerializeMiniblock_CrossShardNormal(t *testing.T) { } buffSlice := data.NewBufferSlice(data.DefaultMaxBulkSize) - mp.SerializeBulkMiniBlocks(miniblocks, map[string]bool{ - "h1": true, - }, buffSlice, "miniblocks", 1) + mp.SerializeBulkMiniBlocks(miniblocks, buffSlice, "miniblocks", 1) expectedBuff := `{ "update" : {"_index":"miniblocks", "_id" : "h1" } } -{ "doc" : { "receiverBlockHash" : "receiverBlock", "procTypeD": "" } } +{"scripted_upsert": true, "script": {"source": "if ('create' == ctx.op) {ctx._source = params.mb} else {if (params.npos) {ctx._source.senderBlockHash = params.mb.senderBlockHash;ctx._source.procTypeS = params.mb.procTypeS;} else {ctx._source.receiverBlockHash = params.mb.receiverBlockHash;ctx._source.procTypeD = params.mb.procTypeD;}}","lang": "painless","params": { "mb": 
{"senderShard":0,"receiverShard":1,"receiverBlockHash":"receiverBlock","type":"","timestamp":0}, "npos": false }},"upsert": {}} ` require.Equal(t, expectedBuff, buffSlice.Buffers()[0].String()) } @@ -80,12 +76,10 @@ func TestSerializeMiniblock_IntraShardScheduled(t *testing.T) { } buffSlice := data.NewBufferSlice(data.DefaultMaxBulkSize) - mp.SerializeBulkMiniBlocks(miniblocks, map[string]bool{ - "h1": false, - }, buffSlice, "miniblocks", 1) + mp.SerializeBulkMiniBlocks(miniblocks, buffSlice, "miniblocks", 1) - expectedBuff := `{ "index" : { "_index":"miniblocks", "_id" : "h1"} } -{"senderShard":1,"receiverShard":1,"senderBlockHash":"senderBlock","type":"","procTypeS":"Scheduled","timestamp":0} + expectedBuff := `{ "update" : {"_index":"miniblocks", "_id" : "h1" } } +{"scripted_upsert": true, "script": {"source": "if ('create' == ctx.op) {ctx._source = params.mb} else {if (params.npos) {ctx._source.senderBlockHash = params.mb.senderBlockHash;ctx._source.procTypeS = params.mb.procTypeS;} else {ctx._source.receiverBlockHash = params.mb.receiverBlockHash;ctx._source.procTypeD = params.mb.procTypeD;}}","lang": "painless","params": { "mb": {"senderShard":1,"receiverShard":1,"senderBlockHash":"senderBlock","type":"","procTypeS":"Scheduled","timestamp":0}, "npos": true }},"upsert": {}} ` require.Equal(t, expectedBuff, buffSlice.Buffers()[0].String()) @@ -95,12 +89,10 @@ func TestSerializeMiniblock_IntraShardScheduled(t *testing.T) { } buffSlice = data.NewBufferSlice(data.DefaultMaxBulkSize) - mp.SerializeBulkMiniBlocks(miniblocks, map[string]bool{ - "h1": true, - }, buffSlice, "miniblocks", 1) + mp.SerializeBulkMiniBlocks(miniblocks, buffSlice, "miniblocks", 1) expectedBuff = `{ "update" : {"_index":"miniblocks", "_id" : "h1" } } -{ "doc" : { "receiverBlockHash" : "receiverBlock", "procTypeD": "Processed" } } +{"scripted_upsert": true, "script": {"source": "if ('create' == ctx.op) {ctx._source = params.mb} else {if (params.npos) {ctx._source.senderBlockHash = 
params.mb.senderBlockHash;ctx._source.procTypeS = params.mb.procTypeS;} else {ctx._source.receiverBlockHash = params.mb.receiverBlockHash;ctx._source.procTypeD = params.mb.procTypeD;}}","lang": "painless","params": { "mb": {"senderShard":1,"receiverShard":1,"receiverBlockHash":"receiverBlock","type":"","procTypeD":"Processed","timestamp":0}, "npos": false }},"upsert": {}} ` require.Equal(t, expectedBuff, buffSlice.Buffers()[0].String()) } diff --git a/process/elasticproc/statistics/serialize.go b/process/elasticproc/statistics/serialize.go index 0cac6d60..09879f25 100644 --- a/process/elasticproc/statistics/serialize.go +++ b/process/elasticproc/statistics/serialize.go @@ -4,7 +4,9 @@ import ( "bytes" "encoding/json" "fmt" + "time" + "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-es-indexer-go/data" logger "github.com/multiversx/mx-chain-logger-go" ) @@ -20,10 +22,17 @@ func NewStatisticsProcessor() *statisticsProcessor { } // SerializeRoundsInfo will serialize information about rounds -func (sp *statisticsProcessor) SerializeRoundsInfo(roundsInfo []*data.RoundInfo) *bytes.Buffer { +func (sp *statisticsProcessor) SerializeRoundsInfo(rounds *outport.RoundsInfo) *bytes.Buffer { buff := &bytes.Buffer{} - for _, info := range roundsInfo { - serializedRoundInfo, meta := serializeRoundInfo(info) + for _, info := range rounds.RoundsInfo { + serializedRoundInfo, meta := serializeRoundInfo(&data.RoundInfo{ + Round: info.Round, + SignersIndexes: info.SignersIndexes, + BlockWasProposed: info.BlockWasProposed, + ShardId: info.ShardId, + Epoch: info.Epoch, + Timestamp: time.Duration(info.Timestamp), + }) buff.Grow(len(meta) + len(serializedRoundInfo)) _, err := buff.Write(meta) @@ -41,7 +50,7 @@ func (sp *statisticsProcessor) SerializeRoundsInfo(roundsInfo []*data.RoundInfo) } func serializeRoundInfo(info *data.RoundInfo) ([]byte, []byte) { - meta := []byte(fmt.Sprintf(`{ "index" : { "_id" : "%d_%d" } }%s`, info.ShardId, info.Index, "\n")) 
+ meta := []byte(fmt.Sprintf(`{ "index" : { "_id" : "%d_%d" } }%s`, info.ShardId, info.Round, "\n")) serializedInfo, err := json.Marshal(info) if err != nil { diff --git a/process/elasticproc/statistics/serialize_test.go b/process/elasticproc/statistics/serialize_test.go index 06d3b6cf..a0cd079c 100644 --- a/process/elasticproc/statistics/serialize_test.go +++ b/process/elasticproc/statistics/serialize_test.go @@ -3,7 +3,7 @@ package statistics import ( "testing" - "github.com/multiversx/mx-chain-es-indexer-go/data" + "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/stretchr/testify/require" ) @@ -12,9 +12,11 @@ func TestStatisticsProcessor_SerializeRoundsInfo(t *testing.T) { sp := NewStatisticsProcessor() - buff := sp.SerializeRoundsInfo([]*data.RoundInfo{{ - Epoch: 1, - }}) + buff := sp.SerializeRoundsInfo(&outport.RoundsInfo{ + RoundsInfo: []*outport.RoundInfo{{ + Epoch: 1, + }}, + }) expectedBuff := `{ "index" : { "_id" : "0_0" } } {"round":0,"signersIndexes":null,"blockWasProposed":false,"shardId":0,"epoch":1,"timestamp":0} ` diff --git a/process/elasticproc/tags/serialize.go b/process/elasticproc/tags/serialize.go index be13563f..1de2f129 100644 --- a/process/elasticproc/tags/serialize.go +++ b/process/elasticproc/tags/serialize.go @@ -16,12 +16,16 @@ func (tc *tagsCount) Serialize(buffSlice *data.BufferSlice, index string) error } base64Tag := base64.StdEncoding.EncodeToString([]byte(tag)) + if len(base64Tag) > converters.MaxIDSize { + base64Tag = base64Tag[:converters.MaxIDSize] + } meta := []byte(fmt.Sprintf(`{ "update" : {"_index":"%s", "_id" : "%s" } }%s`, index, converters.JsonEscape(base64Tag), "\n")) codeToExecute := ` ctx._source.count += params.count; ctx._source.tag = params.tag ` + serializedDataStr := fmt.Sprintf(`{"script": {"source": "%s","lang": "painless","params": {"count": %d, "tag": "%s"}},"upsert": {"count": %d, "tag":"%s"}}`, converters.FormatPainlessSource(codeToExecute), count, converters.JsonEscape(tag), count, 
converters.JsonEscape(tag), ) diff --git a/process/elasticproc/tags/serialize_test.go b/process/elasticproc/tags/serialize_test.go index 80e69d7d..ffeeadf5 100644 --- a/process/elasticproc/tags/serialize_test.go +++ b/process/elasticproc/tags/serialize_test.go @@ -1,9 +1,13 @@ package tags import ( + "crypto/rand" + "encoding/base64" + "fmt" "testing" "github.com/multiversx/mx-chain-es-indexer-go/data" + "github.com/multiversx/mx-chain-es-indexer-go/process/elasticproc/converters" "github.com/stretchr/testify/require" ) @@ -24,3 +28,23 @@ func TestTagsCount_Serialize(t *testing.T) { ` require.Equal(t, expected, buffSlice.Buffers()[0].String()) } + +func TestTagsCount_TruncateID(t *testing.T) { + t.Parallel() + + tagsC := NewTagsCount() + + randomBytes := make([]byte, 600) + _, _ = rand.Read(randomBytes) + + tagsC.ParseTags([]string{string(randomBytes)}) + + buffSlice := data.NewBufferSlice(data.DefaultMaxBulkSize) + err := tagsC.Serialize(buffSlice, "tags") + require.Nil(t, err) + + expected := fmt.Sprintf(`{ "update" : {"_index":"tags", "_id" : "%s" } } +{"script": {"source": "ctx._source.count += params.count; ctx._source.tag = params.tag","lang": "painless","params": {"count": 1, "tag": "%s"}},"upsert": {"count": 1, "tag":"%s"}} +`, base64.StdEncoding.EncodeToString(randomBytes)[:converters.MaxIDSize], converters.JsonEscape(string(randomBytes)), converters.JsonEscape(string(randomBytes))) + require.Equal(t, expected, buffSlice.Buffers()[0].String()) +} diff --git a/process/elasticproc/transactions/checkers.go b/process/elasticproc/transactions/checkers.go index 9620048a..7d9fcaf7 100644 --- a/process/elasticproc/transactions/checkers.go +++ b/process/elasticproc/transactions/checkers.go @@ -9,7 +9,6 @@ import ( "github.com/multiversx/mx-chain-core-go/core" "github.com/multiversx/mx-chain-core-go/core/check" coreData "github.com/multiversx/mx-chain-core-go/data" - "github.com/multiversx/mx-chain-core-go/data/block" 
"github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-es-indexer-go/data" elasticIndexer "github.com/multiversx/mx-chain-es-indexer-go/process/dataindexer" @@ -51,13 +50,10 @@ func areESDTValuesOK(values []string) bool { } func checkPrepareTransactionForDatabaseArguments( - body *block.Body, header coreData.HeaderHandler, - pool *outport.Pool, + pool *outport.TransactionPool, ) error { - if body == nil { - return elasticIndexer.ErrNilBlockBody - } + if check.IfNil(header) { return elasticIndexer.ErrNilHeaderHandler } diff --git a/process/elasticproc/transactions/checkers_test.go b/process/elasticproc/transactions/checkers_test.go index 8901b401..06ebe9e9 100644 --- a/process/elasticproc/transactions/checkers_test.go +++ b/process/elasticproc/transactions/checkers_test.go @@ -72,27 +72,20 @@ func TestCheckTxsProcessorArg(t *testing.T) { tests := []struct { name string - args func() (body *block.Body, header coreData.HeaderHandler, pool *outport.Pool) + args func() (header coreData.HeaderHandler, pool *outport.TransactionPool) exErr error }{ - { - name: "NilBlockBody", - args: func() (body *block.Body, header coreData.HeaderHandler, pool *outport.Pool) { - return nil, &block.Header{}, &outport.Pool{} - }, - exErr: elasticIndexer.ErrNilBlockBody, - }, { name: "NilHeaderHandler", - args: func() (body *block.Body, header coreData.HeaderHandler, pool *outport.Pool) { - return &block.Body{}, nil, &outport.Pool{} + args: func() (header coreData.HeaderHandler, pool *outport.TransactionPool) { + return nil, &outport.TransactionPool{} }, exErr: elasticIndexer.ErrNilHeaderHandler, }, { name: "NilPool", - args: func() (body *block.Body, header coreData.HeaderHandler, pool *outport.Pool) { - return &block.Body{}, &block.Header{}, nil + args: func() (header coreData.HeaderHandler, pool *outport.TransactionPool) { + return &block.Header{}, nil }, exErr: elasticIndexer.ErrNilPool, }, diff --git a/process/elasticproc/transactions/interface.go 
b/process/elasticproc/transactions/interface.go index 9cf02bb7..4a201d14 100644 --- a/process/elasticproc/transactions/interface.go +++ b/process/elasticproc/transactions/interface.go @@ -1,8 +1,15 @@ package transactions -import datafield "github.com/multiversx/mx-chain-vm-common-go/parsers/dataField" +import ( + "github.com/multiversx/mx-chain-core-go/data/outport" + datafield "github.com/multiversx/mx-chain-vm-common-go/parsers/dataField" +) // DataFieldParser defines what a data field parser should be able to do type DataFieldParser interface { Parse(dataField []byte, sender, receiver []byte, numOfShards uint32) *datafield.ResponseParseData } + +type feeInfoHandler interface { + GetFeeInfo() *outport.FeeInfo +} diff --git a/process/elasticproc/transactions/scrsDataToTransactions.go b/process/elasticproc/transactions/scrsDataToTransactions.go index becb2299..f18aa741 100644 --- a/process/elasticproc/transactions/scrsDataToTransactions.go +++ b/process/elasticproc/transactions/scrsDataToTransactions.go @@ -3,13 +3,9 @@ package transactions import ( "encoding/hex" "math/big" - "strings" - "github.com/multiversx/mx-chain-core-go/core" - "github.com/multiversx/mx-chain-core-go/data/transaction" "github.com/multiversx/mx-chain-es-indexer-go/data" "github.com/multiversx/mx-chain-es-indexer-go/process/dataindexer" - vmcommon "github.com/multiversx/mx-chain-vm-common-go" ) const ( @@ -17,25 +13,11 @@ const ( ) type scrsDataToTransactions struct { - retCodes []string balanceConverter dataindexer.BalanceConverter } func newScrsDataToTransactions(balanceConverter dataindexer.BalanceConverter) *scrsDataToTransactions { return &scrsDataToTransactions{ - retCodes: []string{ - vmcommon.FunctionNotFound.String(), - vmcommon.FunctionWrongSignature.String(), - vmcommon.ContractNotFound.String(), - vmcommon.UserError.String(), - vmcommon.OutOfGas.String(), - vmcommon.AccountCollision.String(), - vmcommon.OutOfFunds.String(), - vmcommon.CallStackOverFlow.String(), - 
vmcommon.ContractInvalid.String(), - vmcommon.ExecutionFailed.String(), - vmcommon.UpgradeFailed.String(), - }, balanceConverter: balanceConverter, } } @@ -65,127 +47,35 @@ func (st *scrsDataToTransactions) processTransactionsAfterSCRsWereAttached(trans if len(tx.SmartContractResults) == 0 { continue } - - st.fillTxWithSCRsFields(tx) - } -} - -func (st *scrsDataToTransactions) fillTxWithSCRsFields(tx *data.Transaction) { - tx.HasSCR = true - - if isRelayedTx(tx) { - return - } - - // ignore invalid transaction because status and gas fields were already set - if tx.Status == transaction.TxStatusInvalid.String() { - return - } - - if hasSuccessfulSCRs(tx) { - return + tx.HasSCR = true } - - if hasCrossShardPendingTransfer(tx) { - return - } - - if st.hasSCRWithErrorCode(tx) { - tx.Status = transaction.TxStatusFail.String() - } -} - -func (st *scrsDataToTransactions) hasSCRWithErrorCode(tx *data.Transaction) bool { - for _, scr := range tx.SmartContractResults { - for _, codeStr := range st.retCodes { - if strings.Contains(string(scr.Data), hex.EncodeToString([]byte(codeStr))) || - scr.ReturnMessage == codeStr { - return true - } - } - } - - return false -} - -func hasSuccessfulSCRs(tx *data.Transaction) bool { - for _, scr := range tx.SmartContractResults { - if isScResultSuccessful(scr.Data) { - return true - } - } - - return false } -func hasCrossShardPendingTransfer(tx *data.Transaction) bool { - for _, scr := range tx.SmartContractResults { - splitData := strings.Split(string(scr.Data), data.AtSeparator) - if len(splitData) < 2 { - continue - } - - isMultiTransferOrNFTTransfer := splitData[0] == core.BuiltInFunctionESDTNFTTransfer || splitData[0] == core.BuiltInFunctionMultiESDTNFTTransfer - if !isMultiTransferOrNFTTransfer { - continue - } - - if scr.SenderShard != scr.ReceiverShard { - return true - } - } - - return false -} - -func (st *scrsDataToTransactions) processSCRsWithoutTx(scrs []*data.ScResult) (map[string]string, map[string]*data.FeeData) { - 
txHashStatus := make(map[string]string) +func (st *scrsDataToTransactions) processSCRsWithoutTx(scrs []*data.ScResult) map[string]*data.FeeData { txHashRefund := make(map[string]*data.FeeData) for _, scr := range scrs { - if scr.InitialTxGasUsed != 0 { - var feeNum float64 - var err error - initialTxFeeBig, ok := big.NewInt(0).SetString(scr.InitialTxFee, 10) - if ok { - feeNum, err = st.balanceConverter.ComputeESDTBalanceAsFloat(initialTxFeeBig) - } - if err != nil { - log.Warn("scrsDataToTransactions.processSCRsWithoutTx: cannot compute fee as num", - "initial Tx fee", initialTxFeeBig, "error", err) - } - - txHashRefund[scr.OriginalTxHash] = &data.FeeData{ - FeeNum: feeNum, - Fee: scr.InitialTxFee, - GasUsed: scr.InitialTxGasUsed, - Receiver: scr.Receiver, - } - } - - if !st.isESDTNFTTransferOrMultiTransferWithError(string(scr.Data)) { + if scr.InitialTxGasUsed == 0 { continue } - txHashStatus[scr.OriginalTxHash] = transaction.TxStatusFail.String() - } - - return txHashStatus, txHashRefund -} - -func (st *scrsDataToTransactions) isESDTNFTTransferOrMultiTransferWithError(scrData string) bool { - splitData := strings.Split(scrData, data.AtSeparator) - isMultiTransferOrNFTTransfer := splitData[0] == core.BuiltInFunctionESDTNFTTransfer || splitData[0] == core.BuiltInFunctionMultiESDTNFTTransfer - if !isMultiTransferOrNFTTransfer || len(splitData) < minNumOfArgumentsNFTTransferORMultiTransfer { - return false - } + var feeNum float64 + var err error + initialTxFeeBig, ok := big.NewInt(0).SetString(scr.InitialTxFee, 10) + if ok { + feeNum, err = st.balanceConverter.ConvertBigValueToFloat(initialTxFeeBig) + } + if err != nil { + log.Warn("scrsDataToTransactions.processSCRsWithoutTx: cannot compute fee as num", + "initial Tx fee", initialTxFeeBig, "error", err) + } - latestArgumentFromDataField := splitData[len(splitData)-1] - for _, retCode := range st.retCodes { - isWithError := latestArgumentFromDataField == hex.EncodeToString([]byte(retCode)) - if isWithError { - 
return true + txHashRefund[scr.OriginalTxHash] = &data.FeeData{ + FeeNum: feeNum, + Fee: scr.InitialTxFee, + GasUsed: scr.InitialTxGasUsed, + Receiver: scr.Receiver, } } - return false + return txHashRefund } diff --git a/process/elasticproc/transactions/scrsDataToTransactions_test.go b/process/elasticproc/transactions/scrsDataToTransactions_test.go index fddfbb1a..4858c642 100644 --- a/process/elasticproc/transactions/scrsDataToTransactions_test.go +++ b/process/elasticproc/transactions/scrsDataToTransactions_test.go @@ -88,19 +88,7 @@ func TestProcessTransactionsAfterSCRsWereAttached(t *testing.T) { } scrsDataToTxs.processTransactionsAfterSCRsWereAttached(txs) - require.Equal(t, "fail", tx1.Status) + require.Equal(t, "", tx1.Status) require.Equal(t, tx1.GasLimit, tx1.GasUsed) require.Equal(t, "168805000000000", tx1.Fee) } - -func TestIsESDTNFTTransferWithUserError(t *testing.T) { - t.Parallel() - - bc, _ := converters.NewBalanceConverter(18) - scrsDataToTxs := newScrsDataToTransactions(bc) - - require.False(t, scrsDataToTxs.isESDTNFTTransferOrMultiTransferWithError("ESDTNFTTransfer@45474c444d4558462d333766616239@06f5@045d2bd2629df0d2ea@0801120a00045d2bd2629df0d2ea226408f50d1a2000000000000000000500e809539d1d8febc54df4e6fe826fdc8ab6c88cf07ceb32003a3b00000007401c82df9c05a80000000000000407000000000000040f010000000009045d2bd2629df0d2ea0000000000000009045d2bd2629df0d2ea@636c61696d52657761726473")) - require.False(t, scrsDataToTxs.isESDTNFTTransferOrMultiTransferWithError("ESDTTransfer@4d45582d623662623764@74b7e37e3c2efe5f11@")) - require.False(t, scrsDataToTxs.isESDTNFTTransferOrMultiTransferWithError("ESDTNFTTransfer@45474c444d4558462d333766616239@070f@045d2bd2629df0d2ea@0801120a00045d2bd2629df0d2ea2264088f0e1a2000000000000000000500e809539d1d8febc54df4e6fe826fdc8ab6c88cf07ceb32003a3b000000074034d62af2b6930000000000000407000000000000040f010000000009045d2bd2629df0d2ea0000000000000009045d2bd2629df0d2ea@")) - require.True(t, 
scrsDataToTxs.isESDTNFTTransferOrMultiTransferWithError("MultiESDTNFTTransfer@02@5745474c442d626434643739@00@38e62046fb1a0000@584d45582d666461333535@07@0801120c00048907e58284c28e898e2922520807120a4d45582d3435356335371a20000000000000000005007afb2c871d1647372fd53a9eb3e53e5a8ec9251cb05532003a1e0000000a4d45582d343535633537000000000000000000000000000008e8@657865637574696f6e206661696c6564")) -} diff --git a/process/elasticproc/transactions/serialize.go b/process/elasticproc/transactions/serialize.go index 0d01c51b..2cab3fc9 100644 --- a/process/elasticproc/transactions/serialize.go +++ b/process/elasticproc/transactions/serialize.go @@ -6,6 +6,7 @@ import ( "strings" "github.com/multiversx/mx-chain-core-go/core" + "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-es-indexer-go/data" "github.com/multiversx/mx-chain-es-indexer-go/process/elasticproc/converters" ) @@ -80,7 +81,7 @@ func (tdp *txsDatabaseProcessor) SerializeTransactionsFeeData(txHashRefund map[s // SerializeTransactions will serialize the transactions in a way that Elasticsearch expects a bulk request func (tdp *txsDatabaseProcessor) SerializeTransactions( transactions []*data.Transaction, - txHashStatus map[string]string, + txHashStatusInfo map[string]*outport.StatusInfo, selfShardID uint32, buffSlice *data.BufferSlice, index string, @@ -97,7 +98,7 @@ func (tdp *txsDatabaseProcessor) SerializeTransactions( } } - err := serializeTxHashStatus(buffSlice, txHashStatus, index) + err := serializeTxHashStatus(buffSlice, txHashStatusInfo, index) if err != nil { return err } @@ -105,22 +106,38 @@ func (tdp *txsDatabaseProcessor) SerializeTransactions( return nil } -func serializeTxHashStatus(buffSlice *data.BufferSlice, txHashStatus map[string]string, index string) error { - for txHash, status := range txHashStatus { +func serializeTxHashStatus(buffSlice *data.BufferSlice, txHashStatusInfo map[string]*outport.StatusInfo, index string) error { + for txHash, statusInfo := range 
txHashStatusInfo { metaData := []byte(fmt.Sprintf(`{"update":{ "_index":"%s","_id":"%s"}}%s`, index, txHash, "\n")) newTx := &data.Transaction{ - Status: status, + Status: statusInfo.Status, + ErrorEvent: statusInfo.ErrorEvent, + CompletedEvent: statusInfo.CompletedEvent, } marshaledTx, err := json.Marshal(newTx) if err != nil { return err } + marshaledStatusInfo, err := json.Marshal(statusInfo) + if err != nil { + return err + } codeToExecute := ` - ctx._source.status = params.status + if (!params.statusInfo.status.isEmpty()) { + ctx._source.status = params.statusInfo.status; + } + + if (params.statusInfo.completedEvent) { + ctx._source.completedEvent = params.statusInfo.completedEvent; + } + + if (params.statusInfo.errorEvent) { + ctx._source.errorEvent = params.statusInfo.errorEvent; + } ` - serializedData := []byte(fmt.Sprintf(`{"script": {"source": "%s","lang": "painless","params": {"status": "%s"}},"upsert": %s }`, converters.FormatPainlessSource(codeToExecute), converters.JsonEscape(status), string(marshaledTx))) + serializedData := []byte(fmt.Sprintf(`{"script": {"source": "%s","lang": "painless","params": {"statusInfo": %s}}, "upsert": %s }`, converters.FormatPainlessSource(codeToExecute), string(marshaledStatusInfo), string(marshaledTx))) err = buffSlice.PutData(metaData, serializedData) if err != nil { return err @@ -170,9 +187,11 @@ func prepareNFTESDTTransferOrMultiESDTTransfer(marshaledTx []byte) ([]byte, erro if ('create' == ctx.op) { ctx._source = params.tx; } else { - def status = ctx._source.status; + def status = ctx._source.status; + def errorEvent = ctx._source.errorEvent; ctx._source = params.tx; ctx._source.status = status; + ctx._source.errorEvent = errorEvent; } ` serializedData := []byte(fmt.Sprintf(`{"scripted_upsert": true, "script":{"source":"%s","lang": "painless","params":{"tx": %s}},"upsert":{}}`, diff --git a/process/elasticproc/transactions/serialize_test.go b/process/elasticproc/transactions/serialize_test.go index 
d81dd6aa..1565ee2d 100644 --- a/process/elasticproc/transactions/serialize_test.go +++ b/process/elasticproc/transactions/serialize_test.go @@ -3,6 +3,7 @@ package transactions import ( "testing" + "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-es-indexer-go/data" "github.com/stretchr/testify/require" ) @@ -81,7 +82,7 @@ func TestSerializeTransactionsIntraShardTx(t *testing.T) { err := (&txsDatabaseProcessor{}).SerializeTransactions([]*data.Transaction{{ Hash: "txHash", SmartContractResults: []*data.ScResult{{}}, - }}, map[string]string{}, 0, buffSlice, "transactions") + }}, map[string]*outport.StatusInfo{}, 0, buffSlice, "transactions") require.Nil(t, err) expectedBuff := `{ "index" : { "_index":"transactions", "_id" : "txHash" } } @@ -100,7 +101,7 @@ func TestSerializeTransactionCrossShardTxSource(t *testing.T) { ReceiverShard: 1, SmartContractResults: []*data.ScResult{{}}, Version: 1, - }}, map[string]string{}, 0, buffSlice, "transactions") + }}, map[string]*outport.StatusInfo{}, 0, buffSlice, "transactions") require.Nil(t, err) expectedBuff := `{"update":{ "_index":"transactions", "_id":"txHash"}} @@ -119,7 +120,7 @@ func TestSerializeTransactionsCrossShardTxDestination(t *testing.T) { ReceiverShard: 0, SmartContractResults: []*data.ScResult{{}}, Version: 1, - }}, map[string]string{}, 0, buffSlice, "transactions") + }}, map[string]*outport.StatusInfo{}, 0, buffSlice, "transactions") require.Nil(t, err) expectedBuff := `{ "index" : { "_index":"transactions", "_id" : "txHash" } } diff --git a/process/elasticproc/transactions/smartContractResultsProcessor.go b/process/elasticproc/transactions/smartContractResultsProcessor.go index f7bc2e4c..e5f6a72e 100644 --- a/process/elasticproc/transactions/smartContractResultsProcessor.go +++ b/process/elasticproc/transactions/smartContractResultsProcessor.go @@ -2,21 +2,18 @@ package transactions import ( "encoding/hex" - "math/big" "strconv" "time" 
"github.com/multiversx/mx-chain-core-go/core" - "github.com/multiversx/mx-chain-core-go/data" coreData "github.com/multiversx/mx-chain-core-go/data" "github.com/multiversx/mx-chain-core-go/data/block" - "github.com/multiversx/mx-chain-core-go/data/smartContractResult" + "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-core-go/hashing" "github.com/multiversx/mx-chain-core-go/marshal" indexerData "github.com/multiversx/mx-chain-es-indexer-go/data" "github.com/multiversx/mx-chain-es-indexer-go/process/dataindexer" "github.com/multiversx/mx-chain-es-indexer-go/process/elasticproc/converters" - datafield "github.com/multiversx/mx-chain-vm-common-go/parsers/dataField" ) type smartContractResultsProcessor struct { @@ -44,16 +41,16 @@ func newSmartContractResultsProcessor( } func (proc *smartContractResultsProcessor) processSCRs( - body *block.Body, + miniBlocks []*block.MiniBlock, header coreData.HeaderHandler, - txsHandler map[string]data.TransactionHandlerWithGasUsedAndFee, + scrs map[string]*outport.SCRInfo, numOfShards uint32, ) []*indexerData.ScResult { - allSCRs := make([]*indexerData.ScResult, 0, len(txsHandler)) + allSCRs := make([]*indexerData.ScResult, 0, len(scrs)) // a copy of the SCRS map is needed because proc.processSCRsFromMiniblock would remove items from the original map - workingSCRSMap := copySCRSMap(txsHandler) - for _, mb := range body.MiniBlocks { + workingSCRSMap := copySCRSMap(scrs) + for _, mb := range miniBlocks { if mb.Type != block.SmartContractResultBlock { continue } @@ -64,13 +61,8 @@ func (proc *smartContractResultsProcessor) processSCRs( } selfShardID := header.GetShardID() - for scrHash, noMBScr := range workingSCRSMap { - scr, ok := noMBScr.GetTxHandler().(*smartContractResult.SmartContractResult) - if !ok { - continue - } - - indexerScr := proc.prepareSmartContractResult([]byte(scrHash), nil, scr, header, selfShardID, selfShardID, noMBScr.GetFee(), noMBScr.GetGasUsed(), numOfShards) + for 
scrHashHex, noMBScrInfo := range workingSCRSMap { + indexerScr := proc.prepareSmartContractResult(scrHashHex, nil, noMBScrInfo, header, selfShardID, selfShardID, numOfShards) allSCRs = append(allSCRs, indexerScr) } @@ -81,7 +73,7 @@ func (proc *smartContractResultsProcessor) processSCRs( func (proc *smartContractResultsProcessor) processSCRsFromMiniblock( header coreData.HeaderHandler, mb *block.MiniBlock, - scrs map[string]data.TransactionHandlerWithGasUsedAndFee, + scrs map[string]*outport.SCRInfo, numOfShards uint32, ) []*indexerData.ScResult { mbHash, err := core.CalculateHash(proc.marshalizer, proc.hasher, mb) @@ -92,38 +84,34 @@ func (proc *smartContractResultsProcessor) processSCRsFromMiniblock( indexerSCRs := make([]*indexerData.ScResult, 0, len(mb.TxHashes)) for _, scrHash := range mb.TxHashes { - scrHandler, ok := scrs[string(scrHash)] + scrHashHex := hex.EncodeToString(scrHash) + scrInfo, ok := scrs[scrHashHex] if !ok { log.Warn("smartContractResultsProcessor.processSCRsFromMiniblock scr not found in map", - "scr hash", hex.EncodeToString(scrHash), + "scr hash", scrHashHex, ) continue } - scr, ok := scrHandler.GetTxHandler().(*smartContractResult.SmartContractResult) - if !ok { - continue - } - indexerSCR := proc.prepareSmartContractResult(scrHash, mbHash, scr, header, mb.SenderShardID, mb.ReceiverShardID, scrHandler.GetFee(), scrHandler.GetGasUsed(), numOfShards) + indexerSCR := proc.prepareSmartContractResult(hex.EncodeToString(scrHash), mbHash, scrInfo, header, mb.SenderShardID, mb.ReceiverShardID, numOfShards) indexerSCRs = append(indexerSCRs, indexerSCR) - delete(scrs, string(scrHash)) + delete(scrs, scrHashHex) } return indexerSCRs } func (proc *smartContractResultsProcessor) prepareSmartContractResult( - scrHash []byte, + scrHashHex string, mbHash []byte, - scr *smartContractResult.SmartContractResult, + scrInfo *outport.SCRInfo, header coreData.HeaderHandler, senderShard uint32, receiverShard uint32, - initialTxFee *big.Int, - initialTxGasUsed 
uint64, numOfShards uint32, ) *indexerData.ScResult { + scr := scrInfo.SmartContractResult hexEncodedMBHash := "" if len(mbHash) > 0 { hexEncodedMBHash = hex.EncodeToString(mbHash) @@ -131,7 +119,7 @@ func (proc *smartContractResultsProcessor) prepareSmartContractResult( relayerAddr := "" if len(scr.RelayerAddr) > 0 { - relayerAddr = proc.pubKeyConverter.Encode(scr.RelayerAddr) + relayerAddr = proc.pubKeyConverter.SilentEncode(scr.RelayerAddr, log) } relayedValue := "" @@ -140,21 +128,25 @@ func (proc *smartContractResultsProcessor) prepareSmartContractResult( } originalSenderAddr := "" if scr.OriginalSender != nil { - originalSenderAddr = proc.pubKeyConverter.Encode(scr.OriginalSender) + originalSenderAddr = proc.pubKeyConverter.SilentEncode(scr.OriginalSender, log) } res := proc.dataFieldParser.Parse(scr.Data, scr.SndAddr, scr.RcvAddr, numOfShards) - valueNum, err := proc.balanceConverter.ComputeESDTBalanceAsFloat(scr.Value) + senderAddr := proc.pubKeyConverter.SilentEncode(scr.SndAddr, log) + receiverAddr := proc.pubKeyConverter.SilentEncode(scr.RcvAddr, log) + receiversAddr, _ := proc.pubKeyConverter.EncodeSlice(res.Receivers) + + valueNum, err := proc.balanceConverter.ConvertBigValueToFloat(scr.Value) if err != nil { log.Warn("smartContractResultsProcessor.prepareSmartContractResult cannot compute scr value as num", - "value", scr.Value, "hash", scrHash, "error", err) + "value", scr.Value, "hash", scrHashHex, "error", err) } esdtValuesNum, err := proc.balanceConverter.ComputeSliceOfStringsAsFloat(res.ESDTValues) if err != nil { log.Warn("smartContractResultsProcessor.prepareSmartContractResult cannot compute scr esdt values as num", - "esdt values", res.ESDTValues, "hash", scrHash, "error", err) + "esdt values", res.ESDTValues, "hash", scrHashHex, "error", err) } var esdtValues []string @@ -162,16 +154,17 @@ func (proc *smartContractResultsProcessor) prepareSmartContractResult( esdtValues = res.ESDTValues } + feeInfo := getFeeInfo(scrInfo) return 
&indexerData.ScResult{ - Hash: hex.EncodeToString(scrHash), + Hash: scrHashHex, MBHash: hexEncodedMBHash, Nonce: scr.Nonce, GasLimit: scr.GasLimit, GasPrice: scr.GasPrice, Value: scr.Value.String(), ValueNum: valueNum, - Sender: proc.pubKeyConverter.Encode(scr.SndAddr), - Receiver: proc.pubKeyConverter.Encode(scr.RcvAddr), + Sender: senderAddr, + Receiver: receiverAddr, RelayerAddr: relayerAddr, RelayedValue: relayedValue, Code: string(scr.Code), @@ -190,17 +183,17 @@ func (proc *smartContractResultsProcessor) prepareSmartContractResult( ESDTValues: esdtValues, ESDTValuesNum: esdtValuesNum, Tokens: converters.TruncateSliceElementsIfExceedsMaxLength(res.Tokens), - Receivers: datafield.EncodeBytesSlice(proc.pubKeyConverter.Encode, res.Receivers), + Receivers: receiversAddr, ReceiversShardIDs: res.ReceiversShardID, IsRelayed: res.IsRelayed, OriginalSender: originalSenderAddr, - InitialTxFee: initialTxFee.String(), - InitialTxGasUsed: initialTxGasUsed, + InitialTxFee: feeInfo.Fee.String(), + InitialTxGasUsed: feeInfo.GasUsed, } } -func copySCRSMap(initial map[string]data.TransactionHandlerWithGasUsedAndFee) map[string]data.TransactionHandlerWithGasUsedAndFee { - newMap := make(map[string]data.TransactionHandlerWithGasUsedAndFee) +func copySCRSMap(initial map[string]*outport.SCRInfo) map[string]*outport.SCRInfo { + newMap := make(map[string]*outport.SCRInfo) for key, value := range initial { newMap[key] = value } diff --git a/process/elasticproc/transactions/smartContractResultsProcessor_test.go b/process/elasticproc/transactions/smartContractResultsProcessor_test.go index 0ec7429a..3feb64f4 100644 --- a/process/elasticproc/transactions/smartContractResultsProcessor_test.go +++ b/process/elasticproc/transactions/smartContractResultsProcessor_test.go @@ -7,6 +7,7 @@ import ( "time" "github.com/multiversx/mx-chain-core-go/data/block" + "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-core-go/data/smartContractResult" 
"github.com/multiversx/mx-chain-es-indexer-go/data" "github.com/multiversx/mx-chain-es-indexer-go/mock" @@ -38,6 +39,7 @@ func TestPrepareSmartContractResult(t *testing.T) { code := []byte("code") sndAddr, rcvAddr := []byte("snd"), []byte("rec") scHash := "scHash" + smartContractRes := &smartContractResult.SmartContractResult{ Nonce: nonce, PrevTxHash: txHash, @@ -47,19 +49,33 @@ func TestPrepareSmartContractResult(t *testing.T) { RcvAddr: rcvAddr, CallType: 1, } + + scrInfo := &outport.SCRInfo{ + SmartContractResult: smartContractRes, + FeeInfo: &outport.FeeInfo{ + Fee: big.NewInt(0), + }, + } + header := &block.Header{TimeStamp: 100} mbHash := []byte("hash") - scRes := scrsProc.prepareSmartContractResult([]byte(scHash), mbHash, smartContractRes, header, 0, 1, big.NewInt(0), 0, 3) + scRes := scrsProc.prepareSmartContractResult(scHash, mbHash, scrInfo, header, 0, 1, 3) + + senderAddr, err := pubKeyConverter.Encode(sndAddr) + require.Nil(t, err) + receiverAddr, err := pubKeyConverter.Encode(rcvAddr) + require.Nil(t, err) + expectedTx := &data.ScResult{ Nonce: nonce, - Hash: hex.EncodeToString([]byte(scHash)), + Hash: scHash, PrevTxHash: hex.EncodeToString(txHash), MBHash: hex.EncodeToString(mbHash), Code: string(code), Data: make([]byte, 0), - Sender: pubKeyConverter.Encode(sndAddr), - Receiver: pubKeyConverter.Encode(rcvAddr), + Sender: senderAddr, + Receiver: receiverAddr, Value: "", CallType: "1", Timestamp: time.Duration(100), diff --git a/process/elasticproc/transactions/transactionDBBuilder.go b/process/elasticproc/transactions/transactionDBBuilder.go index 6ce0265b..727563ce 100644 --- a/process/elasticproc/transactions/transactionDBBuilder.go +++ b/process/elasticproc/transactions/transactionDBBuilder.go @@ -3,20 +3,18 @@ package transactions import ( "encoding/hex" "fmt" - "math/big" "time" "github.com/multiversx/mx-chain-core-go/core" "github.com/multiversx/mx-chain-core-go/core/sharding" coreData "github.com/multiversx/mx-chain-core-go/data" 
"github.com/multiversx/mx-chain-core-go/data/block" + "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-core-go/data/receipt" "github.com/multiversx/mx-chain-core-go/data/rewardTx" - "github.com/multiversx/mx-chain-core-go/data/transaction" "github.com/multiversx/mx-chain-es-indexer-go/data" "github.com/multiversx/mx-chain-es-indexer-go/process/dataindexer" "github.com/multiversx/mx-chain-es-indexer-go/process/elasticproc/converters" - datafield "github.com/multiversx/mx-chain-vm-common-go/parsers/dataField" ) type dbTransactionBuilder struct { @@ -38,33 +36,38 @@ func newTransactionDBBuilder( } func (dtb *dbTransactionBuilder) prepareTransaction( - tx *transaction.Transaction, + txInfo *outport.TxInfo, txHash []byte, mbHash []byte, mb *block.MiniBlock, header coreData.HeaderHandler, txStatus string, - fee *big.Int, - gasUsed uint64, - initialPaidFee *big.Int, numOfShards uint32, ) *data.Transaction { + tx := txInfo.Transaction + isScCall := core.IsSmartContractAddress(tx.RcvAddr) res := dtb.dataFieldParser.Parse(tx.Data, tx.SndAddr, tx.RcvAddr, numOfShards) + receiverAddr := dtb.addressPubkeyConverter.SilentEncode(tx.RcvAddr, log) + senderAddr := dtb.addressPubkeyConverter.SilentEncode(tx.SndAddr, log) + receiversAddr, _ := dtb.addressPubkeyConverter.EncodeSlice(res.Receivers) + receiverShardID := mb.ReceiverShardID if mb.Type == block.InvalidBlock { receiverShardID = sharding.ComputeShardID(tx.RcvAddr, numOfShards) } - valueNum, err := dtb.balanceConverter.ComputeESDTBalanceAsFloat(tx.Value) + valueNum, err := dtb.balanceConverter.ConvertBigValueToFloat(tx.Value) if err != nil { log.Warn("dbTransactionBuilder.prepareTransaction: cannot compute value as num", "value", tx.Value, "hash", txHash, "error", err) } - feeNum, err := dtb.balanceConverter.ComputeESDTBalanceAsFloat(fee) + + feeInfo := getFeeInfo(txInfo) + feeNum, err := dtb.balanceConverter.ConvertBigValueToFloat(feeInfo.Fee) if err != nil { - 
log.Warn("dbTransactionBuilder.prepareTransaction: cannot compute transaction fee as num", "fee", fee, + log.Warn("dbTransactionBuilder.prepareTransaction: cannot compute transaction fee as num", "fee", feeInfo.Fee, "hash", txHash, "error", err) } esdtValuesNum, err := dtb.balanceConverter.ComputeSliceOfStringsAsFloat(res.ESDTValues) @@ -79,7 +82,7 @@ func (dtb *dbTransactionBuilder) prepareTransaction( } guardianAddress := "" if len(tx.GuardianAddr) > 0 { - guardianAddress = dtb.addressPubkeyConverter.Encode(tx.GuardianAddr) + guardianAddress = dtb.addressPubkeyConverter.SilentEncode(tx.GuardianAddr, log) } senderUserName := converters.TruncateFieldIfExceedsMaxLengthBase64(string(tx.SndUserName)) @@ -90,9 +93,9 @@ func (dtb *dbTransactionBuilder) prepareTransaction( Nonce: tx.Nonce, Round: header.GetRound(), Value: tx.Value.String(), + Receiver: receiverAddr, + Sender: senderAddr, ValueNum: valueNum, - Receiver: dtb.addressPubkeyConverter.Encode(tx.RcvAddr), - Sender: dtb.addressPubkeyConverter.Encode(tx.SndAddr), ReceiverShard: receiverShardID, SenderShard: mb.SenderShardID, GasPrice: tx.GasPrice, @@ -101,9 +104,9 @@ func (dtb *dbTransactionBuilder) prepareTransaction( Signature: hex.EncodeToString(tx.Signature), Timestamp: time.Duration(header.GetTimeStamp()), Status: txStatus, - GasUsed: gasUsed, - InitialPaidFee: initialPaidFee.String(), - Fee: fee.String(), + GasUsed: feeInfo.GasUsed, + InitialPaidFee: feeInfo.InitialPaidFee.String(), + Fee: feeInfo.Fee.String(), FeeNum: feeNum, ReceiverUserName: []byte(receiverUserName), SenderUserName: []byte(senderUserName), @@ -113,7 +116,7 @@ func (dtb *dbTransactionBuilder) prepareTransaction( ESDTValues: esdtValues, ESDTValuesNum: esdtValuesNum, Tokens: converters.TruncateSliceElementsIfExceedsMaxLength(res.Tokens), - Receivers: datafield.EncodeBytesSlice(dtb.addressPubkeyConverter.Encode, res.Receivers), + Receivers: receiversAddr, ReceiversShardIDs: res.ReceiversShardID, IsRelayed: res.IsRelayed, Version: tx.Version, 
@@ -130,12 +133,14 @@ func (dtb *dbTransactionBuilder) prepareRewardTransaction( header coreData.HeaderHandler, txStatus string, ) *data.Transaction { - valueNum, err := dtb.balanceConverter.ComputeESDTBalanceAsFloat(rTx.Value) + valueNum, err := dtb.balanceConverter.ConvertBigValueToFloat(rTx.Value) if err != nil { log.Warn("dbTransactionBuilder.prepareRewardTransaction cannot compute value as num", "value", rTx.Value, "hash", txHash, "error", err) } + receiverAddr := dtb.addressPubkeyConverter.SilentEncode(rTx.RcvAddr, log) + return &data.Transaction{ Hash: hex.EncodeToString(txHash), MBHash: hex.EncodeToString(mbHash), @@ -143,7 +148,7 @@ func (dtb *dbTransactionBuilder) prepareRewardTransaction( Round: rTx.Round, Value: rTx.Value.String(), ValueNum: valueNum, - Receiver: dtb.addressPubkeyConverter.Encode(rTx.RcvAddr), + Receiver: receiverAddr, Sender: fmt.Sprintf("%d", core.MetachainShardId), ReceiverShard: mb.ReceiverShardID, SenderShard: mb.SenderShardID, @@ -158,14 +163,16 @@ func (dtb *dbTransactionBuilder) prepareRewardTransaction( } func (dtb *dbTransactionBuilder) prepareReceipt( - recHash string, + recHashHex string, rec *receipt.Receipt, header coreData.HeaderHandler, ) *data.Receipt { + senderAddr := dtb.addressPubkeyConverter.SilentEncode(rec.SndAddr, log) + return &data.Receipt{ - Hash: hex.EncodeToString([]byte(recHash)), + Hash: recHashHex, Value: rec.Value.String(), - Sender: dtb.addressPubkeyConverter.Encode(rec.SndAddr), + Sender: senderAddr, Data: string(rec.Data), TxHash: hex.EncodeToString(rec.TxHash), Timestamp: time.Duration(header.GetTimeStamp()), diff --git a/process/elasticproc/transactions/transactionDBBuilder_test.go b/process/elasticproc/transactions/transactionDBBuilder_test.go index 2411ce84..5b3d8759 100644 --- a/process/elasticproc/transactions/transactionDBBuilder_test.go +++ b/process/elasticproc/transactions/transactionDBBuilder_test.go @@ -9,6 +9,7 @@ import ( "github.com/multiversx/mx-chain-core-go/core" 
"github.com/multiversx/mx-chain-core-go/data/block" + "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-core-go/data/rewardTx" "github.com/multiversx/mx-chain-core-go/data/transaction" "github.com/multiversx/mx-chain-es-indexer-go/data" @@ -53,6 +54,21 @@ func TestGetMoveBalanceTransaction(t *testing.T) { SndUserName: []byte("snd"), } + txInfo := &outport.TxInfo{ + Transaction: tx, + FeeInfo: &outport.FeeInfo{ + GasUsed: 500, + Fee: big.NewInt(100), + InitialPaidFee: big.NewInt(100), + }, + ExecutionOrder: 0, + } + + senderAddr, err := cp.addressPubkeyConverter.Encode(tx.RcvAddr) + require.Nil(t, err) + receiverAddr, err := cp.addressPubkeyConverter.Encode(tx.SndAddr) + require.Nil(t, err) + expectedTx := &data.Transaction{ Hash: hex.EncodeToString(txHash), MBHash: hex.EncodeToString(mbHash), @@ -60,8 +76,8 @@ func TestGetMoveBalanceTransaction(t *testing.T) { Round: header.Round, Value: tx.Value.String(), ValueNum: 1e-15, - Receiver: cp.addressPubkeyConverter.Encode(tx.RcvAddr), - Sender: cp.addressPubkeyConverter.Encode(tx.SndAddr), + Receiver: senderAddr, + Sender: receiverAddr, ReceiverShard: mb.ReceiverShardID, SenderShard: mb.SenderShardID, GasPrice: gasPrice, @@ -82,7 +98,7 @@ func TestGetMoveBalanceTransaction(t *testing.T) { Receivers: []string{}, } - dbTx := cp.prepareTransaction(tx, txHash, mbHash, mb, header, status, big.NewInt(100), 500, big.NewInt(100), 3) + dbTx := cp.prepareTransaction(txInfo, txHash, mbHash, mb, header, status, 3) require.Equal(t, expectedTx, dbTx) } @@ -150,8 +166,8 @@ func TestGetMoveBalanceTransactionInvalid(t *testing.T) { Round: header.Round, Value: tx.Value.String(), ValueNum: 1e-15, - Receiver: cp.addressPubkeyConverter.Encode(tx.RcvAddr), - Sender: cp.addressPubkeyConverter.Encode(tx.SndAddr), + Receiver: cp.addressPubkeyConverter.SilentEncode(tx.RcvAddr, log), + Sender: cp.addressPubkeyConverter.SilentEncode(tx.SndAddr, log), ReceiverShard: uint32(2), SenderShard: mb.SenderShardID, 
GasPrice: gasPrice, @@ -172,6 +188,16 @@ func TestGetMoveBalanceTransactionInvalid(t *testing.T) { ESDTValuesNum: []float64{}, } - dbTx := cp.prepareTransaction(tx, txHash, mbHash, mb, header, status, big.NewInt(100), 500, big.NewInt(100), 3) + txInfo := &outport.TxInfo{ + Transaction: tx, + FeeInfo: &outport.FeeInfo{ + GasUsed: 500, + Fee: big.NewInt(100), + InitialPaidFee: big.NewInt(100), + }, + ExecutionOrder: 0, + } + + dbTx := cp.prepareTransaction(txInfo, txHash, mbHash, mb, header, status, 3) require.Equal(t, expectedTx, dbTx) } diff --git a/process/elasticproc/transactions/transactionsGrouper.go b/process/elasticproc/transactions/transactionsGrouper.go index 30ccc6d6..c6073914 100644 --- a/process/elasticproc/transactions/transactionsGrouper.go +++ b/process/elasticproc/transactions/transactionsGrouper.go @@ -1,11 +1,13 @@ package transactions import ( + "encoding/hex" + "github.com/multiversx/mx-chain-core-go/core" coreData "github.com/multiversx/mx-chain-core-go/data" "github.com/multiversx/mx-chain-core-go/data/block" + "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-core-go/data/receipt" - "github.com/multiversx/mx-chain-core-go/data/rewardTx" "github.com/multiversx/mx-chain-core-go/data/transaction" "github.com/multiversx/mx-chain-core-go/hashing" "github.com/multiversx/mx-chain-core-go/marshal" @@ -38,7 +40,7 @@ func (tg *txsGrouper) groupNormalTxs( mbIndex int, mb *block.MiniBlock, header coreData.HeaderHandler, - txs map[string]coreData.TransactionHandlerWithGasUsedAndFee, + txs map[string]*outport.TxInfo, isImportDB bool, numOfShards uint32, ) (map[string]*data.Transaction, error) { @@ -92,21 +94,16 @@ func (tg *txsGrouper) prepareNormalTxForDB( mb *block.MiniBlock, mbStatus string, txHash []byte, - txs map[string]coreData.TransactionHandlerWithGasUsedAndFee, + txs map[string]*outport.TxInfo, header coreData.HeaderHandler, numOfShards uint32, ) (*data.Transaction, bool) { - txHandler, okGet := 
txs[string(txHash)] + txInfo, okGet := txs[hex.EncodeToString(txHash)] if !okGet { return nil, false } - tx, okCast := txHandler.GetTxHandler().(*transaction.Transaction) - if !okCast { - return nil, false - } - - dbTx := tg.txBuilder.prepareTransaction(tx, txHash, mbHash, mb, header, mbStatus, txHandler.GetFee(), txHandler.GetGasUsed(), txHandler.GetInitialPaidFee(), numOfShards) + dbTx := tg.txBuilder.prepareTransaction(txInfo, txHash, mbHash, mb, header, mbStatus, numOfShards) return dbTx, true } @@ -115,7 +112,7 @@ func (tg *txsGrouper) groupRewardsTxs( mbIndex int, mb *block.MiniBlock, header coreData.HeaderHandler, - txs map[string]coreData.TransactionHandlerWithGasUsedAndFee, + txs map[string]*outport.RewardInfo, isImportDB bool, ) (map[string]*data.Transaction, error) { rewardsTxs := make(map[string]*data.Transaction) @@ -146,20 +143,15 @@ func (tg *txsGrouper) prepareRewardTxForDB( mb *block.MiniBlock, mbStatus string, txHash []byte, - txs map[string]coreData.TransactionHandlerWithGasUsedAndFee, + txs map[string]*outport.RewardInfo, header coreData.HeaderHandler, ) (*data.Transaction, bool) { - txHandler, okGet := txs[string(txHash)] + rtx, okGet := txs[hex.EncodeToString(txHash)] if !okGet { return nil, false } - rtx, okCast := txHandler.GetTxHandler().(*rewardTx.RewardTx) - if !okCast { - return nil, false - } - - dbTx := tg.txBuilder.prepareRewardTransaction(rtx, txHash, mbHash, mb, header, mbStatus) + dbTx := tg.txBuilder.prepareRewardTransaction(rtx.Reward, txHash, mbHash, mb, header, mbStatus) return dbTx, true } @@ -168,7 +160,7 @@ func (tg *txsGrouper) groupInvalidTxs( mbIndex int, mb *block.MiniBlock, header coreData.HeaderHandler, - txs map[string]coreData.TransactionHandlerWithGasUsedAndFee, + txs map[string]*outport.TxInfo, numOfShards uint32, ) (map[string]*data.Transaction, error) { transactions := make(map[string]*data.Transaction) @@ -194,21 +186,16 @@ func (tg *txsGrouper) prepareInvalidTxForDB( mbHash []byte, mb *block.MiniBlock, txHash 
[]byte, - txs map[string]coreData.TransactionHandlerWithGasUsedAndFee, + txs map[string]*outport.TxInfo, header coreData.HeaderHandler, numOfShards uint32, ) (*data.Transaction, bool) { - txHandler, okGet := txs[string(txHash)] + txInfo, okGet := txs[hex.EncodeToString(txHash)] if !okGet { return nil, false } - tx, okCast := txHandler.GetTxHandler().(*transaction.Transaction) - if !okCast { - return nil, false - } - - dbTx := tg.txBuilder.prepareTransaction(tx, txHash, mbHash, mb, header, transaction.TxStatusInvalid.String(), txHandler.GetFee(), txHandler.GetGasUsed(), txHandler.GetInitialPaidFee(), numOfShards) + dbTx := tg.txBuilder.prepareTransaction(txInfo, txHash, mbHash, mb, header, transaction.TxStatusInvalid.String(), numOfShards) return dbTx, true } @@ -221,15 +208,10 @@ func (tg *txsGrouper) shouldIndex(destinationShardID uint32, isImportDB bool, se return selfShardID == destinationShardID } -func (tg *txsGrouper) groupReceipts(header coreData.HeaderHandler, txsPool map[string]coreData.TransactionHandlerWithGasUsedAndFee) []*data.Receipt { +func (tg *txsGrouper) groupReceipts(header coreData.HeaderHandler, txsPool map[string]*receipt.Receipt) []*data.Receipt { dbReceipts := make([]*data.Receipt, 0) - for hash, tx := range txsPool { - rec, ok := tx.GetTxHandler().(*receipt.Receipt) - if !ok { - continue - } - - dbReceipts = append(dbReceipts, tg.txBuilder.prepareReceipt(hash, rec, header)) + for hashHex, rec := range txsPool { + dbReceipts = append(dbReceipts, tg.txBuilder.prepareReceipt(hashHex, rec, header)) } return dbReceipts diff --git a/process/elasticproc/transactions/transactionsGrouper_test.go b/process/elasticproc/transactions/transactionsGrouper_test.go index be81979a..12c6b75e 100644 --- a/process/elasticproc/transactions/transactionsGrouper_test.go +++ b/process/elasticproc/transactions/transactionsGrouper_test.go @@ -1,10 +1,9 @@ package transactions import ( - "math/big" + "encoding/hex" "testing" - coreData 
"github.com/multiversx/mx-chain-core-go/data" "github.com/multiversx/mx-chain-core-go/data/block" "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-core-go/data/receipt" @@ -30,15 +29,21 @@ func TestGroupNormalTxs(t *testing.T) { Type: block.TxBlock, } header := &block.Header{} - txs := map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(txHash1): outport.NewTransactionHandlerWithGasAndFee(&transaction.Transaction{ - SndAddr: []byte("sender1"), - RcvAddr: []byte("receiver1"), - }, 0, big.NewInt(0)), - string(txHash2): outport.NewTransactionHandlerWithGasAndFee(&transaction.Transaction{ - SndAddr: []byte("sender2"), - RcvAddr: []byte("receiver2"), - }, 0, big.NewInt(0)), + txs := map[string]*outport.TxInfo{ + hex.EncodeToString(txHash1): { + Transaction: &transaction.Transaction{ + SndAddr: []byte("sender1"), + RcvAddr: []byte("receiver1"), + }, + FeeInfo: &outport.FeeInfo{}, + }, + hex.EncodeToString(txHash2): { + Transaction: &transaction.Transaction{ + SndAddr: []byte("sender2"), + RcvAddr: []byte("receiver2"), + }, + FeeInfo: &outport.FeeInfo{}, + }, } normalTxs, _ := grouper.groupNormalTxs(0, mb, header, txs, false, 3) @@ -60,13 +65,13 @@ func TestGroupRewardsTxs(t *testing.T) { Type: block.RewardsBlock, } header := &block.Header{} - txs := map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(txHash1): outport.NewTransactionHandlerWithGasAndFee(&rewardTx.RewardTx{ + txs := map[string]*outport.RewardInfo{ + hex.EncodeToString(txHash1): {Reward: &rewardTx.RewardTx{ RcvAddr: []byte("receiver1"), - }, 0, big.NewInt(0)), - string(txHash2): outport.NewTransactionHandlerWithGasAndFee(&rewardTx.RewardTx{ + }}, + hex.EncodeToString(txHash2): {Reward: &rewardTx.RewardTx{ RcvAddr: []byte("receiver2"), - }, 0, big.NewInt(0)), + }}, } normalTxs, _ := grouper.groupRewardsTxs(0, mb, header, txs, false) @@ -88,15 +93,17 @@ func TestGroupInvalidTxs(t *testing.T) { Type: block.InvalidBlock, } header := 
&block.Header{} - txs := map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(txHash1): outport.NewTransactionHandlerWithGasAndFee(&transaction.Transaction{ - SndAddr: []byte("sender1"), - RcvAddr: []byte("receiver1"), - }, 0, big.NewInt(0)), - string(txHash2): outport.NewTransactionHandlerWithGasAndFee(&transaction.Transaction{ - SndAddr: []byte("sender2"), - RcvAddr: []byte("receiver2"), - }, 0, big.NewInt(0)), + txs := map[string]*outport.TxInfo{ + hex.EncodeToString(txHash1): { + Transaction: &transaction.Transaction{ + SndAddr: []byte("sender1"), + RcvAddr: []byte("receiver1"), + }, FeeInfo: &outport.FeeInfo{}}, + hex.EncodeToString(txHash2): { + Transaction: &transaction.Transaction{ + SndAddr: []byte("sender2"), + RcvAddr: []byte("receiver2"), + }, FeeInfo: &outport.FeeInfo{}}, } normalTxs, _ := grouper.groupInvalidTxs(0, mb, header, txs, 3) @@ -114,15 +121,15 @@ func TestGroupReceipts(t *testing.T) { txHash1 := []byte("txHash1") txHash2 := []byte("txHash2") header := &block.Header{} - txs := map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(txHash1): outport.NewTransactionHandlerWithGasAndFee(&receipt.Receipt{ + txs := map[string]*receipt.Receipt{ + hex.EncodeToString(txHash1): { SndAddr: []byte("sender1"), - }, 0, big.NewInt(0)), - string(txHash2): outport.NewTransactionHandlerWithGasAndFee(&receipt.Receipt{ + }, + hex.EncodeToString(txHash2): { SndAddr: []byte("sender2"), - }, 0, big.NewInt(0)), + }, } - normalTxs := grouper.groupReceipts(header, txs) - require.Len(t, normalTxs, 2) + receipts := grouper.groupReceipts(header, txs) + require.Len(t, receipts, 2) } diff --git a/process/elasticproc/transactions/transactionsProcessor.go b/process/elasticproc/transactions/transactionsProcessor.go index 7022b8bc..86e59b38 100644 --- a/process/elasticproc/transactions/transactionsProcessor.go +++ b/process/elasticproc/transactions/transactionsProcessor.go @@ -2,6 +2,7 @@ package transactions import ( "encoding/hex" + "math/big" 
"github.com/multiversx/mx-chain-core-go/core" "github.com/multiversx/mx-chain-core-go/core/check" @@ -65,13 +66,13 @@ func NewTransactionsProcessor(args *ArgsTransactionProcessor) (*txsDatabaseProce // PrepareTransactionsForDatabase will prepare transactions for database func (tdp *txsDatabaseProcessor) PrepareTransactionsForDatabase( - body *block.Body, + miniBlocks []*block.MiniBlock, header coreData.HeaderHandler, - pool *outport.Pool, + pool *outport.TransactionPool, isImportDB bool, numOfShards uint32, ) *data.PreparedResults { - err := checkPrepareTransactionForDatabaseArguments(body, header, pool) + err := checkPrepareTransactionForDatabaseArguments(header, pool) if err != nil { log.Warn("checkPrepareTransactionForDatabaseArguments", "error", err) @@ -85,14 +86,14 @@ func (tdp *txsDatabaseProcessor) PrepareTransactionsForDatabase( normalTxs := make(map[string]*data.Transaction) rewardsTxs := make(map[string]*data.Transaction) - for mbIndex, mb := range body.MiniBlocks { + for mbIndex, mb := range miniBlocks { switch mb.Type { case block.TxBlock: if shouldIgnoreProcessedMBScheduled(header, mbIndex) { continue } - txs, errGroup := tdp.txsGrouper.groupNormalTxs(mbIndex, mb, header, pool.Txs, isImportDB, numOfShards) + txs, errGroup := tdp.txsGrouper.groupNormalTxs(mbIndex, mb, header, pool.Transactions, isImportDB, numOfShards) if errGroup != nil { log.Warn("txsDatabaseProcessor.groupNormalTxs", "error", errGroup) continue @@ -106,7 +107,7 @@ func (tdp *txsDatabaseProcessor) PrepareTransactionsForDatabase( } mergeTxsMaps(rewardsTxs, txs) case block.InvalidBlock: - txs, errGroup := tdp.txsGrouper.groupInvalidTxs(mbIndex, mb, header, pool.Invalid, numOfShards) + txs, errGroup := tdp.txsGrouper.groupInvalidTxs(mbIndex, mb, header, pool.InvalidTxs, numOfShards) if errGroup != nil { log.Warn("txsDatabaseProcessor.groupInvalidTxs", "error", errGroup) continue @@ -119,11 +120,11 @@ func (tdp *txsDatabaseProcessor) PrepareTransactionsForDatabase( normalTxs = 
tdp.setTransactionSearchOrder(normalTxs) dbReceipts := tdp.txsGrouper.groupReceipts(header, pool.Receipts) - dbSCResults := tdp.scrsProc.processSCRs(body, header, pool.Scrs, numOfShards) + dbSCResults := tdp.scrsProc.processSCRs(miniBlocks, header, pool.SmartContractResults, numOfShards) srcsNoTxInCurrentShard := tdp.scrsDataToTxs.attachSCRsToTransactionsAndReturnSCRsWithoutTx(normalTxs, dbSCResults) tdp.scrsDataToTxs.processTransactionsAfterSCRsWereAttached(normalTxs) - txHashStatus, txHashFee := tdp.scrsDataToTxs.processSCRsWithoutTx(srcsNoTxInCurrentShard) + txHashFee := tdp.scrsDataToTxs.processSCRsWithoutTx(srcsNoTxInCurrentShard) sliceNormalTxs := convertMapTxsToSlice(normalTxs) sliceRewardsTxs := convertMapTxsToSlice(rewardsTxs) @@ -133,7 +134,6 @@ func (tdp *txsDatabaseProcessor) PrepareTransactionsForDatabase( Transactions: txsSlice, ScResults: dbSCResults, Receipts: dbReceipts, - TxHashStatus: txHashStatus, TxHashFee: txHashFee, } } @@ -184,12 +184,12 @@ func isCrossShardAtSourceNormalTx(selfShardID uint32, miniblock *block.MiniBlock } func shouldIgnoreProcessedMBScheduled(header coreData.HeaderHandler, mbIndex int) bool { - miniblockHeaders := header.GetMiniBlockHeaderHandlers() - if len(miniblockHeaders) <= mbIndex { + miniBlockHeaders := header.GetMiniBlockHeaderHandlers() + if len(miniBlockHeaders) <= mbIndex { return false } - processingType := miniblockHeaders[mbIndex].GetProcessingType() + processingType := miniBlockHeaders[mbIndex].GetProcessingType() return processingType == int32(block.Processed) } @@ -208,3 +208,15 @@ func mergeTxsMaps(dst, src map[string]*data.Transaction) { dst[key] = value } } + +func getFeeInfo(txWithFeeInfo feeInfoHandler) *outport.FeeInfo { + feeInfo := txWithFeeInfo.GetFeeInfo() + if feeInfo != nil { + return feeInfo + } + + return &outport.FeeInfo{ + Fee: big.NewInt(0), + InitialPaidFee: big.NewInt(0), + } +} diff --git a/process/elasticproc/transactions/transactionsProcessor_test.go 
b/process/elasticproc/transactions/transactionsProcessor_test.go index 0b20b10e..1348e427 100644 --- a/process/elasticproc/transactions/transactionsProcessor_test.go +++ b/process/elasticproc/transactions/transactionsProcessor_test.go @@ -1,12 +1,12 @@ package transactions import ( + "encoding/hex" "math/big" "testing" "github.com/multiversx/mx-chain-core-go/core" "github.com/multiversx/mx-chain-core-go/core/pubkeyConverter" - coreData "github.com/multiversx/mx-chain-core-go/data" "github.com/multiversx/mx-chain-core-go/data/block" "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-core-go/data/receipt" @@ -57,118 +57,161 @@ func TestPrepareTransactionsForDatabase(t *testing.T) { t.Parallel() txHash1 := []byte("txHash1") - tx1 := outport.NewTransactionHandlerWithGasAndFee(&transaction.Transaction{ - GasLimit: 100, - GasPrice: 100, - }, 0, big.NewInt(0)) + tx1 := &outport.TxInfo{ + Transaction: &transaction.Transaction{ + GasLimit: 100, + GasPrice: 100, + }, + FeeInfo: &outport.FeeInfo{ + Fee: big.NewInt(0), + }, + } txHash2 := []byte("txHash2") - tx2 := outport.NewTransactionHandlerWithGasAndFee(&transaction.Transaction{ - GasLimit: 100, - GasPrice: 100, - }, 0, big.NewInt(0)) + tx2 := &outport.TxInfo{ + Transaction: &transaction.Transaction{ + GasLimit: 100, + GasPrice: 100, + }, + FeeInfo: &outport.FeeInfo{ + Fee: big.NewInt(0), + }, + } txHash3 := []byte("txHash3") - tx3 := outport.NewTransactionHandlerWithGasAndFee(&transaction.Transaction{}, 0, big.NewInt(0)) + tx3 := &outport.TxInfo{ + Transaction: &transaction.Transaction{}, + FeeInfo: &outport.FeeInfo{ + Fee: big.NewInt(0), + }, + } txHash4 := []byte("txHash4") - tx4 := outport.NewTransactionHandlerWithGasAndFee(&transaction.Transaction{}, 0, big.NewInt(0)) + tx4 := &outport.TxInfo{ + Transaction: &transaction.Transaction{}, + FeeInfo: &outport.FeeInfo{ + Fee: big.NewInt(0), + }, + } txHash5 := []byte("txHash5") - tx5 := 
outport.NewTransactionHandlerWithGasAndFee(&transaction.Transaction{}, 0, big.NewInt(0)) + tx5 := &outport.TxInfo{ + Transaction: &transaction.Transaction{}, + FeeInfo: &outport.FeeInfo{ + Fee: big.NewInt(0), + }, + } rTx1Hash := []byte("rTxHash1") - rTx1 := outport.NewTransactionHandlerWithGasAndFee(&rewardTx.RewardTx{}, 0, big.NewInt(0)) + rTx1 := &outport.RewardInfo{ + Reward: &rewardTx.RewardTx{}, + } rTx2Hash := []byte("rTxHash2") - rTx2 := outport.NewTransactionHandlerWithGasAndFee(&rewardTx.RewardTx{}, 0, big.NewInt(0)) + rTx2 := &outport.RewardInfo{ + Reward: &rewardTx.RewardTx{}, + } recHash1 := []byte("recHash1") - rec1 := outport.NewTransactionHandlerWithGasAndFee(&receipt.Receipt{ + rec1 := &receipt.Receipt{ Value: big.NewInt(100), TxHash: txHash1, - }, 0, big.NewInt(0)) + } recHash2 := []byte("recHash2") - rec2 := outport.NewTransactionHandlerWithGasAndFee(&receipt.Receipt{ + rec2 := &receipt.Receipt{ Value: big.NewInt(200), TxHash: txHash2, - }, 0, big.NewInt(0)) + } scHash1 := []byte("scHash1") - scResult1 := outport.NewTransactionHandlerWithGasAndFee(&smartContractResult.SmartContractResult{ - OriginalTxHash: txHash1, - PrevTxHash: txHash1, - GasLimit: 1, - }, 0, big.NewInt(0)) + scResult1 := &outport.SCRInfo{ + SmartContractResult: &smartContractResult.SmartContractResult{ + OriginalTxHash: txHash1, + PrevTxHash: txHash1, + GasLimit: 1, + }, + FeeInfo: &outport.FeeInfo{ + Fee: big.NewInt(0), + }, + } scHash2 := []byte("scHash2") - scResult2 := outport.NewTransactionHandlerWithGasAndFee(&smartContractResult.SmartContractResult{ - OriginalTxHash: txHash1, - PrevTxHash: txHash1, - GasLimit: 1, - }, 0, big.NewInt(0)) + scResult2 := &outport.SCRInfo{ + SmartContractResult: &smartContractResult.SmartContractResult{ + OriginalTxHash: txHash1, + PrevTxHash: txHash1, + GasLimit: 1, + }, + FeeInfo: &outport.FeeInfo{ + Fee: big.NewInt(0), + }, + } scHash3 := []byte("scHash3") - scResult3 := 
outport.NewTransactionHandlerWithGasAndFee(&smartContractResult.SmartContractResult{ - OriginalTxHash: txHash3, - Data: []byte("@" + "6F6B"), - }, 0, big.NewInt(0)) + scResult3 := &outport.SCRInfo{ + SmartContractResult: &smartContractResult.SmartContractResult{ + OriginalTxHash: txHash3, + Data: []byte("@" + "6F6B"), + }, + FeeInfo: &outport.FeeInfo{ + Fee: big.NewInt(0), + }, + } - body := &block.Body{ - MiniBlocks: []*block.MiniBlock{ - { - TxHashes: [][]byte{txHash1, txHash2, txHash3}, - Type: block.TxBlock, - }, - { - TxHashes: [][]byte{txHash4}, - Type: block.TxBlock, - }, - { - TxHashes: [][]byte{scHash1, scHash2}, - Type: block.SmartContractResultBlock, - }, - { - TxHashes: [][]byte{scHash3}, - Type: block.SmartContractResultBlock, - }, - { - TxHashes: [][]byte{recHash1, recHash2}, - Type: block.ReceiptBlock, - }, - { - TxHashes: [][]byte{rTx1Hash, rTx2Hash}, - Type: block.RewardsBlock, - }, - { - TxHashes: [][]byte{txHash5}, - Type: block.InvalidBlock, - }, + mbs := []*block.MiniBlock{ + { + TxHashes: [][]byte{txHash1, txHash2, txHash3}, + Type: block.TxBlock, + }, + { + TxHashes: [][]byte{txHash4}, + Type: block.TxBlock, + }, + { + TxHashes: [][]byte{scHash1, scHash2}, + Type: block.SmartContractResultBlock, + }, + { + TxHashes: [][]byte{scHash3}, + Type: block.SmartContractResultBlock, + }, + { + TxHashes: [][]byte{recHash1, recHash2}, + Type: block.ReceiptBlock, + }, + { + TxHashes: [][]byte{rTx1Hash, rTx2Hash}, + Type: block.RewardsBlock, + }, + { + TxHashes: [][]byte{txHash5}, + Type: block.InvalidBlock, }, } + header := &block.Header{} - pool := &outport.Pool{ - Txs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(txHash1): tx1, - string(txHash2): tx2, - string(txHash3): tx3, - string(txHash4): tx4, + pool := &outport.TransactionPool{ + Transactions: map[string]*outport.TxInfo{ + hex.EncodeToString(txHash1): tx1, + hex.EncodeToString(txHash2): tx2, + hex.EncodeToString(txHash3): tx3, + hex.EncodeToString(txHash4): tx4, }, - Scrs: 
map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(scHash1): scResult1, - string(scHash2): scResult2, - string(scHash3): scResult3, + SmartContractResults: map[string]*outport.SCRInfo{ + hex.EncodeToString(scHash1): scResult1, + hex.EncodeToString(scHash2): scResult2, + hex.EncodeToString(scHash3): scResult3, }, - Rewards: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(rTx1Hash): rTx1, - string(rTx2Hash): rTx2, + Rewards: map[string]*outport.RewardInfo{ + hex.EncodeToString(rTx1Hash): rTx1, + hex.EncodeToString(rTx2Hash): rTx2, }, - Invalid: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(txHash5): tx5, + InvalidTxs: map[string]*outport.TxInfo{ + hex.EncodeToString(txHash5): tx5, }, - Receipts: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(recHash1): rec1, - string(recHash2): rec2, + Receipts: map[string]*receipt.Receipt{ + hex.EncodeToString(recHash1): rec1, + hex.EncodeToString(recHash2): rec2, }, } txDbProc, _ := NewTransactionsProcessor(createMockArgsTxsDBProc()) - results := txDbProc.PrepareTransactionsForDatabase(body, header, pool, false, 3) + results := txDbProc.PrepareTransactionsForDatabase(mbs, header, pool, false, 3) assert.Equal(t, 7, len(results.Transactions)) } @@ -177,53 +220,54 @@ func TestRelayedTransactions(t *testing.T) { t.Parallel() txHash1 := []byte("txHash1") - tx1 := outport.NewTransactionHandlerWithGasAndFee(&transaction.Transaction{ - GasLimit: 100, - GasPrice: 100, - Data: []byte("relayedTx@blablabllablalba"), - }, 0, big.NewInt(0)) + tx1 := &outport.TxInfo{ + Transaction: &transaction.Transaction{ + GasLimit: 100, + GasPrice: 100, + Data: []byte("relayedTx@blablabllablalba"), + }, FeeInfo: &outport.FeeInfo{}} scHash1 := []byte("scHash1") - scResult1 := outport.NewTransactionHandlerWithGasAndFee(&smartContractResult.SmartContractResult{ - OriginalTxHash: txHash1, - PrevTxHash: txHash1, - GasLimit: 1, - }, 0, big.NewInt(0)) + scResult1 := &outport.SCRInfo{ + 
SmartContractResult: &smartContractResult.SmartContractResult{ + OriginalTxHash: txHash1, + PrevTxHash: txHash1, + GasLimit: 1, + }, FeeInfo: &outport.FeeInfo{}} scHash2 := []byte("scHash2") - scResult2 := outport.NewTransactionHandlerWithGasAndFee(&smartContractResult.SmartContractResult{ - OriginalTxHash: txHash1, - PrevTxHash: txHash1, - GasLimit: 1, - }, 0, big.NewInt(0)) - - body := &block.Body{ - MiniBlocks: []*block.MiniBlock{ - { - TxHashes: [][]byte{txHash1}, - Type: block.TxBlock, - }, - { - TxHashes: [][]byte{scHash1, scHash2}, - Type: block.SmartContractResultBlock, - }, + scResult2 := &outport.SCRInfo{ + SmartContractResult: &smartContractResult.SmartContractResult{ + OriginalTxHash: txHash1, + PrevTxHash: txHash1, + GasLimit: 1, + }, FeeInfo: &outport.FeeInfo{}} + + mbs := []*block.MiniBlock{ + { + TxHashes: [][]byte{txHash1}, + Type: block.TxBlock, + }, + { + TxHashes: [][]byte{scHash1, scHash2}, + Type: block.SmartContractResultBlock, }, } header := &block.Header{} - pool := &outport.Pool{ - Txs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(txHash1): tx1, + pool := &outport.TransactionPool{ + Transactions: map[string]*outport.TxInfo{ + hex.EncodeToString(txHash1): tx1, }, - Scrs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(scHash1): scResult1, - string(scHash2): scResult2, + SmartContractResults: map[string]*outport.SCRInfo{ + hex.EncodeToString(scHash1): scResult1, + hex.EncodeToString(scHash2): scResult2, }, } txDbProc, _ := NewTransactionsProcessor(createMockArgsTxsDBProc()) - results := txDbProc.PrepareTransactionsForDatabase(body, header, pool, false, 3) + results := txDbProc.PrepareTransactionsForDatabase(mbs, header, pool, false, 3) assert.Equal(t, 1, len(results.Transactions)) assert.Equal(t, 2, len(results.Transactions[0].SmartContractResults)) assert.Equal(t, transaction.TxStatusSuccess.String(), results.Transactions[0].Status) @@ -295,43 +339,45 @@ func TestCheckGasUsedInvalidTransaction(t 
*testing.T) { txDbProc, _ := NewTransactionsProcessor(createMockArgsTxsDBProc()) txHash1 := []byte("txHash1") - tx1 := outport.NewTransactionHandlerWithGasAndFee(&transaction.Transaction{ - GasLimit: 100, - GasPrice: 100, - }, 100, big.NewInt(0)) + tx1 := &outport.TxInfo{ + Transaction: &transaction.Transaction{ + GasLimit: 100, + GasPrice: 100, + }, + FeeInfo: &outport.FeeInfo{ + GasUsed: 100, + }, + } recHash1 := []byte("recHash1") - rec1 := outport.NewTransactionHandlerWithGasAndFee(&receipt.Receipt{ + rec1 := &receipt.Receipt{ Value: big.NewInt(100), TxHash: txHash1, - }, 0, big.NewInt(0)) + } - body := &block.Body{ - MiniBlocks: []*block.MiniBlock{ - { - TxHashes: [][]byte{txHash1}, - Type: block.InvalidBlock, - }, - { - TxHashes: [][]byte{recHash1}, - Type: block.ReceiptBlock, - }, + mbs := []*block.MiniBlock{ + { + TxHashes: [][]byte{txHash1}, + Type: block.InvalidBlock, + }, + { + TxHashes: [][]byte{recHash1}, + Type: block.ReceiptBlock, }, } - header := &block.Header{} - pool := &outport.Pool{ - Invalid: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(txHash1): tx1, + pool := &outport.TransactionPool{ + InvalidTxs: map[string]*outport.TxInfo{ + hex.EncodeToString(txHash1): tx1, }, - Receipts: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(recHash1): rec1, + Receipts: map[string]*receipt.Receipt{ + hex.EncodeToString(recHash1): rec1, }, } - results := txDbProc.PrepareTransactionsForDatabase(body, header, pool, false, 3) + results := txDbProc.PrepareTransactionsForDatabase(mbs, header, pool, false, 3) require.Len(t, results.Transactions, 1) - require.Equal(t, tx1.GetGasLimit(), results.Transactions[0].GasUsed) + require.Equal(t, tx1.Transaction.GetGasLimit(), results.Transactions[0].GasUsed) } func TestGetRewardsTxsHashesHexEncoded(t *testing.T) { @@ -421,41 +467,45 @@ func TestTxsDatabaseProcessor_PrepareTransactionsForDatabaseInvalidTxWithSCR(t * txDbProc, _ := NewTransactionsProcessor(createMockArgsTxsDBProc()) txHash1 
:= []byte("txHash1") - tx1 := outport.NewTransactionHandlerWithGasAndFee(&transaction.Transaction{ - GasLimit: 100, - GasPrice: 123456, - Data: []byte("ESDTTransfer@54474e2d383862383366@0a"), - }, 100, big.NewInt(0)) + tx1 := &outport.TxInfo{ + Transaction: &transaction.Transaction{ + GasLimit: 100, + GasPrice: 123456, + Data: []byte("ESDTTransfer@54474e2d383862383366@0a"), + }, + FeeInfo: &outport.FeeInfo{GasUsed: 100}, + } scResHash1 := []byte("scResHash1") - scRes1 := outport.NewTransactionHandlerWithGasAndFee(&smartContractResult.SmartContractResult{ - OriginalTxHash: txHash1, - }, 0, big.NewInt(0)) + scRes1 := &outport.SCRInfo{ + SmartContractResult: &smartContractResult.SmartContractResult{ + OriginalTxHash: txHash1, + }, + FeeInfo: &outport.FeeInfo{}, + } - body := &block.Body{ - MiniBlocks: []*block.MiniBlock{ - { - TxHashes: [][]byte{txHash1}, - Type: block.InvalidBlock, - }, - { - TxHashes: [][]byte{scResHash1}, - Type: block.SmartContractResultBlock, - }, + mbs := []*block.MiniBlock{ + { + TxHashes: [][]byte{txHash1}, + Type: block.InvalidBlock, + }, + { + TxHashes: [][]byte{scResHash1}, + Type: block.SmartContractResultBlock, }, } header := &block.Header{} - pool := &outport.Pool{ - Invalid: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(txHash1): tx1, + pool := &outport.TransactionPool{ + InvalidTxs: map[string]*outport.TxInfo{ + hex.EncodeToString(txHash1): tx1, }, - Scrs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(scResHash1): scRes1, + SmartContractResults: map[string]*outport.SCRInfo{ + hex.EncodeToString(scResHash1): scRes1, }, } - results := txDbProc.PrepareTransactionsForDatabase(body, header, pool, false, 3) + results := txDbProc.PrepareTransactionsForDatabase(mbs, header, pool, false, 3) require.NotNil(t, results) require.Len(t, results.Transactions, 1) require.Len(t, results.ScResults, 1) @@ -472,44 +522,47 @@ func TestTxsDatabaseProcessor_PrepareTransactionsForDatabaseESDTNFTTransfer(t *t txDbProc, _ 
:= NewTransactionsProcessor(createMockArgsTxsDBProc()) txHash1 := []byte("txHash1") - tx1 := outport.NewTransactionHandlerWithGasAndFee(&transaction.Transaction{ - GasLimit: 100, - GasPrice: 123456, - Data: []byte("ESDTNFTTransfer@595959453643392D303837363661@01@01@000000000000000005005C83E0C42EDCE394F40B24D29D298B0249C41F028974@66756E64@890479AFC610F4BEBC087D3ADA3F7C2775C736BBA91F41FD3D65092AA482D8B0@1c20"), - }, 100, big.NewInt(0)) + tx1 := &outport.TxInfo{ + Transaction: &transaction.Transaction{ + GasLimit: 100, + GasPrice: 123456, + Data: []byte("ESDTNFTTransfer@595959453643392D303837363661@01@01@000000000000000005005C83E0C42EDCE394F40B24D29D298B0249C41F028974@66756E64@890479AFC610F4BEBC087D3ADA3F7C2775C736BBA91F41FD3D65092AA482D8B0@1c20"), + }, + FeeInfo: &outport.FeeInfo{GasUsed: 100}, + } scResHash1 := []byte("scResHash1") - scRes1 := outport.NewTransactionHandlerWithGasAndFee(&smartContractResult.SmartContractResult{ - Nonce: 1, - Data: []byte("@" + okHexEncoded), - OriginalTxHash: txHash1, - PrevTxHash: txHash1, - }, 0, big.NewInt(0)) + scRes1 := &outport.SCRInfo{ + SmartContractResult: &smartContractResult.SmartContractResult{ + Nonce: 1, + Data: []byte("@" + okHexEncoded), + OriginalTxHash: txHash1, + PrevTxHash: txHash1, + }, + FeeInfo: &outport.FeeInfo{}} - body := &block.Body{ - MiniBlocks: []*block.MiniBlock{ - { - TxHashes: [][]byte{txHash1}, - Type: block.TxBlock, - }, - { - TxHashes: [][]byte{scResHash1}, - Type: block.SmartContractResultBlock, - }, + mbs := []*block.MiniBlock{ + { + TxHashes: [][]byte{txHash1}, + Type: block.TxBlock, + }, + { + TxHashes: [][]byte{scResHash1}, + Type: block.SmartContractResultBlock, }, } header := &block.Header{} - pool := &outport.Pool{ - Txs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - string(txHash1): tx1, + pool := &outport.TransactionPool{ + Transactions: map[string]*outport.TxInfo{ + hex.EncodeToString(txHash1): tx1, }, - Scrs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ + 
SmartContractResults: map[string]*outport.SCRInfo{ string(scResHash1): scRes1, }, } - results := txDbProc.PrepareTransactionsForDatabase(body, header, pool, false, 3) + results := txDbProc.PrepareTransactionsForDatabase(mbs, header, pool, false, 3) require.NotNil(t, results) require.Len(t, results.Transactions, 1) require.Len(t, results.ScResults, 1) @@ -524,7 +577,7 @@ func TestTxsDatabaseProcessor_IssueESDTTx(t *testing.T) { t.Parallel() args := createMockArgsTxsDBProc() - pubKeyConv, _ := pubkeyConverter.NewBech32PubkeyConverter(32, log) + pubKeyConv, _ := pubkeyConverter.NewBech32PubkeyConverter(32, "erd") args.AddressPubkeyConverter = pubKeyConv txDbProc, _ := NewTransactionsProcessor(args) @@ -534,71 +587,69 @@ func TestTxsDatabaseProcessor_IssueESDTTx(t *testing.T) { } // transaction success - body := &block.Body{ - MiniBlocks: []*block.MiniBlock{ - { - TxHashes: [][]byte{[]byte("t1")}, - Type: block.TxBlock, - SenderShardID: 0, - ReceiverShardID: core.MetachainShardId, - }, - { - TxHashes: [][]byte{[]byte("scr1"), []byte("scr2")}, - Type: block.SmartContractResultBlock, - }, + mbs := []*block.MiniBlock{ + { + TxHashes: [][]byte{[]byte("t1")}, + Type: block.TxBlock, + SenderShardID: 0, + ReceiverShardID: core.MetachainShardId, + }, + { + TxHashes: [][]byte{[]byte("scr1"), []byte("scr2")}, + Type: block.SmartContractResultBlock, }, } header := &block.Header{ ShardID: core.MetachainShardId, } - pool := &outport.Pool{ - Txs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - "t1": outport.NewTransactionHandlerWithGasAndFee(&transaction.Transaction{ + pool := &outport.TransactionPool{ + Transactions: map[string]*outport.TxInfo{ + hex.EncodeToString([]byte("t1")): {Transaction: &transaction.Transaction{ SndAddr: decodeBech32("erd1dglncxk6sl9a3xumj78n6z2xux4ghp5c92cstv5zsn56tjgtdwpsk46qrs"), RcvAddr: decodeBech32("erd1qqqqqqqqqqqqqqqpqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqzllls8a5w6u"), Data: 
[]byte("issue@4141414141@41414141414141@0186a0@01@63616e467265657a65@74727565@63616e57697065@74727565@63616e5061757365@74727565@63616e4d696e74@74727565@63616e4275726e@74727565@63616e4368616e67654f776e6572@74727565@63616e55706772616465@74727565"), - }, 0, big.NewInt(0)), + }, FeeInfo: &outport.FeeInfo{}}, }, - Scrs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - "scr1": outport.NewTransactionHandlerWithGasAndFee(&smartContractResult.SmartContractResult{ + SmartContractResults: map[string]*outport.SCRInfo{ + hex.EncodeToString([]byte("scr1")): {SmartContractResult: &smartContractResult.SmartContractResult{ OriginalTxHash: []byte("t1"), Data: []byte("ESDTTransfer@414141414141412d323436626461@0186a0"), SndAddr: decodeBech32("erd1qqqqqqqqqqqqqqqpqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqzllls8a5w6u"), RcvAddr: decodeBech32("erd1dglncxk6sl9a3xumj78n6z2xux4ghp5c92cstv5zsn56tjgtdwpsk46qrs"), - }, 0, big.NewInt(0)), - "scr2": outport.NewTransactionHandlerWithGasAndFee(&smartContractResult.SmartContractResult{ + }, FeeInfo: &outport.FeeInfo{}}, + hex.EncodeToString([]byte("scr2")): {SmartContractResult: &smartContractResult.SmartContractResult{ OriginalTxHash: []byte("t1"), Data: []byte("@6f6b"), SndAddr: decodeBech32("erd1qqqqqqqqqqqqqqqpqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqzllls8a5w6u"), RcvAddr: decodeBech32("erd1dglncxk6sl9a3xumj78n6z2xux4ghp5c92cstv5zsn56tjgtdwpsk46qrs"), - }, 0, big.NewInt(0)), + }, FeeInfo: &outport.FeeInfo{}}, }, } - res := txDbProc.PrepareTransactionsForDatabase(body, header, pool, false, 3) + res := txDbProc.PrepareTransactionsForDatabase(mbs, header, pool, false, 3) require.Equal(t, "success", res.Transactions[0].Status) require.Equal(t, 2, len(res.ScResults)) // transaction fail - pool = &outport.Pool{ - Txs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - "t1": outport.NewTransactionHandlerWithGasAndFee(&transaction.Transaction{ + pool = &outport.TransactionPool{ + Transactions: map[string]*outport.TxInfo{ + 
hex.EncodeToString([]byte("t1")): {Transaction: &transaction.Transaction{ SndAddr: decodeBech32("erd1dglncxk6sl9a3xumj78n6z2xux4ghp5c92cstv5zsn56tjgtdwpsk46qrs"), RcvAddr: decodeBech32("erd1qqqqqqqqqqqqqqqpqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqzllls8a5w6u"), Data: []byte("issue@4141414141@41414141414141@0186a0@01@63616e467265657a65@74727565@63616e57697065@74727565@63616e5061757365@74727565@63616e4d696e74@74727565@63616e4275726e@74727565@63616e4368616e67654f776e6572@74727565@63616e55706772616465@74727565"), - }, 0, big.NewInt(0)), + }, FeeInfo: &outport.FeeInfo{}}, }, - Scrs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - "scr1": outport.NewTransactionHandlerWithGasAndFee(&smartContractResult.SmartContractResult{ + SmartContractResults: map[string]*outport.SCRInfo{ + hex.EncodeToString([]byte("scr1")): {SmartContractResult: &smartContractResult.SmartContractResult{ OriginalTxHash: []byte("t1"), Data: []byte("75736572206572726f72"), SndAddr: decodeBech32("erd1qqqqqqqqqqqqqqqpqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqzllls8a5w6u"), RcvAddr: decodeBech32("erd1dglncxk6sl9a3xumj78n6z2xux4ghp5c92cstv5zsn56tjgtdwpsk46qrs"), - }, 0, big.NewInt(0)), + }, FeeInfo: &outport.FeeInfo{}}, }, } - res = txDbProc.PrepareTransactionsForDatabase(body, header, pool, false, 3) - require.Equal(t, "fail", res.Transactions[0].Status) + res = txDbProc.PrepareTransactionsForDatabase(mbs, header, pool, false, 3) + require.Equal(t, "success", res.Transactions[0].Status) require.Equal(t, 1, len(res.ScResults)) } diff --git a/process/elasticproc/updateTokenType.go b/process/elasticproc/updateTokenType.go index fe14e162..18eded2e 100644 --- a/process/elasticproc/updateTokenType.go +++ b/process/elasticproc/updateTokenType.go @@ -1,16 +1,18 @@ package elasticproc import ( + "context" "encoding/json" "fmt" "time" "github.com/multiversx/mx-chain-core-go/core" + "github.com/multiversx/mx-chain-es-indexer-go/core/request" "github.com/multiversx/mx-chain-es-indexer-go/data" elasticIndexer 
"github.com/multiversx/mx-chain-es-indexer-go/process/dataindexer" ) -func (ei *elasticProcessor) indexTokens(tokensData []*data.TokenInfo, updateNFTData []*data.NFTDataUpdate, buffSlice *data.BufferSlice) error { +func (ei *elasticProcessor) indexTokens(tokensData []*data.TokenInfo, updateNFTData []*data.NFTDataUpdate, buffSlice *data.BufferSlice, shardID uint32) error { err := ei.prepareAndAddSerializedDataForTokens(tokensData, updateNFTData, buffSlice, elasticIndexer.ESDTsIndex) if err != nil { return err @@ -20,12 +22,12 @@ func (ei *elasticProcessor) indexTokens(tokensData []*data.TokenInfo, updateNFTD return err } - err = ei.addTokenType(tokensData, elasticIndexer.AccountsESDTIndex) + err = ei.addTokenType(tokensData, elasticIndexer.AccountsESDTIndex, shardID) if err != nil { return err } - return ei.addTokenType(tokensData, elasticIndexer.TokensIndex) + return ei.addTokenType(tokensData, elasticIndexer.TokensIndex, shardID) } func (ei *elasticProcessor) prepareAndAddSerializedDataForTokens(tokensData []*data.TokenInfo, updateNFTData []*data.NFTDataUpdate, buffSlice *data.BufferSlice, index string) error { @@ -36,7 +38,7 @@ func (ei *elasticProcessor) prepareAndAddSerializedDataForTokens(tokensData []*d return ei.logsAndEventsProc.SerializeTokens(tokensData, updateNFTData, buffSlice, index) } -func (ei *elasticProcessor) addTokenType(tokensData []*data.TokenInfo, index string) error { +func (ei *elasticProcessor) addTokenType(tokensData []*data.TokenInfo, index string, shardID uint32) error { if len(tokensData) == 0 { return nil } @@ -68,16 +70,18 @@ func (ei *elasticProcessor) addTokenType(tokensData []*data.TokenInfo, index str return err } - return ei.doBulkRequests(index, buffSlice.Buffers()) + return ei.doBulkRequests(index, buffSlice.Buffers(), shardID) } + ctxWithValue := context.WithValue(context.Background(), request.ContextKey, request.ExtendTopicWithShardID(request.GetTopic, shardID)) query := fmt.Sprintf(`{"query": {"bool": {"must": [{"match": 
{"token": {"query": "%s","operator": "AND"}}}],"must_not":[{"exists": {"field": "type"}}]}}}`, td.Token) - resultsCount, err := ei.elasticClient.DoCountRequest(index, []byte(query)) + resultsCount, err := ei.elasticClient.DoCountRequest(ctxWithValue, index, []byte(query)) if err != nil || resultsCount == 0 { return err } - err = ei.elasticClient.DoScrollRequest(index, []byte(query), false, handlerFunc) + ctxWithValue = context.WithValue(context.Background(), request.ContextKey, request.ExtendTopicWithShardID(request.ScrollTopic, shardID)) + err = ei.elasticClient.DoScrollRequest(ctxWithValue, index, []byte(query), false, handlerFunc) if err != nil { return err } diff --git a/process/elasticproc/validators/serialize.go b/process/elasticproc/validators/serialize.go index eb662a5d..f683b839 100644 --- a/process/elasticproc/validators/serialize.go +++ b/process/elasticproc/validators/serialize.go @@ -4,43 +4,24 @@ import ( "bytes" "encoding/json" "fmt" - "strings" + "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-es-indexer-go/data" ) -// SerializeValidatorsPubKeys will serialize validators public keys -func (vp *validatorsProcessor) SerializeValidatorsPubKeys(validatorsPubKeys *data.ValidatorsPublicKeys) (*bytes.Buffer, error) { - marshalizedValidatorPubKeys, err := json.Marshal(validatorsPubKeys) - if err != nil { - return nil, err - } - - buffer := &bytes.Buffer{} - buffer.Grow(len(marshalizedValidatorPubKeys)) - _, err = buffer.Write(marshalizedValidatorPubKeys) - if err != nil { - return nil, err - } - - return buffer, nil -} - // SerializeValidatorsRating will serialize validators rating -func (vp *validatorsProcessor) SerializeValidatorsRating( - index string, - validatorsRatingInfo []*data.ValidatorRatingInfo, -) ([]*bytes.Buffer, error) { +func (vp *validatorsProcessor) SerializeValidatorsRating(ratingData *outport.ValidatorsRating) ([]*bytes.Buffer, error) { buffSlice := data.NewBufferSlice(vp.bulkSizeMaxSize) - // inside 
mx-chain-go, the index is "shardID_epoch" so in order to keep backwards compatibility some adjustments have to be made. - // shardID from index name has to be removed because it is sufficient to have document id = blsKey_epoch - indexWithoutShardID := removeShardIDFromIndex(index) - for _, valRatingInfo := range validatorsRatingInfo { - id := fmt.Sprintf("%s_%s", valRatingInfo.PublicKey, indexWithoutShardID) + for _, ratingInfo := range ratingData.ValidatorsRatingInfo { + id := fmt.Sprintf("%s_%d", ratingInfo.PublicKey, ratingData.Epoch) meta := []byte(fmt.Sprintf(`{ "index" : { "_id" : "%s" } }%s`, id, "\n")) - serializedData, err := json.Marshal(valRatingInfo) + validatorRatingInfo := &data.ValidatorRatingInfo{ + PublicKey: ratingInfo.PublicKey, + Rating: ratingInfo.Rating, + } + serializedData, err := json.Marshal(validatorRatingInfo) if err != nil { continue } @@ -53,12 +34,3 @@ func (vp *validatorsProcessor) SerializeValidatorsRating( return buffSlice.Buffers(), nil } - -func removeShardIDFromIndex(index string) string { - splitIndex := strings.Split(index, "_") - if len(splitIndex) == 2 { - return splitIndex[1] - } - - return index -} diff --git a/process/elasticproc/validators/serialize_test.go b/process/elasticproc/validators/serialize_test.go index a05653b6..8ebea109 100644 --- a/process/elasticproc/validators/serialize_test.go +++ b/process/elasticproc/validators/serialize_test.go @@ -3,36 +3,28 @@ package validators import ( "testing" - "github.com/multiversx/mx-chain-es-indexer-go/data" + "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/stretchr/testify/require" ) -func TestValidatorsProcessor_SerializeValidatorsPubKeys(t *testing.T) { - t.Parallel() - - validatorsPubKeys := &data.ValidatorsPublicKeys{ - PublicKeys: []string{"bls1", "bls2"}, - } - buff, err := (&validatorsProcessor{}).SerializeValidatorsPubKeys(validatorsPubKeys) - require.Nil(t, err) - - expected := `{"publicKeys":["bls1","bls2"]}` - require.Equal(t, expected, 
buff.String()) -} - func TestValidatorsProcessor_SerializeValidatorsRating(t *testing.T) { t.Parallel() - buff, err := (&validatorsProcessor{}).SerializeValidatorsRating("0", []*data.ValidatorRatingInfo{ - { - PublicKey: "bls1", - Rating: 50.1, - }, - { - PublicKey: "bls3", - Rating: 50.2, + ratingInfo := &outport.ValidatorsRating{ + ShardID: 0, + Epoch: 0, + ValidatorsRatingInfo: []*outport.ValidatorRatingInfo{ + { + PublicKey: "bls1", + Rating: 50.1, + }, + { + PublicKey: "bls3", + Rating: 50.2, + }, }, - }) + } + buff, err := (&validatorsProcessor{}).SerializeValidatorsRating(ratingInfo) require.Nil(t, err) expected := `{ "index" : { "_id" : "bls1_0" } } {"rating":50.1} @@ -41,16 +33,3 @@ func TestValidatorsProcessor_SerializeValidatorsRating(t *testing.T) { ` require.Equal(t, expected, buff[0].String()) } - -func TestRemoveShardIDFromIndex(t *testing.T) { - t.Parallel() - - index := "2_1230" - require.Equal(t, "1230", removeShardIDFromIndex(index)) - - index = "1230" - require.Equal(t, index, removeShardIDFromIndex(index)) - - index = "4294967295_0" - require.Equal(t, "0", removeShardIDFromIndex(index)) -} diff --git a/process/elasticproc/validators/validatorsProcessor.go b/process/elasticproc/validators/validatorsProcessor.go index ee4e2b4c..4931b7ff 100644 --- a/process/elasticproc/validators/validatorsProcessor.go +++ b/process/elasticproc/validators/validatorsProcessor.go @@ -1,8 +1,13 @@ package validators import ( + "bytes" + "encoding/json" + "fmt" + "github.com/multiversx/mx-chain-core-go/core" "github.com/multiversx/mx-chain-core-go/core/check" + "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-es-indexer-go/data" "github.com/multiversx/mx-chain-es-indexer-go/process/dataindexer" ) @@ -24,17 +29,43 @@ func NewValidatorsProcessor(validatorPubkeyConverter core.PubkeyConverter, bulkS }, nil } -// PrepareValidatorsPublicKeys will prepare validators public keys -func (vp *validatorsProcessor) 
PrepareValidatorsPublicKeys(shardValidatorsPubKeys [][]byte) *data.ValidatorsPublicKeys { - validatorsPubKeys := &data.ValidatorsPublicKeys{ +// PrepareAnSerializeValidatorsPubKeys will prepare validators public keys and serialize them +func (vp *validatorsProcessor) PrepareAnSerializeValidatorsPubKeys(validatorsPubKeys *outport.ValidatorsPubKeys) ([]*bytes.Buffer, error) { + buffSlice := data.NewBufferSlice(vp.bulkSizeMaxSize) + + for shardID, validatorPk := range validatorsPubKeys.ShardValidatorsPubKeys { + err := vp.prepareAndSerializeValidatorsKeysForShard(shardID, validatorsPubKeys.Epoch, validatorPk.Keys, buffSlice) + if err != nil { + return nil, err + } + } + + return buffSlice.Buffers(), nil +} + +func (vp *validatorsProcessor) prepareAndSerializeValidatorsKeysForShard(shardID uint32, epoch uint32, keys [][]byte, buffSlice *data.BufferSlice) error { + preparedValidatorsPubKeys := &data.ValidatorsPublicKeys{ PublicKeys: make([]string, 0), } - for _, validatorPk := range shardValidatorsPubKeys { - strValidatorPk := vp.validatorPubkeyConverter.Encode(validatorPk) + for _, key := range keys { + // it will never throw an error here + strValidatorPk, _ := vp.validatorPubkeyConverter.Encode(key) + preparedValidatorsPubKeys.PublicKeys = append(preparedValidatorsPubKeys.PublicKeys, strValidatorPk) + } + + id := fmt.Sprintf("%d_%d", shardID, epoch) + meta := []byte(fmt.Sprintf(`{ "index" : { "_id" : "%s" } }%s`, id, "\n")) + + serializedData, err := json.Marshal(preparedValidatorsPubKeys) + if err != nil { + return err + } - validatorsPubKeys.PublicKeys = append(validatorsPubKeys.PublicKeys, strValidatorPk) + err = buffSlice.PutData(meta, serializedData) + if err != nil { + return err } - return validatorsPubKeys + return nil } diff --git a/process/elasticproc/validators/validatorsProcessor_test.go b/process/elasticproc/validators/validatorsProcessor_test.go index 078b2433..adb8154b 100644 --- a/process/elasticproc/validators/validatorsProcessor_test.go +++ 
b/process/elasticproc/validators/validatorsProcessor_test.go @@ -3,7 +3,7 @@ package validators import ( "testing" - "github.com/multiversx/mx-chain-es-indexer-go/data" + "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-es-indexer-go/mock" "github.com/multiversx/mx-chain-es-indexer-go/process/dataindexer" "github.com/stretchr/testify/require" @@ -17,18 +17,22 @@ func TestNewValidatorsProcessor(t *testing.T) { require.Equal(t, dataindexer.ErrNilPubkeyConverter, err) } -func TestValidatorsProcessor_PrepareValidatorsPublicKeys(t *testing.T) { +func TestValidatorsProcessor_PrepareAnSerializeValidatorsPubKeys(t *testing.T) { t.Parallel() - vp, _ := NewValidatorsProcessor(&mock.PubkeyConverterMock{}, 0) + vp, err := NewValidatorsProcessor(&mock.PubkeyConverterMock{}, 0) + require.Nil(t, err) - blsKeys := [][]byte{ - []byte("key1"), []byte("key2"), - } - res := vp.PrepareValidatorsPublicKeys(blsKeys) - require.Equal(t, &data.ValidatorsPublicKeys{ - PublicKeys: []string{ - "6b657931", "6b657932", + validators := &outport.ValidatorsPubKeys{ + Epoch: 30, + ShardValidatorsPubKeys: map[uint32]*outport.PubKeys{ + 0: {Keys: [][]byte{[]byte("k1"), []byte("k2")}}, }, - }, res) + } + res, err := vp.PrepareAnSerializeValidatorsPubKeys(validators) + require.Nil(t, err) + require.Len(t, res, 1) + require.Equal(t, `{ "index" : { "_id" : "0_30" } } +{"publicKeys":["6b31","6b32"]} +`, res[0].String()) } diff --git a/process/factory/indexerFactory.go b/process/factory/indexerFactory.go index 8b1449c1..0666edf7 100644 --- a/process/factory/indexerFactory.go +++ b/process/factory/indexerFactory.go @@ -9,11 +9,15 @@ import ( "github.com/elastic/go-elasticsearch/v7" "github.com/multiversx/mx-chain-core-go/core" "github.com/multiversx/mx-chain-core-go/core/check" + "github.com/multiversx/mx-chain-core-go/data/block" "github.com/multiversx/mx-chain-core-go/hashing" "github.com/multiversx/mx-chain-core-go/marshal" 
"github.com/multiversx/mx-chain-es-indexer-go/client" "github.com/multiversx/mx-chain-es-indexer-go/client/logging" + "github.com/multiversx/mx-chain-es-indexer-go/client/transport" + indexerCore "github.com/multiversx/mx-chain-es-indexer-go/core" "github.com/multiversx/mx-chain-es-indexer-go/process/dataindexer" + "github.com/multiversx/mx-chain-es-indexer-go/process/elasticproc" "github.com/multiversx/mx-chain-es-indexer-go/process/elasticproc/factory" logger "github.com/multiversx/mx-chain-logger-go" ) @@ -25,7 +29,7 @@ var log = logger.GetOrCreate("indexer/factory") type ArgsIndexerFactory struct { Enabled bool UseKibana bool - IndexerCacheSize int + ImportDB bool Denomination int BulkRequestMaxSize int Url string @@ -33,10 +37,12 @@ type ArgsIndexerFactory struct { Password string TemplatesPath string EnabledIndexes []string + HeaderMarshaller marshal.Marshalizer Marshalizer marshal.Marshalizer Hasher hashing.Hasher AddressPubkeyConverter core.PubkeyConverter ValidatorPubkeyConverter core.PubkeyConverter + StatusMetrics indexerCore.StatusMetricsHandler } // NewIndexer will create a new instance of Indexer @@ -51,17 +57,15 @@ func NewIndexer(args ArgsIndexerFactory) (dataindexer.Indexer, error) { return nil, err } - dispatcher, err := dataindexer.NewDataDispatcher(args.IndexerCacheSize) + blockContainer, err := createBlockCreatorsContainer() if err != nil { return nil, err } - dispatcher.StartIndexData() - arguments := dataindexer.ArgDataIndexer{ - Marshalizer: args.Marshalizer, + HeaderMarshaller: args.HeaderMarshaller, ElasticProcessor: elasticProcessor, - DataDispatcher: dispatcher, + BlockContainer: blockContainer, } return dataindexer.NewDataIndexer(arguments) @@ -75,14 +79,7 @@ func retryBackOff(attempt int) time.Duration { } func createElasticProcessor(args ArgsIndexerFactory) (dataindexer.ElasticProcessor, error) { - databaseClient, err := client.NewElasticClient(elasticsearch.Config{ - Addresses: []string{args.Url}, - Username: args.UserName, - 
Password: args.Password, - Logger: &logging.CustomLogger{}, - RetryOnStatus: []int{http.StatusConflict}, - RetryBackoff: retryBackOff, - }) + databaseClient, err := createElasticClient(args) if err != nil { return nil, err } @@ -97,15 +94,36 @@ func createElasticProcessor(args ArgsIndexerFactory) (dataindexer.ElasticProcess Denomination: args.Denomination, EnabledIndexes: args.EnabledIndexes, BulkRequestMaxSize: args.BulkRequestMaxSize, + ImportDB: args.ImportDB, } return factory.CreateElasticProcessor(argsElasticProcFac) } -func checkDataIndexerParams(arguments ArgsIndexerFactory) error { - if arguments.IndexerCacheSize < 0 { - return dataindexer.ErrNegativeCacheSize +func createElasticClient(args ArgsIndexerFactory) (elasticproc.DatabaseClientHandler, error) { + argsEsClient := elasticsearch.Config{ + Addresses: []string{args.Url}, + Username: args.UserName, + Password: args.Password, + Logger: &logging.CustomLogger{}, + RetryOnStatus: []int{http.StatusConflict}, + RetryBackoff: retryBackOff, } + + if check.IfNil(args.StatusMetrics) { + return client.NewElasticClient(argsEsClient) + } + + transportMetrics, err := transport.NewMetricsTransport(args.StatusMetrics) + if err != nil { + return nil, err + } + argsEsClient.Transport = transportMetrics + + return client.NewElasticClient(argsEsClient) +} + +func checkDataIndexerParams(arguments ArgsIndexerFactory) error { if check.IfNil(arguments.AddressPubkeyConverter) { return fmt.Errorf("%w when setting AddressPubkeyConverter in indexer", dataindexer.ErrNilPubkeyConverter) } @@ -121,6 +139,27 @@ func checkDataIndexerParams(arguments ArgsIndexerFactory) error { if check.IfNil(arguments.Hasher) { return dataindexer.ErrNilHasher } + if check.IfNil(arguments.HeaderMarshaller) { + return fmt.Errorf("%w: header marshaller", dataindexer.ErrNilMarshalizer) + } return nil } + +func createBlockCreatorsContainer() (dataindexer.BlockContainerHandler, error) { + container := block.NewEmptyBlockCreatorsContainer() + err := 
container.Add(core.ShardHeaderV1, block.NewEmptyHeaderCreator()) + if err != nil { + return nil, err + } + err = container.Add(core.ShardHeaderV2, block.NewEmptyHeaderV2Creator()) + if err != nil { + return nil, err + } + err = container.Add(core.MetaHeader, block.NewEmptyMetaBlockCreator()) + if err != nil { + return nil, err + } + + return container, nil +} diff --git a/process/factory/indexerFactory_test.go b/process/factory/indexerFactory_test.go index 43043541..b6b35110 100644 --- a/process/factory/indexerFactory_test.go +++ b/process/factory/indexerFactory_test.go @@ -16,11 +16,11 @@ func createMockIndexerFactoryArgs() ArgsIndexerFactory { return ArgsIndexerFactory{ Enabled: true, - IndexerCacheSize: 100, Url: ts.URL, UserName: "", Password: "", Marshalizer: &mock.MarshalizerMock{}, + HeaderMarshaller: &mock.MarshalizerMock{}, Hasher: &mock.HasherMock{}, AddressPubkeyConverter: mock.NewPubkeyConverterMock(32), ValidatorPubkeyConverter: &mock.PubkeyConverterMock{}, @@ -35,15 +35,6 @@ func TestNewIndexerFactory(t *testing.T) { argsFunc func() ArgsIndexerFactory exError error }{ - { - name: "InvalidCacheSize", - argsFunc: func() ArgsIndexerFactory { - args := createMockIndexerFactoryArgs() - args.IndexerCacheSize = -1 - return args - }, - exError: dataindexer.ErrNegativeCacheSize, - }, { name: "NilAddressPubkeyConverter", argsFunc: func() ArgsIndexerFactory { @@ -80,6 +71,15 @@ func TestNewIndexerFactory(t *testing.T) { }, exError: dataindexer.ErrNilHasher, }, + { + name: "NilHeaderMarshaller", + argsFunc: func() ArgsIndexerFactory { + args := createMockIndexerFactoryArgs() + args.HeaderMarshaller = nil + return args + }, + exError: dataindexer.ErrNilMarshalizer, + }, { name: "EmptyUrl", argsFunc: func() ArgsIndexerFactory { @@ -116,7 +116,6 @@ func TestIndexerFactoryCreate_ElasticIndexer(t *testing.T) { err = elasticIndexer.Close() require.NoError(t, err) - require.False(t, elasticIndexer.IsNilIndexer()) err = elasticIndexer.Close() require.NoError(t, err) diff 
--git a/process/wsclient/client.go b/process/wsclient/client.go deleted file mode 100644 index c1c3a105..00000000 --- a/process/wsclient/client.go +++ /dev/null @@ -1,174 +0,0 @@ -package wsclient - -import ( - "fmt" - "io" - "net/url" - "strings" - "time" - - "github.com/gorilla/websocket" - "github.com/multiversx/mx-chain-core-go/core/check" - "github.com/multiversx/mx-chain-core-go/data/typeConverters/uint64ByteSlice" - "github.com/multiversx/mx-chain-core-go/websocketOutportDriver" - "github.com/multiversx/mx-chain-core-go/websocketOutportDriver/data" - logger "github.com/multiversx/mx-chain-logger-go" -) - -const closedConnection = "use of closed network connection" - -type operationsHandler interface { - GetOperationsMap() map[data.OperationType]func(marshalledData []byte) error - Close() error -} - -type wsConn interface { - io.Closer - ReadMessage() (messageType int, p []byte, err error) - WriteMessage(messageType int, data []byte) error -} - -var ( - log = logger.GetOrCreate("process/wsclient") - retryDuration = time.Second * 5 -) - -type client struct { - urlReceive string - closeActions func() error - actions map[data.OperationType]func(marshalledData []byte) error - uint64ByteSliceConverter websocketOutportDriver.Uint64ByteSliceConverter - wsConnection wsConn -} - -// New will create a new instance of websocket client -func New( - urlReceive string, - operationsHandler operationsHandler, -) (*client, error) { - urlReceiveData := url.URL{Scheme: "ws", Host: urlReceive, Path: data.WSRoute} - - return &client{ - actions: operationsHandler.GetOperationsMap(), - closeActions: operationsHandler.Close, - urlReceive: urlReceiveData.String(), - uint64ByteSliceConverter: uint64ByteSlice.NewBigEndianConverter(), - }, nil -} - -// Start will initialize the connection to the server and start to listen for messages -func (c *client) Start() { - log.Info("connecting to", "url", c.urlReceive) - - for { - err := c.openConnection() - if err != nil { - 
log.Warn(fmt.Sprintf("c.openConnection(), retrying in %v...", retryDuration), "error", err.Error()) - time.Sleep(retryDuration) - continue - } - - closed := c.listeningOnWebSocket() - if closed { - return - } - } -} - -func (c *client) openConnection() error { - var err error - c.wsConnection, _, err = websocket.DefaultDialer.Dial(c.urlReceive, nil) - if err != nil { - return err - } - - return nil -} - -func (c *client) listeningOnWebSocket() (closed bool) { - for { - _, message, err := c.wsConnection.ReadMessage() - if err == nil { - c.verifyPayloadAndSendAckIfNeeded(message) - continue - } - - _, isConnectionClosed := err.(*websocket.CloseError) - if !isConnectionClosed { - if strings.Contains(err.Error(), closedConnection) { - return true - } - log.Warn("c.listeningOnWebSocket()-> connection problem, retrying", "error", err.Error()) - } else { - log.Warn(fmt.Sprintf("websocket terminated by the server side, retrying in %v...", retryDuration), "error", err.Error()) - } - return - } - -} - -func (c *client) verifyPayloadAndSendAckIfNeeded(payload []byte) { - if len(payload) == 0 { - log.Error("empty payload") - return - } - - payloadParser, _ := websocketOutportDriver.NewWebSocketPayloadParser(c.uint64ByteSliceConverter) - payloadData, err := payloadParser.ExtractPayloadData(payload) - if err != nil { - log.Error("error while extracting payload data: " + err.Error()) - return - } - - log.Info("processing payload", - "counter", payloadData.Counter, - "operation type", payloadData.OperationType, - "message length", len(payloadData.Payload), - ) - - function, ok := c.actions[payloadData.OperationType] - if !ok { - log.Warn("invalid operation", "operation type", payloadData.OperationType.String()) - } - - err = function(payloadData.Payload) - if err != nil { - log.Error("something went wrong", "error", err.Error()) - } - - if payloadData.WithAcknowledge { - counterBytes := c.uint64ByteSliceConverter.ToByteSlice(payloadData.Counter) - err = 
c.wsConnection.WriteMessage(websocket.BinaryMessage, counterBytes) - if err != nil { - log.Error("write acknowledge message", "error", err.Error()) - } - } -} - -func (c *client) closeWsConnection() { - log.Debug("closing ws connection...") - if check.IfNilReflect(c.wsConnection) { - return - } - - //Cleanly close the connection by sending a close message and then - //waiting (with timeout) for the server to close the connection. - err := c.wsConnection.WriteMessage(websocket.CloseMessage, websocket.FormatCloseMessage(websocket.CloseNormalClosure, "")) - if err != nil { - log.Error("cannot send close message", "error", err) - } - err = c.wsConnection.Close() - if err != nil { - log.Error("cannot close ws connection", "error", err) - } -} - -func (c *client) Close() { - log.Info("closing all components...") - c.closeWsConnection() - - err := c.closeActions() - if err != nil { - log.Error("cannot close the operations handler", "error", err) - } -} diff --git a/process/wsindexer/indexer.go b/process/wsindexer/indexer.go index c2e4382c..42dcfac5 100644 --- a/process/wsindexer/indexer.go +++ b/process/wsindexer/indexer.go @@ -2,10 +2,14 @@ package wsindexer import ( "errors" + "fmt" + "time" "github.com/multiversx/mx-chain-core-go/core/check" + "github.com/multiversx/mx-chain-core-go/data/outport" "github.com/multiversx/mx-chain-core-go/marshal" - "github.com/multiversx/mx-chain-core-go/websocketOutportDriver/data" + "github.com/multiversx/mx-chain-es-indexer-go/core" + "github.com/multiversx/mx-chain-es-indexer-go/metrics" "github.com/multiversx/mx-chain-es-indexer-go/process/dataindexer" logger "github.com/multiversx/mx-chain-logger-go" ) @@ -15,102 +19,178 @@ var ( errNilDataIndexer = errors.New("nil data indexer") ) +// ArgsIndexer holds all the components needed to create a new instance of indexer +type ArgsIndexer struct { + Marshaller marshal.Marshalizer + DataIndexer DataIndexer + StatusMetrics core.StatusMetricsHandler +} + type indexer struct { - marshaller 
marshal.Marshalizer - di DataIndexer + marshaller marshal.Marshalizer + di DataIndexer + statusMetrics core.StatusMetricsHandler + actions map[string]func(marshalledData []byte) error } // NewIndexer will create a new instance of *indexer -func NewIndexer(marshaller marshal.Marshalizer, dataIndexer DataIndexer) (*indexer, error) { - if check.IfNil(marshaller) { +func NewIndexer(args ArgsIndexer) (*indexer, error) { + if check.IfNil(args.Marshaller) { return nil, dataindexer.ErrNilMarshalizer } - if check.IfNil(dataIndexer) { + if check.IfNil(args.DataIndexer) { return nil, errNilDataIndexer } + if check.IfNil(args.StatusMetrics) { + return nil, core.ErrNilMetricsHandler + } + + payloadIndexer := &indexer{ + marshaller: args.Marshaller, + di: args.DataIndexer, + statusMetrics: args.StatusMetrics, + } + payloadIndexer.initActionsMap() - return &indexer{ - marshaller: marshaller, - di: dataIndexer, - }, nil + return payloadIndexer, nil } // GetOperationsMap returns the map with all the operations that will index data -func (i *indexer) GetOperationsMap() map[data.OperationType]func(d []byte) error { - return map[data.OperationType]func(d []byte) error{ - data.OperationSaveBlock: i.saveBlock, - data.OperationRevertIndexedBlock: i.revertIndexedBlock, - data.OperationSaveRoundsInfo: i.saveRounds, - data.OperationSaveValidatorsRating: i.saveValidatorsRating, - data.OperationSaveValidatorsPubKeys: i.saveValidatorsPubKeys, - data.OperationSaveAccounts: i.saveAccounts, - data.OperationFinalizedBlock: i.finalizedBlock, +func (i *indexer) initActionsMap() { + i.actions = map[string]func(d []byte) error{ + outport.TopicSaveBlock: i.saveBlock, + outport.TopicRevertIndexedBlock: i.revertIndexedBlock, + outport.TopicSaveRoundsInfo: i.saveRounds, + outport.TopicSaveValidatorsRating: i.saveValidatorsRating, + outport.TopicSaveValidatorsPubKeys: i.saveValidatorsPubKeys, + outport.TopicSaveAccounts: i.saveAccounts, + outport.TopicFinalizedBlock: i.finalizedBlock, + 
outport.TopicSettings: i.setSettings, } } +// ProcessPayload will proces the provided payload based on the topic +func (i *indexer) ProcessPayload(payload []byte, topic string, version uint32) error { + if version != 1 { + log.Warn("received a payload with a different version", "version", version) + } + + payloadTypeAction, ok := i.actions[topic] + if !ok { + log.Warn("invalid payload type", "topic", topic) + return nil + } + + shardID, err := i.getShardID(payload) + if err != nil { + log.Warn("indexer.ProcessPayload: cannot get shardID from payload", "error", err) + } + + start := time.Now() + err = payloadTypeAction(payload) + duration := time.Since(start) + + topicKey := fmt.Sprintf("%s_%d", topic, shardID) + i.statusMetrics.AddIndexingData(metrics.ArgsAddIndexingData{ + GotError: err != nil, + MessageLen: uint64(len(payload)), + Topic: topicKey, + Duration: duration, + }) + + return err +} + func (i *indexer) saveBlock(marshalledData []byte) error { - argsSaveBlockS, err := i.getArgsSaveBlock(marshalledData) + outportBlock := &outport.OutportBlock{} + err := i.marshaller.Unmarshal(outportBlock, marshalledData) if err != nil { return err } - return i.di.SaveBlock(argsSaveBlockS) + return i.di.SaveBlock(outportBlock) } func (i *indexer) revertIndexedBlock(marshalledData []byte) error { - header, body, err := i.getHeaderAndBody(marshalledData) + blockData := &outport.BlockData{} + err := i.marshaller.Unmarshal(blockData, marshalledData) if err != nil { return err } - return i.di.RevertIndexedBlock(header, body) + return i.di.RevertIndexedBlock(blockData) } func (i *indexer) saveRounds(marshalledData []byte) error { - argsRounds := &data.ArgsSaveRoundsInfo{} - err := i.marshaller.Unmarshal(argsRounds, marshalledData) + roundsInfo := &outport.RoundsInfo{} + err := i.marshaller.Unmarshal(roundsInfo, marshalledData) if err != nil { return err } - return i.di.SaveRoundsInfo(argsRounds.RoundsInfos) + return i.di.SaveRoundsInfo(roundsInfo) } func (i *indexer) 
saveValidatorsRating(marshalledData []byte) error { - argsValidatorsRating := &data.ArgsSaveValidatorsRating{} - err := i.marshaller.Unmarshal(argsValidatorsRating, marshalledData) + ratingData := &outport.ValidatorsRating{} + err := i.marshaller.Unmarshal(ratingData, marshalledData) if err != nil { return err } - return i.di.SaveValidatorsRating(argsValidatorsRating.IndexID, argsValidatorsRating.InfoRating) + return i.di.SaveValidatorsRating(ratingData) } func (i *indexer) saveValidatorsPubKeys(marshalledData []byte) error { - argsValidators := &data.ArgsSaveValidatorsPubKeys{} - err := i.marshaller.Unmarshal(argsValidators, marshalledData) + validatorsPubKeys := &outport.ValidatorsPubKeys{} + err := i.marshaller.Unmarshal(validatorsPubKeys, marshalledData) if err != nil { return err } - return i.di.SaveValidatorsPubKeys(argsValidators.ValidatorsPubKeys, argsValidators.Epoch) + return i.di.SaveValidatorsPubKeys(validatorsPubKeys) } func (i *indexer) saveAccounts(marshalledData []byte) error { - argsSaveAccounts := &data.ArgsSaveAccounts{} - err := i.marshaller.Unmarshal(argsSaveAccounts, marshalledData) + accounts := &outport.Accounts{} + err := i.marshaller.Unmarshal(accounts, marshalledData) if err != nil { return err } - return i.di.SaveAccounts(argsSaveAccounts.BlockTimestamp, argsSaveAccounts.Acc, argsSaveAccounts.ShardID) + return i.di.SaveAccounts(accounts) } func (i *indexer) finalizedBlock(_ []byte) error { return nil } +func (i *indexer) setSettings(marshalledData []byte) error { + settings := outport.OutportConfig{} + err := i.marshaller.Unmarshal(&settings, marshalledData) + if err != nil { + return err + } + + return i.di.SetCurrentSettings(settings) +} + // Close will close the indexer func (i *indexer) Close() error { return i.di.Close() } + +// IsInterfaceNil returns true if underlying object is nil +func (i *indexer) IsInterfaceNil() bool { + return i == nil +} + +func (i *indexer) getShardID(payload []byte) (uint32, error) { + shard := 
&outport.Shard{} + err := i.marshaller.Unmarshal(shard, payload) + if err != nil { + return 0, err + } + + return shard.ShardID, nil +} diff --git a/process/wsindexer/interface.go b/process/wsindexer/interface.go index 59798a9e..1c23cc7d 100644 --- a/process/wsindexer/interface.go +++ b/process/wsindexer/interface.go @@ -1,25 +1,25 @@ package wsindexer import ( - "github.com/multiversx/mx-chain-core-go/data" "github.com/multiversx/mx-chain-core-go/data/outport" ) // WSClient defines what a websocket client should do type WSClient interface { - Start() - Close() + Send(message []byte, topic string) error + Close() error } // DataIndexer dines what a data indexer should do type DataIndexer interface { - SaveBlock(args *outport.ArgsSaveBlockData) error - RevertIndexedBlock(header data.HeaderHandler, body data.BodyHandler) error - SaveRoundsInfo(roundsInfos []*outport.RoundInfo) error - SaveValidatorsPubKeys(validatorsPubKeys map[uint32][][]byte, epoch uint32) error - SaveValidatorsRating(indexID string, infoRating []*outport.ValidatorRatingInfo) error - SaveAccounts(blockTimestamp uint64, acc map[string]*outport.AlteredAccount, shardID uint32) error - FinalizedBlock(headerHash []byte) error + SaveBlock(outportBlock *outport.OutportBlock) error + RevertIndexedBlock(blockData *outport.BlockData) error + SaveRoundsInfo(roundsInfos *outport.RoundsInfo) error + SaveValidatorsPubKeys(validatorsPubKeys *outport.ValidatorsPubKeys) error + SaveValidatorsRating(ratingData *outport.ValidatorsRating) error + SaveAccounts(accountsData *outport.Accounts) error + FinalizedBlock(finalizedBlock *outport.FinalizedBlock) error + SetCurrentSettings(settings outport.OutportConfig) error Close() error IsInterfaceNil() bool } diff --git a/process/wsindexer/types.go b/process/wsindexer/types.go deleted file mode 100644 index 47ae52b1..00000000 --- a/process/wsindexer/types.go +++ /dev/null @@ -1,43 +0,0 @@ -package wsindexer - -import ( - "github.com/multiversx/mx-chain-core-go/data/outport" 
- "github.com/multiversx/mx-chain-core-go/data/receipt" - "github.com/multiversx/mx-chain-core-go/data/rewardTx" - "github.com/multiversx/mx-chain-core-go/data/smartContractResult" - "github.com/multiversx/mx-chain-core-go/data/transaction" -) - -type normalTxWrapped struct { - TransactionHandler *transaction.Transaction - outport.FeeInfo -} -type rewardsTxsWrapped struct { - TransactionHandler *rewardTx.RewardTx - outport.FeeInfo -} -type scrWrapped struct { - TransactionHandler *smartContractResult.SmartContractResult - outport.FeeInfo -} -type receiptWrapped struct { - TransactionHandler *receipt.Receipt - outport.FeeInfo -} -type logWrapped struct { - TxHash string - LogHandler *transaction.Log -} - -type poolStruct struct { - Txs map[string]*normalTxWrapped - Invalid map[string]*normalTxWrapped - Scrs map[string]*scrWrapped - Rewards map[string]*rewardsTxsWrapped - Receipts map[string]*receiptWrapped - Logs []*logWrapped -} - -type argsSaveBlock struct { - TransactionsPool *poolStruct -} diff --git a/process/wsindexer/unmarshal.go b/process/wsindexer/unmarshal.go deleted file mode 100644 index ed7806c5..00000000 --- a/process/wsindexer/unmarshal.go +++ /dev/null @@ -1,192 +0,0 @@ -package wsindexer - -import ( - "encoding/hex" - "errors" - "time" - - "github.com/multiversx/mx-chain-core-go/core" - "github.com/multiversx/mx-chain-core-go/data" - "github.com/multiversx/mx-chain-core-go/data/block" - "github.com/multiversx/mx-chain-core-go/data/outport" -) - -func (i *indexer) getArgsSaveBlock(marshaledData []byte) (*outport.ArgsSaveBlockData, error) { - defer func(start time.Time) { - log.Debug("indexer.getArgsSaveBlock", "duration", time.Since(start)) - }(time.Now()) - - header, err := i.getHeader(marshaledData) - if err != nil { - return nil, err - } - - body, err := i.getBody(marshaledData) - if err != nil { - return nil, err - } - - txsPool, err := i.getTxsPool(marshaledData) - if err != nil { - return nil, err - } - - argsBlockS := struct { - HeaderHash 
[]byte - SignersIndexes []uint64 - NotarizedHeadersHashes []string - HeaderGasConsumption outport.HeaderGasConsumption - AlteredAccounts map[string]*outport.AlteredAccount - NumberOfShards uint32 - IsImportDB bool - }{} - err = i.marshaller.Unmarshal(&argsBlockS, marshaledData) - if err != nil { - return nil, err - } - - return &outport.ArgsSaveBlockData{ - HeaderHash: argsBlockS.HeaderHash, - Body: body, - Header: header, - SignersIndexes: argsBlockS.SignersIndexes, - NotarizedHeadersHashes: argsBlockS.NotarizedHeadersHashes, - HeaderGasConsumption: argsBlockS.HeaderGasConsumption, - TransactionsPool: txsPool, - AlteredAccounts: argsBlockS.AlteredAccounts, - NumberOfShards: argsBlockS.NumberOfShards, - IsImportDB: argsBlockS.IsImportDB, - }, nil -} - -func (i *indexer) getTxsPool(marshaledData []byte) (*outport.Pool, error) { - argSaveBlock := argsSaveBlock{} - - err := i.marshaller.Unmarshal(&argSaveBlock, marshaledData) - if err != nil { - return nil, err - } - - normalTxs := make(map[string]data.TransactionHandlerWithGasUsedAndFee, len(argSaveBlock.TransactionsPool.Txs)) - for txHash, tx := range argSaveBlock.TransactionsPool.Txs { - decoded := getDecodedHash(txHash) - - normalTxs[decoded] = outport.NewTransactionHandlerWithGasAndFee(tx.TransactionHandler, tx.GasUsed, tx.Fee) - normalTxs[decoded].SetInitialPaidFee(tx.InitialPaidFee) - } - - invalidTxs := make(map[string]data.TransactionHandlerWithGasUsedAndFee, len(argSaveBlock.TransactionsPool.Invalid)) - for txHash, tx := range argSaveBlock.TransactionsPool.Invalid { - decoded := getDecodedHash(txHash) - - invalidTxs[decoded] = outport.NewTransactionHandlerWithGasAndFee(tx.TransactionHandler, tx.GasUsed, tx.Fee) - invalidTxs[decoded].SetInitialPaidFee(tx.InitialPaidFee) - } - - scrs := make(map[string]data.TransactionHandlerWithGasUsedAndFee, len(argSaveBlock.TransactionsPool.Scrs)) - for txHash, tx := range argSaveBlock.TransactionsPool.Scrs { - decoded := getDecodedHash(txHash) - - scrs[decoded] = 
outport.NewTransactionHandlerWithGasAndFee(tx.TransactionHandler, tx.GasUsed, tx.Fee) - scrs[decoded].SetInitialPaidFee(tx.InitialPaidFee) - } - - receipts := make(map[string]data.TransactionHandlerWithGasUsedAndFee, len(argSaveBlock.TransactionsPool.Receipts)) - for txHash, tx := range argSaveBlock.TransactionsPool.Receipts { - decoded := getDecodedHash(txHash) - - receipts[decoded] = outport.NewTransactionHandlerWithGasAndFee(tx.TransactionHandler, tx.GasUsed, tx.Fee) - receipts[decoded].SetInitialPaidFee(tx.InitialPaidFee) - } - - rewards := make(map[string]data.TransactionHandlerWithGasUsedAndFee, len(argSaveBlock.TransactionsPool.Rewards)) - for txHash, tx := range argSaveBlock.TransactionsPool.Rewards { - decoded := getDecodedHash(txHash) - - rewards[decoded] = outport.NewTransactionHandlerWithGasAndFee(tx.TransactionHandler, tx.GasUsed, tx.Fee) - rewards[decoded].SetInitialPaidFee(tx.InitialPaidFee) - } - - logs := make([]*data.LogData, 0, len(argSaveBlock.TransactionsPool.Logs)) - for _, txLog := range argSaveBlock.TransactionsPool.Logs { - logs = append(logs, &data.LogData{ - LogHandler: txLog.LogHandler, - TxHash: getDecodedHash(txLog.TxHash), - }) - } - - return &outport.Pool{ - Txs: normalTxs, - Scrs: scrs, - Rewards: rewards, - Invalid: invalidTxs, - Receipts: receipts, - Logs: logs, - }, nil -} - -func getDecodedHash(hash string) string { - decoded, err := hex.DecodeString(hash) - if err != nil { - log.Warn("getDecodedHash.cannot decode hash", "error", err, "hash", hash) - return hash - } - return string(decoded) -} - -func (i *indexer) getHeaderAndBody(marshaledData []byte) (data.HeaderHandler, data.BodyHandler, error) { - body, err := i.getBody(marshaledData) - if err != nil { - return nil, nil, err - } - - header, err := i.getHeader(marshaledData) - if err != nil { - return nil, nil, err - } - - return header, body, nil -} - -func (i *indexer) getBody(marshaledData []byte) (data.BodyHandler, error) { - bodyStruct := struct { - Body *block.Body - 
}{} - - err := i.marshaller.Unmarshal(&bodyStruct, marshaledData) - return bodyStruct.Body, err -} - -func (i *indexer) getHeader(marshaledData []byte) (data.HeaderHandler, error) { - headerTypeStruct := struct { - HeaderType core.HeaderType - }{} - - err := i.marshaller.Unmarshal(&headerTypeStruct, marshaledData) - if err != nil { - return nil, err - } - - switch headerTypeStruct.HeaderType { - case core.MetaHeader: - hStruct := struct { - H1 *block.MetaBlock `json:"Header"` - }{} - err = i.marshaller.Unmarshal(&hStruct, marshaledData) - return hStruct.H1, err - case core.ShardHeaderV1: - hStruct := struct { - H1 *block.Header `json:"Header"` - }{} - err = i.marshaller.Unmarshal(&hStruct, marshaledData) - return hStruct.H1, err - case core.ShardHeaderV2: - hStruct := struct { - H1 *block.HeaderV2 `json:"Header"` - }{} - err = i.marshaller.Unmarshal(&hStruct, marshaledData) - return hStruct.H1, err - default: - return nil, errors.New("invalid header type") - } -} diff --git a/process/wsindexer/unmarshal_test.go b/process/wsindexer/unmarshal_test.go deleted file mode 100644 index 31c50f4f..00000000 --- a/process/wsindexer/unmarshal_test.go +++ /dev/null @@ -1,103 +0,0 @@ -package wsindexer - -import ( - "math/big" - "testing" - - "github.com/multiversx/mx-chain-core-go/core" - coreData "github.com/multiversx/mx-chain-core-go/data" - "github.com/multiversx/mx-chain-core-go/data/block" - "github.com/multiversx/mx-chain-core-go/data/outport" - "github.com/multiversx/mx-chain-core-go/data/receipt" - "github.com/multiversx/mx-chain-core-go/data/rewardTx" - "github.com/multiversx/mx-chain-core-go/data/smartContractResult" - "github.com/multiversx/mx-chain-core-go/data/transaction" - "github.com/multiversx/mx-chain-core-go/marshal/factory" - outportData "github.com/multiversx/mx-chain-core-go/websocketOutportDriver/data" - "github.com/multiversx/mx-chain-es-indexer-go/process/dataindexer" - "github.com/stretchr/testify/require" -) - -func TestGetHeaderAndBody(t *testing.T) 
{ - t.Parallel() - - marshaller, _ := factory.NewMarshalizer("json") - nilDataIndexer := dataindexer.NewNilIndexer() - - di, _ := NewIndexer(marshaller, nilDataIndexer) - - arg := &outportData.ArgsRevertIndexedBlock{ - HeaderType: core.ShardHeaderV2, - Header: &block.HeaderV2{ScheduledRootHash: []byte("aaaaaa")}, - Body: &block.Body{MiniBlocks: []*block.MiniBlock{{}}}, - } - argBytes, _ := marshaller.Marshal(arg) - - body, header, err := di.getHeaderAndBody(argBytes) - require.Nil(t, err) - require.NotNil(t, body) - require.NotNil(t, header) -} - -func TestGetPool(t *testing.T) { - t.Parallel() - - marshaller, _ := factory.NewMarshalizer("json") - nilDataIndexer := dataindexer.NewNilIndexer() - - di, _ := NewIndexer(marshaller, nilDataIndexer) - - pool := &outport.Pool{ - Txs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - "txHash": outport.NewTransactionHandlerWithGasAndFee(&transaction.Transaction{ - Nonce: 1, - }, 1, big.NewInt(100)), - }, - Scrs: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - "scrHash": outport.NewTransactionHandlerWithGasAndFee(&smartContractResult.SmartContractResult{ - Nonce: 2, - }, 0, big.NewInt(0)), - }, - Rewards: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - "reward": outport.NewTransactionHandlerWithGasAndFee(&rewardTx.RewardTx{ - Value: big.NewInt(10), - }, 0, big.NewInt(0)), - }, - Invalid: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - "invalid": outport.NewTransactionHandlerWithGasAndFee(&transaction.Transaction{ - Nonce: 3, - }, 100, big.NewInt(1000)), - }, - Receipts: map[string]coreData.TransactionHandlerWithGasUsedAndFee{ - "rec": outport.NewTransactionHandlerWithGasAndFee(&receipt.Receipt{ - Value: big.NewInt(300), - }, 0, big.NewInt(0)), - }, - Logs: []*coreData.LogData{ - { - TxHash: "something", - LogHandler: &transaction.Log{ - Address: []byte("addr"), - }, - }, - }, - } - - argsSaveBlock := &outportData.ArgsSaveBlock{ - ArgsSaveBlockData: outport.ArgsSaveBlockData{ - 
TransactionsPool: pool, - }, - } - - argsSaveBlockBytes, _ := di.marshaller.Marshal(argsSaveBlock) - - resPool, err := di.getTxsPool(argsSaveBlockBytes) - require.Nil(t, err) - require.NotNil(t, resPool) - require.Equal(t, 1, len(resPool.Txs)) - require.Equal(t, 1, len(resPool.Scrs)) - require.Equal(t, 1, len(resPool.Rewards)) - require.Equal(t, 1, len(resPool.Receipts)) - require.Equal(t, 1, len(resPool.Invalid)) - require.Equal(t, 1, len(resPool.Logs)) -} diff --git a/scripts/observers/.env b/scripts/observers/.env index 95d2c92c..2797b76e 100644 --- a/scripts/observers/.env +++ b/scripts/observers/.env @@ -1,8 +1,23 @@ -NODE_CONFIG_URL="https://github.com/multiversx/mx-chain-testnet-config" -NODE_CONFIG_BRANCH="" +MX_CHAIN_DEPLOY_GO_URL=https://github.com/multiversx/mx-chain-deploy-go +MX_CHAIN_PROXY_URL=https://github.com/multiversx/mx-chain-proxy-go NODE_GO_URL="https://github.com/multiversx/mx-chain-go" -NODE_GO_BRANCH="" + +# TODO change the branch after tests +NODE_GO_BRANCH="extend-sc-deploy-and-upgrade-log" WORKING_DIRECTORY="IndexerObservers" OBSERVER_DIR_PREFIX="observer_shard_" + +# marshaller types: `json` or `gogo protobuf` +WS_MARSHALLER_TYPE="gogo protobuf" + +NUM_OF_SHARDS=3 #MAX 3 + +OBSERVERS_START_PORT=9500 + +PROXY_PORT=7950 + +ACK_TIMEOUT_IN_SECONDS=10 + +INDEXER_BINARY_SERVER=true diff --git a/scripts/observers/Makefile b/scripts/observers/Makefile new file mode 100644 index 00000000..d7239c3c --- /dev/null +++ b/scripts/observers/Makefile @@ -0,0 +1,30 @@ +# Help section +help: + @echo "Available targets:" + @echo " config - Configure the local testnet" + @echo " start - Start the local testnet" + @echo " stop - Stop the local testnet" + @echo " clean - Clean up the files of the local testnet" + @echo " cluster - Start an Elasticsearch cluster on port 9200" + @echo " cluster-stop - Stop the Elasticsearch cluster" + @echo " monitor-start - Start the Prometheus server (port 9090) and the Grafana server (port 3000)" + @echo " monitor-Stop - 
Stop the Prometheus server (port 9090) and the Grafana server (port 3000)" + +config: + python3 config.py +start: + python3 start.py +stop: + python3 stop.py +clean: + python3 clean.py + cd .. && /bin/bash script.sh delete +cluster: + cd .. && /bin/bash script.sh start +cluster-stop: + cd .. && /bin/bash script.sh stop +monitor-start: + cd .. && /bin/bash script.sh start_prometheus_and_grafana +monitor-stop: + cd .. && /bin/bash script.sh stop_prometheus_and_grafana + diff --git a/scripts/observers/config.py b/scripts/observers/config.py index dc23cf24..eefd6436 100644 --- a/scripts/observers/config.py +++ b/scripts/observers/config.py @@ -9,17 +9,45 @@ def update_toml_indexer(path, shard_id): # prefs.toml + is_indexer_server = os.getenv('INDEXER_BINARY_SERVER') path_prefs = path / "prefs.toml" prefs_data = toml.load(str(path_prefs)) - prefs_data['config']['web-socket']['server-url'] = str(shard_id) + + port = WS_PORT_BASE + shard_id + meta_port = WS_METACHAIN_PORT + if is_indexer_server: + port = WS_PORT_BASE + meta_port = WS_PORT_BASE + prefs_data['config']['web-socket']['mode'] = "server" + if shard_id != METACHAIN: - prefs_data['config']['web-socket']['server-url'] = "localhost:" + str(WS_PORT_BASE + shard_id) + prefs_data['config']['web-socket']['url'] = f"localhost:{str(port)}" else: - prefs_data['config']['web-socket']['server-url'] = "localhost:" + str(WS_METACHAIN_PORT) + prefs_data['config']['web-socket']['url'] = f"localhost:{str(meta_port)}" + prefs_data['config']['web-socket']['data-marshaller-type'] = str(os.getenv('WS_MARSHALLER_TYPE')) + prefs_data['config']['web-socket']['acknowledge-timeout-in-seconds'] = int(os.getenv('ACK_TIMEOUT_IN_SECONDS')) + f = open(path_prefs, 'w') toml.dump(prefs_data, f) f.close() + # api.toml + path_api = path / "api.toml" + api_data = toml.load(str(path_api)) + + api_port = API_PORT_BASE + shard_id + api_meta_port = API_META_PORT + if is_indexer_server: + api_port = API_PORT_BASE + api_meta_port = API_PORT_BASE + if 
shard_id != METACHAIN: + api_data['rest-api-interface'] = f":{api_port}" + else: + api_data['rest-api-interface'] = f":{api_meta_port}" + f = open(path_api, 'w') + toml.dump(api_data, f) + f.close() + def update_toml_node(path, shard_id): # prefs.toml @@ -30,14 +58,38 @@ def update_toml_node(path, shard_id): toml.dump(prefs_data, f) f.close() + # config.toml + num_of_shards = int(os.getenv('NUM_OF_SHARDS')) + path_config = path / "config.toml" + config_data = toml.load(path_config) + config_data['DbLookupExtensions']['Enabled'] = True + config_data['EpochStartConfig']['RoundsPerEpoch'] = 20 + config_data['GeneralSettings']['GenesisMaxNumberOfShards'] = num_of_shards + f = open(path_config, 'w') + toml.dump(config_data, f) + f.close() + # external.toml path_external = path / "external.toml" external_data = toml.load(str(path_external)) - external_data['WebSocketConnector']['Enabled'] = True + external_data['HostDriversConfig'][0]['Enabled'] = True + + port = WS_PORT_BASE + shard_id + meta_port = WS_METACHAIN_PORT + + is_indexer_server = os.getenv('INDEXER_BINARY_SERVER') + if is_indexer_server: + external_data['HostDriversConfig'][0]['Mode'] = "client" + port = WS_PORT_BASE + meta_port = WS_PORT_BASE + if shard_id != METACHAIN: - external_data['WebSocketConnector']['URL'] = "localhost:" + str(WS_PORT_BASE + shard_id) + external_data['HostDriversConfig'][0]['URL'] = f"localhost:{str(port)}" else: - external_data['WebSocketConnector']['URL'] = "localhost:" + str(WS_METACHAIN_PORT) + external_data['HostDriversConfig'][0]['URL'] = f"localhost:{str(meta_port)}" + + external_data['HostDriversConfig'][0]['MarshallerType'] = str(os.getenv('WS_MARSHALLER_TYPE')) + external_data['HostDriversConfig'][0]['AcknowledgeTimeoutInSec'] = int(os.getenv('ACK_TIMEOUT_IN_SECONDS')) f = open(path_external, 'w') toml.dump(external_data, f) f.close() @@ -72,48 +124,165 @@ def prepare_observer(shard_id, working_dir, config_folder): update_toml_indexer(indexer_config, shard_id) +def 
prepare_indexer_server(meta_id, working_dir): + is_indexer_server = os.getenv('INDEXER_BINARY_SERVER') + if not is_indexer_server: + return + + current_observer = str(os.getenv('OBSERVER_DIR_PREFIX')) + str(meta_id) + working_dir_observer = working_dir / current_observer + shutil.copytree(working_dir_observer / "indexer", working_dir / "indexer") + + +def generate_new_config(working_dir): + mx_chain_go_folder = working_dir / "mx-chain-go" / "scripts" / "testnet" + num_of_shards = str(os.getenv('NUM_OF_SHARDS')) + + with open(mx_chain_go_folder / "local.sh", "w") as file: + file.write(f'export SHARDCOUNT={num_of_shards}\n') + file.write("export SHARD_VALIDATORCOUNT=1\n") + file.write("export SHARD_OBSERVERCOUNT=0\n") + file.write("export SHARD_CONSENSUS_SIZE=1\n") + file.write("export META_VALIDATORCOUNT=1\n") + file.write("export META_OBSERVERCOUNT=0\n") + file.write("export META_CONSENSUS_SIZE=1\n") + file.write('export LOGLEVEL="*:DEBUG"\n') + file.write('export OBSERVERS_ANTIFLOOD_DISABLE=0\n') + file.write('export USETMUX=0\n') + file.write('export USE_PROXY=0\n') + + +def clone_mx_chain_go(working_dir): + print("cloning mx-chain-go....") + mx_chain_go_folder = working_dir / "mx-chain-go" + if not os.path.isdir(mx_chain_go_folder): + Repo.clone_from(os.getenv('NODE_GO_URL'), mx_chain_go_folder) + + repo_mx_chain_go = Repo(mx_chain_go_folder) + repo_mx_chain_go.git.checkout(os.getenv('NODE_GO_BRANCH')) + + +def clone_dependencies(working_dir): + print("cloning dependencies") + mx_chain_deploy_folder = working_dir / "mx-chain-deploy-go" + if not os.path.isdir(mx_chain_deploy_folder): + Repo.clone_from(os.getenv('MX_CHAIN_DEPLOY_GO_URL'), mx_chain_deploy_folder) + + mx_chain_proxy_folder = working_dir / "mx-chain-proxy-go" + if not os.path.isdir(mx_chain_proxy_folder): + Repo.clone_from(os.getenv('MX_CHAIN_PROXY_URL'), mx_chain_proxy_folder) + + +def prepare_seed_node(working_dir): + print("preparing seed node") + seed_node = Path.home() / 
"MultiversX/testnet/seednode" + shutil.copytree(seed_node, working_dir / "seednode") + + mx_chain_go_folder = working_dir / "mx-chain-go" + subprocess.check_call(["go", "build"], cwd=mx_chain_go_folder / "cmd/seednode") + + seed_node_exec = mx_chain_go_folder / "cmd/seednode/seednode" + shutil.copyfile(seed_node_exec, working_dir / "seednode/seednode") + + st = os.stat(working_dir / "seednode/seednode") + os.chmod(working_dir / "seednode/seednode", st.st_mode | stat.S_IEXEC) + + +def prepare_proxy(working_dir): + print("preparing proxy") + mx_chain_proxy_go_folder = working_dir / "mx-chain-proxy-go" + subprocess.check_call(["go", "build"], cwd=mx_chain_proxy_go_folder / "cmd/proxy") + + mx_chain_proxy_go_binary_folder = mx_chain_proxy_go_folder / "cmd/proxy" + st = os.stat(mx_chain_proxy_go_binary_folder / "proxy") + os.chmod(mx_chain_proxy_go_binary_folder / "proxy", st.st_mode | stat.S_IEXEC) + + # config.toml + path_config = mx_chain_proxy_go_binary_folder / "config/config.toml" + config_data = toml.load(str(path_config)) + + proxy_port = int(os.getenv('PROXY_PORT')) + config_data['GeneralSettings']['ServerPort'] = proxy_port + del config_data['Observers'] + del config_data['FullHistoryNodes'] + + config_data['Observers'] = [] + + observers_start_port = int(os.getenv('OBSERVERS_START_PORT')) + meta_observer = { + 'ShardId': 4294967295, + 'Address': f'http://127.0.0.1:{observers_start_port}', + } + config_data['Observers'].append(meta_observer) + + num_of_shards = int(os.getenv('NUM_OF_SHARDS')) + for shard_id in range(num_of_shards): + shard_observer_port = observers_start_port + shard_id + 1 + meta_observer = { + 'ShardId': shard_id, + 'Address': f'http://127.0.0.1:{shard_observer_port}', + } + config_data['Observers'].append(meta_observer) + + f = open(path_config, 'w') + toml.dump(config_data, f) + f.close() + + +def generate_config_for_local_testnet(working_dir): + mx_chain_local_testnet_scripts = working_dir / "mx-chain-go/scripts/testnet" + 
subprocess.check_call(["./clean.sh"], cwd=mx_chain_local_testnet_scripts) + subprocess.check_call(["./config.sh"], cwd=mx_chain_local_testnet_scripts) + + config_folder = Path.home() / "MultiversX/testnet/node/config" + os.rename(config_folder / "config_validator.toml", config_folder / "config.toml") + shutil.copytree(config_folder, working_dir / "config") + + def main(): load_dotenv() working_dir = get_working_dir() try: os.makedirs(working_dir) except FileExistsError: + print("something") print(f"working directory {working_dir} already exists") print("use `python3 clean.py` command first") sys.exit() - # CLONE config - print("cloning config....") - config_folder = working_dir / "config" - if not os.path.isdir(config_folder): - Repo.clone_from(os.getenv('NODE_CONFIG_URL'), config_folder) - - repo_cfg = Repo(config_folder) - repo_cfg.git.checkout(os.getenv('NODE_CONFIG_BRANCH')) - - # CLONE mx-chain-go - print("cloning mx-chain-go....") - mx_chain_go_folder = working_dir / "mx-chain-go" - if not os.path.isdir(mx_chain_go_folder): - Repo.clone_from(os.getenv('NODE_GO_URL'), mx_chain_go_folder) + num_of_shards = int(os.getenv('NUM_OF_SHARDS')) + check_num_of_shards(num_of_shards) - repo_mx_chain_go = Repo(mx_chain_go_folder) - repo_mx_chain_go.git.checkout(os.getenv('NODE_GO_BRANCH')) + # clone mx-chain-go + clone_mx_chain_go(working_dir) + # clone dependencies + clone_dependencies(working_dir) + # generate configs + generate_new_config(working_dir) + generate_config_for_local_testnet(working_dir) + # prepare seednode + prepare_seed_node(working_dir) + # prepare proxy + prepare_proxy(working_dir) # build binary mx-chain-go print("building node...") - subprocess.check_call(["go", "build"], cwd=mx_chain_go_folder / "cmd/node") + mx_chain_go_folder = working_dir / "mx-chain-go" + flags = '-gcflags=all=-N -l' + subprocess.check_call(["go", "build", flags], cwd=mx_chain_go_folder / "cmd/node") # build binary indexer print("building indexer...") - 
subprocess.check_call(["go", "build"], cwd="../../cmd/elasticindexer") + subprocess.check_call(["go", "build", flags], cwd="../../cmd/elasticindexer") # prepare observers + config_folder = working_dir / "config" print("preparing config...") - prepare_observer(0, working_dir, config_folder) - prepare_observer(1, working_dir, config_folder) - prepare_observer(2, working_dir, config_folder) prepare_observer(METACHAIN, working_dir, config_folder) + prepare_indexer_server(METACHAIN, working_dir) + + for shard_id in range(num_of_shards): + prepare_observer(shard_id, working_dir, config_folder) if __name__ == "__main__": diff --git a/scripts/observers/requirements.txt b/scripts/observers/requirements.txt index 0899c299..45c66bdf 100644 --- a/scripts/observers/requirements.txt +++ b/scripts/observers/requirements.txt @@ -1 +1,2 @@ -git \ No newline at end of file +python-dotenv +GitPython \ No newline at end of file diff --git a/scripts/observers/start.py b/scripts/observers/start.py index e57ff4de..4e6a865a 100644 --- a/scripts/observers/start.py +++ b/scripts/observers/start.py @@ -1,18 +1,46 @@ +import shutil + from dotenv import load_dotenv from utils import * -def start_observer(shard_id, working_dir): +def start_seed_node(working_dir): + current_directory = os.getcwd() + working_dir_seed_node = working_dir/"seednode" + os.chdir(working_dir_seed_node) + command = "./seednode" + os.system("screen -d -m -S seednode" + " " + command) + + os.chdir(current_directory) + + +def start_proxy(working_dir): + current_directory = os.getcwd() + + working_dir_proxy = working_dir/"mx-chain-proxy-go/cmd/proxy" + os.chdir(working_dir_proxy) + command = "./proxy" + os.system("screen -d -m -S proxy" + " " + command) + + os.chdir(current_directory) + + +def start_observer_and_indexer(shard_id, working_dir, sk_index): current_observer = str(os.getenv('OBSERVER_DIR_PREFIX')) + str(shard_id) working_dir_observer = working_dir / current_observer current_directory = os.getcwd() # start 
observer os.chdir(working_dir_observer / "node") - command = "./node" + " --log-level *:DEBUG --no-key --log-save" + observers_start_port = int(os.getenv('OBSERVERS_START_PORT')) + command = "./node" + " --log-level *:DEBUG --log-save --sk-index " + str(sk_index) + " --rest-api-interface :" + str(observers_start_port + sk_index) os.system("screen -d -m -S obs" + str(shard_id) + " " + command) # start indexer + is_indexer_server = os.getenv('INDEXER_BINARY_SERVER') + if is_indexer_server: + return + os.chdir(working_dir_observer / "indexer") command = "./elasticindexer" + " --log-level *:DEBUG --log-save" os.system("screen -d -m -S indexer" + str(shard_id) + " " + command) @@ -20,6 +48,15 @@ def start_observer(shard_id, working_dir): os.chdir(current_directory) +def start_indexer_server(working_dir): + current_directory = os.getcwd() + os.chdir(working_dir / "indexer") + command = "./elasticindexer" + " --log-level *:DEBUG --log-save" + os.system("screen -d -m -S indexer" + "server" + " " + command) + + os.chdir(current_directory) + + def main(): load_dotenv() working_dir = get_working_dir() @@ -27,12 +64,21 @@ def main(): print("working directory folder is missing...you should run first `python3 config.py` command") sys.exit() + num_of_shards = int(os.getenv('NUM_OF_SHARDS')) + check_num_of_shards(num_of_shards) + print("staring observers and indexers....") - start_observer(0, working_dir) - start_observer(1, working_dir) - start_observer(2, working_dir) - start_observer(METACHAIN, working_dir) + start_seed_node(working_dir) + start_proxy(working_dir) + start_observer_and_indexer(METACHAIN, working_dir, 0) + + for shard_id in range(num_of_shards): + start_observer_and_indexer(shard_id, working_dir, shard_id+1) + + is_indexer_server = os.getenv('INDEXER_BINARY_SERVER') + if is_indexer_server: + start_indexer_server(working_dir) print("done") diff --git a/scripts/observers/stop.py b/scripts/observers/stop.py index b2d6362f..95cc29ae 100644 --- 
a/scripts/observers/stop.py +++ b/scripts/observers/stop.py @@ -1,15 +1,29 @@ import os +from dotenv import load_dotenv + def main(): - os.system("screen -X -S obs0 quit") - os.system("screen -X -S obs1 quit") - os.system("screen -X -S obs2 quit") + load_dotenv() + + is_indexer_server = os.getenv('INDEXER_BINARY_SERVER') + + os.system("screen -X -S proxy quit") + os.system("screen -X -S seednode quit") + os.system("screen -X -S obs4294967295 quit") - os.system("screen -X -S indexer0 quit") - os.system("screen -X -S indexer1 quit") - os.system("screen -X -S indexer2 quit") - os.system("screen -X -S indexer4294967295 quit") + if not is_indexer_server: + os.system("screen -X -S indexer4294967295 quit") + + num_of_shards = int(os.getenv('NUM_OF_SHARDS')) + for shard_id in range(num_of_shards): + os.system(f'screen -X -S obs{shard_id} quit') + if not is_indexer_server: + os.system(f'screen -X -S indexer{shard_id} quit') + + if is_indexer_server: + os.system("screen -X -S indexerserver quit") + print("done") diff --git a/scripts/observers/utils.py b/scripts/observers/utils.py index 930a61dc..13ce6088 100644 --- a/scripts/observers/utils.py +++ b/scripts/observers/utils.py @@ -6,6 +6,10 @@ METACHAIN = 4294967295 WS_PORT_BASE = 22111 WS_METACHAIN_PORT = WS_PORT_BASE + 50 +MAX_NUM_OF_SHARDS = 3 + +API_PORT_BASE = 8081 +API_META_PORT = API_PORT_BASE + 50 def get_working_dir(): @@ -15,3 +19,10 @@ def get_working_dir(): sys.exit() return Path.home() / working_dir_var + + +def check_num_of_shards(num_of_shards): + if num_of_shards > MAX_NUM_OF_SHARDS: + print(f'the NUM_OF_SHARDS variable cannot be greater than {MAX_NUM_OF_SHARDS}') + sys.exit() + diff --git a/scripts/prometheus/prometheus.yml b/scripts/prometheus/prometheus.yml new file mode 100644 index 00000000..2b1e127f --- /dev/null +++ b/scripts/prometheus/prometheus.yml @@ -0,0 +1,16 @@ +global: + scrape_interval: 6s + scrape_timeout: 6s + evaluation_interval: 1m +scrape_configs: + - job_name: prometheus + 
honor_timestamps: true + scrape_interval: 6s + scrape_timeout: 6s + metrics_path: /status/prometheus-metrics + scheme: http + follow_redirects: true + enable_http2: true + static_configs: + - targets: + - 0.0.0.0:8081 diff --git a/scripts/script.sh b/scripts/script.sh index bba2b141..83b0867c 100755 --- a/scripts/script.sh +++ b/scripts/script.sh @@ -1,6 +1,10 @@ IMAGE_NAME=elastic-container DEFAULT_ES_VERSION=7.16.2 -INDICES_LIST=("rating" "transactions" "blocks" "validators" "miniblocks" "rounds" "accounts" "accountshistory" "receipts" "scresults" "accountsesdt" "accountsesdthistory" "epochinfo" "scdeploys" "tokens" "tags" "logs" "delegators" "operations", "esdts") +PROMETHEUS_CONTAINER_NAME=prometheus_container +GRAFANA_CONTAINER_NAME=grafana_container +GRAFANA_VERSION=10.0.3 +PROMETHEUS_VERSION=v2.46.0 +INDICES_LIST=("rating" "transactions" "blocks" "validators" "miniblocks" "rounds" "accounts" "accountshistory" "receipts" "scresults" "accountsesdt" "accountsesdthistory" "epochinfo" "scdeploys" "tokens" "tags" "logs" "delegators" "operations" "esdts") start() { @@ -58,4 +62,18 @@ stop_open_search() { docker stop "${IMAGE_OPEN_SEARCH}" } +start_prometheus_and_grafana() { + docker rm ${PROMETHEUS_CONTAINER_NAME} 2> /dev/null + docker rm ${GRAFANA_CONTAINER_NAME} 2> /dev/null + + PROMETHEUS_CONFIG_FOLDER=$(pwd)/prometheus + docker run --network="host" --name "${PROMETHEUS_CONTAINER_NAME}" -d -p 9090:9090 -v "${PROMETHEUS_CONFIG_FOLDER}/prometheus.yml":/etc/prometheus/prometheus.yml prom/prometheus:${PROMETHEUS_VERSION} + docker run --network="host" --name "${GRAFANA_CONTAINER_NAME}" -d -p 3000:3000 grafana/grafana:${GRAFANA_VERSION} +} + +stop_prometheus_and_grafana() { + docker stop "${PROMETHEUS_CONTAINER_NAME}" + docker stop "${GRAFANA_CONTAINER_NAME}" +} + "$@" diff --git a/templates/noKibana/deploys.go b/templates/noKibana/deploys.go index 791e8c01..1142e568 100644 --- a/templates/noKibana/deploys.go +++ b/templates/noKibana/deploys.go @@ -21,6 +21,12 @@ var 
SCDeploys = Object{ "type": "date", "format": "epoch_second", }, + "currentOwner": Object{ + "type": "keyword", + }, + "initialCodeHash": Object{ + "type": "keyword", + }, "upgrades": Object{ "type": "nested", "properties": Object{ @@ -34,6 +40,24 @@ var SCDeploys = Object{ "upgrader": Object{ "type": "keyword", }, + "codeHash": Object{ + "type": "keyword", + }, + }, + }, + "owners": Object{ + "type": "nested", + "properties": Object{ + "timestamp": Object{ + "type": "date", + "format": "epoch_second", + }, + "txHash": Object{ + "type": "keyword", + }, + "address": Object{ + "type": "keyword", + }, }, }, }, diff --git a/templates/noKibana/esdts.go b/templates/noKibana/esdts.go index c2d1e9a1..072d215a 100644 --- a/templates/noKibana/esdts.go +++ b/templates/noKibana/esdts.go @@ -49,6 +49,9 @@ var ESDTs = Object{ }, }, }, + "paused": Object{ + "type": "boolean", + }, "properties": Object{ "properties": Object{ "canMint": Object{ diff --git a/templates/withKibana/deploys.go b/templates/withKibana/deploys.go index 17e969bc..1f75d3db 100644 --- a/templates/withKibana/deploys.go +++ b/templates/withKibana/deploys.go @@ -21,6 +21,12 @@ var SCDeploys = Object{ "type": "date", "format": "epoch_second", }, + "currentOwner": Object{ + "type": "keyword", + }, + "initialCodeHash": Object{ + "type": "keyword", + }, "upgrades": Object{ "type": "nested", "properties": Object{ @@ -34,6 +40,24 @@ var SCDeploys = Object{ "upgrader": Object{ "type": "keyword", }, + "codeHash": Object{ + "type": "keyword", + }, + }, + }, + "owners": Object{ + "type": "nested", + "properties": Object{ + "timestamp": Object{ + "type": "date", + "format": "epoch_second", + }, + "txHash": Object{ + "type": "keyword", + }, + "address": Object{ + "type": "keyword", + }, }, }, }, diff --git a/templates/withKibana/esdts.go b/templates/withKibana/esdts.go index 466f8f33..cc27f793 100644 --- a/templates/withKibana/esdts.go +++ b/templates/withKibana/esdts.go @@ -49,6 +49,9 @@ var ESDTs = Object{ }, }, }, + 
"paused": Object{ + "type": "boolean", + }, "properties": Object{ "properties": Object{ "canMint": Object{