From b7c9dd96c9f0179274cc218f63f453fb5cf28fe0 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 4 Dec 2023 00:53:16 +0000
Subject: [PATCH] Bump the github-dependencies group with 7 updates

Bumps the github-dependencies group with 7 updates:

| Package | From | To |
| --- | --- | --- |
| [github.com/onsi/ginkgo/v2](https://github.com/onsi/ginkgo) | `2.13.1` | `2.13.2` |
| [golang.org/x/net](https://github.com/golang/net) | `0.18.0` | `0.19.0` |
| [golang.org/x/oauth2](https://github.com/golang/oauth2) | `0.14.0` | `0.15.0` |
| [google.golang.org/api](https://github.com/googleapis/google-api-go-client) | `0.151.0` | `0.152.0` |
| [cloud.google.com/go](https://github.com/googleapis/google-cloud-go) | `0.110.10` | `0.111.0` |
| [github.com/google/cel-go](https://github.com/google/cel-go) | `0.16.1` | `0.18.2` |
| [golang.org/x/tools](https://github.com/golang/tools) | `0.15.0` | `0.16.0` |

Updates `github.com/onsi/ginkgo/v2` from 2.13.1 to 2.13.2
- [Release notes](https://github.com/onsi/ginkgo/releases)
- [Changelog](https://github.com/onsi/ginkgo/blob/master/CHANGELOG.md)
- [Commits](https://github.com/onsi/ginkgo/compare/v2.13.1...v2.13.2)

Updates `golang.org/x/net` from 0.18.0 to 0.19.0
- [Commits](https://github.com/golang/net/compare/v0.18.0...v0.19.0)

Updates `golang.org/x/oauth2` from 0.14.0 to 0.15.0
- [Commits](https://github.com/golang/oauth2/compare/v0.14.0...v0.15.0)

Updates `google.golang.org/api` from 0.151.0 to 0.152.0
- [Release notes](https://github.com/googleapis/google-api-go-client/releases)
- [Changelog](https://github.com/googleapis/google-api-go-client/blob/main/CHANGES.md)
- [Commits](https://github.com/googleapis/google-api-go-client/compare/v0.151.0...v0.152.0)

Updates `cloud.google.com/go` from 0.110.10 to 0.111.0
- [Release notes](https://github.com/googleapis/google-cloud-go/releases)
- [Changelog](https://github.com/googleapis/google-cloud-go/blob/main/CHANGES.md)
- [Commits](https://github.com/googleapis/google-cloud-go/compare/v0.110.10...v0.111.0)

Updates `github.com/google/cel-go` from 0.16.1 to 0.18.2
- [Release notes](https://github.com/google/cel-go/releases)
- [Commits](https://github.com/google/cel-go/compare/v0.16.1...v0.18.2)

Updates `golang.org/x/tools` from 0.15.0 to 0.16.0
- [Release notes](https://github.com/golang/tools/releases)
- [Commits](https://github.com/golang/tools/compare/v0.15.0...v0.16.0)

---
updated-dependencies:
- dependency-name: github.com/onsi/ginkgo/v2
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: github-dependencies
- dependency-name: golang.org/x/net
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: github-dependencies
- dependency-name: golang.org/x/oauth2
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: github-dependencies
- dependency-name: google.golang.org/api
  dependency-type: direct:production
  update-type: version-update:semver-minor
  dependency-group: github-dependencies
- dependency-name: cloud.google.com/go
  dependency-type: indirect
  update-type: version-update:semver-minor
  dependency-group: github-dependencies
- dependency-name: github.com/google/cel-go
  dependency-type: indirect
  update-type: version-update:semver-minor
  dependency-group: github-dependencies
- dependency-name: golang.org/x/tools
  dependency-type: indirect
  update-type: version-update:semver-minor
  dependency-group: github-dependencies
...
Signed-off-by: dependabot[bot] --- go.mod | 24 +- go.sum | 48 +- .../go/internal/.repo-metadata-full.json | 40 + .../go/internal/trace/trace.go | 146 +- .../antlr/antlr4/runtime/Go/antlr/v4/LICENSE | 26 - .../antlr4/runtime/Go/antlr/v4/antlrdoc.go | 68 - .../antlr4/runtime/Go/antlr/v4/atn_config.go | 303 -- .../runtime/Go/antlr/v4/atn_config_set.go | 441 --- .../runtime/Go/antlr/v4/input_stream.go | 113 - .../antlr4/runtime/Go/antlr/v4/jcollect.go | 198 -- .../runtime/Go/antlr/v4/prediction_context.go | 806 ------ .../runtime/Go/antlr/v4/prediction_mode.go | 529 ---- .../runtime/Go/antlr/v4/rule_context.go | 114 - .../antlr4/runtime/Go/antlr/v4/utils_set.go | 235 -- .../github.com/antlr4-go/antlr/v4/.gitignore | 18 + vendor/github.com/antlr4-go/antlr/v4/LICENSE | 28 + .../github.com/antlr4-go/antlr/v4/README.md | 54 + .../github.com/antlr4-go/antlr/v4/antlrdoc.go | 102 + .../runtime/Go => antlr4-go}/antlr/v4/atn.go | 9 +- .../antlr4-go/antlr/v4/atn_config.go | 335 +++ .../antlr4-go/antlr/v4/atn_config_set.go | 301 ++ .../antlr/v4/atn_deserialization_options.go | 7 +- .../antlr/v4/atn_deserializer.go | 9 +- .../antlr/v4/atn_simulator.go | 17 +- .../Go => antlr4-go}/antlr/v4/atn_state.go | 224 +- .../Go => antlr4-go}/antlr/v4/atn_type.go | 0 .../Go => antlr4-go}/antlr/v4/char_stream.go | 2 +- .../antlr/v4/common_token_factory.go | 0 .../antlr/v4/common_token_stream.go | 39 +- .../Go => antlr4-go}/antlr/v4/comparators.go | 33 +- .../antlr4-go/antlr/v4/configuration.go | 214 ++ .../runtime/Go => antlr4-go}/antlr/v4/dfa.go | 47 +- .../antlr/v4/dfa_serializer.go | 2 +- .../Go => antlr4-go}/antlr/v4/dfa_state.go | 29 +- .../antlr/v4/diagnostic_error_listener.go | 11 +- .../antlr/v4/error_listener.go | 40 +- .../antlr/v4/error_strategy.go | 450 ++- .../Go => antlr4-go}/antlr/v4/errors.go | 73 +- .../Go => antlr4-go}/antlr/v4/file_stream.go | 46 +- .../antlr4-go/antlr/v4/input_stream.go | 157 ++ .../Go => antlr4-go}/antlr/v4/int_stream.go | 0 .../Go => antlr4-go}/antlr/v4/interval_set.go | 60 +- .../github.com/antlr4-go/antlr/v4/jcollect.go | 685 +++++ .../Go => antlr4-go}/antlr/v4/lexer.go | 68 +- .../Go => antlr4-go}/antlr/v4/lexer_action.go | 100 +- .../antlr/v4/lexer_action_executor.go | 61 +- .../antlr/v4/lexer_atn_simulator.go | 185 +- .../Go => antlr4-go}/antlr/v4/ll1_analyzer.go | 62 +- .../antlr4-go/antlr/v4/nostatistics.go | 47 + .../Go => antlr4-go}/antlr/v4/parser.go | 160 +- .../antlr/v4/parser_atn_simulator.go | 787 +++--- .../antlr/v4/parser_rule_context.go | 85 +- .../antlr4-go/antlr/v4/prediction_context.go | 727 +++++ .../antlr/v4/prediction_context_cache.go | 48 + .../antlr4-go/antlr/v4/prediction_mode.go | 536 ++++ .../Go => antlr4-go}/antlr/v4/recognizer.go | 65 +- .../antlr4-go/antlr/v4/rule_context.go | 40 + .../antlr/v4/semantic_context.go | 33 +- .../antlr4-go/antlr/v4/statistics.go | 281 ++ .../antlr4-go/antlr/v4/stats_data.go | 23 + .../Go => antlr4-go}/antlr/v4/token.go | 36 +- .../Go => antlr4-go}/antlr/v4/token_source.go | 0 .../Go => antlr4-go}/antlr/v4/token_stream.go | 3 +- .../antlr/v4/tokenstream_rewriter.go | 221 +- .../antlr/v4/trace_listener.go | 0 .../Go => antlr4-go}/antlr/v4/transition.go | 229 +- .../runtime/Go => antlr4-go}/antlr/v4/tree.go | 109 +- .../Go => antlr4-go}/antlr/v4/trees.go | 22 +- .../Go => antlr4-go}/antlr/v4/utils.go | 84 +- .../github.com/google/cel-go/cel/BUILD.bazel | 15 +- vendor/github.com/google/cel-go/cel/decls.go | 1087 +------- vendor/github.com/google/cel-go/cel/env.go | 404 ++- .../github.com/google/cel-go/cel/folding.go | 559 
++++ .../github.com/google/cel-go/cel/inlining.go | 240 ++ vendor/github.com/google/cel-go/cel/io.go | 60 +- .../github.com/google/cel-go/cel/library.go | 289 +- vendor/github.com/google/cel-go/cel/macro.go | 465 +++- .../github.com/google/cel-go/cel/optimizer.go | 482 ++++ .../github.com/google/cel-go/cel/options.go | 178 +- .../github.com/google/cel-go/cel/program.go | 105 +- .../github.com/google/cel-go/cel/validator.go | 375 +++ .../google/cel-go/checker/BUILD.bazel | 7 +- .../google/cel-go/checker/checker.go | 634 +++-- .../github.com/google/cel-go/checker/cost.go | 280 +- .../google/cel-go/checker/decls/BUILD.bazel | 1 - .../github.com/google/cel-go/checker/env.go | 264 +- .../google/cel-go/checker/errors.go | 94 +- .../google/cel-go/checker/format.go | 216 ++ .../google/cel-go/checker/mapping.go | 14 +- .../google/cel-go/checker/options.go | 13 +- .../google/cel-go/checker/printer.go | 35 +- .../cel-go/checker/{decls => }/scopes.go | 38 +- .../google/cel-go/checker/standard.go | 481 +--- .../github.com/google/cel-go/checker/types.go | 402 +-- .../google/cel-go/common/ast/BUILD.bazel | 61 + .../google/cel-go/common/ast/ast.go | 450 +++ .../google/cel-go/common/ast/conversion.go | 632 +++++ .../google/cel-go/common/ast/expr.go | 860 ++++++ .../google/cel-go/common/ast/factory.go | 303 ++ .../google/cel-go/common/ast/navigable.go | 652 +++++ .../cel-go/common/containers/BUILD.bazel | 4 +- .../cel-go/common/containers/container.go | 22 +- .../google/cel-go/common/debug/BUILD.bazel | 4 +- .../google/cel-go/common/debug/debug.go | 156 +- .../google/cel-go/common/decls/BUILD.bazel | 39 + .../google/cel-go/common/decls/decls.go | 844 ++++++ .../github.com/google/cel-go/common/error.go | 8 +- .../github.com/google/cel-go/common/errors.go | 18 +- .../cel-go/common/functions/BUILD.bazel | 17 + .../cel-go/common/functions/functions.go | 61 + .../github.com/google/cel-go/common/source.go | 3 - .../google/cel-go/common/stdlib/BUILD.bazel | 25 + .../google/cel-go/common/stdlib/standard.go | 661 +++++ .../google/cel-go/common/types/BUILD.bazel | 7 +- .../google/cel-go/common/types/bool.go | 6 - .../google/cel-go/common/types/bytes.go | 7 - .../google/cel-go/common/types/double.go | 10 - .../google/cel-go/common/types/duration.go | 68 +- .../google/cel-go/common/types/err.go | 7 +- .../google/cel-go/common/types/int.go | 11 - .../google/cel-go/common/types/iterator.go | 2 +- .../google/cel-go/common/types/list.go | 90 +- .../google/cel-go/common/types/map.go | 87 +- .../google/cel-go/common/types/null.go | 2 - .../google/cel-go/common/types/object.go | 18 +- .../google/cel-go/common/types/optional.go | 2 +- .../google/cel-go/common/types/pb/type.go | 23 +- .../google/cel-go/common/types/provider.go | 265 +- .../cel-go/common/types/ref/provider.go | 37 +- .../google/cel-go/common/types/string.go | 44 +- .../google/cel-go/common/types/timestamp.go | 10 - .../google/cel-go/common/types/type.go | 102 - .../google/cel-go/common/types/types.go | 806 ++++++ .../google/cel-go/common/types/uint.go | 10 - .../google/cel-go/common/types/unknown.go | 290 +- .../google/cel-go/common/types/util.go | 2 +- .../github.com/google/cel-go/ext/BUILD.bazel | 11 +- vendor/github.com/google/cel-go/ext/README.md | 31 + .../github.com/google/cel-go/ext/bindings.go | 28 +- .../github.com/google/cel-go/ext/encoders.go | 5 - .../google/cel-go/ext/formatting.go | 904 ++++++ vendor/github.com/google/cel-go/ext/guards.go | 10 +- vendor/github.com/google/cel-go/ext/lists.go | 94 + vendor/github.com/google/cel-go/ext/math.go | 82 +- 
vendor/github.com/google/cel-go/ext/native.go | 110 +- vendor/github.com/google/cel-go/ext/protos.go | 49 +- vendor/github.com/google/cel-go/ext/sets.go | 61 +- .../github.com/google/cel-go/ext/strings.go | 492 +--- .../google/cel-go/interpreter/BUILD.bazel | 9 +- .../google/cel-go/interpreter/activation.go | 2 +- .../cel-go/interpreter/attribute_patterns.go | 42 +- .../google/cel-go/interpreter/attributes.go | 52 +- .../google/cel-go/interpreter/decorators.go | 10 +- .../google/cel-go/interpreter/dispatcher.go | 2 +- .../google/cel-go/interpreter/evalstate.go | 6 +- .../google/cel-go/interpreter/formatting.go | 383 --- .../cel-go/interpreter/functions/BUILD.bazel | 9 +- .../cel-go/interpreter/functions/functions.go | 33 +- .../cel-go/interpreter/functions/standard.go | 270 -- .../cel-go/interpreter/interpretable.go | 311 ++- .../google/cel-go/interpreter/interpreter.go | 48 +- .../google/cel-go/interpreter/planner.go | 326 +-- .../google/cel-go/interpreter/prune.go | 493 ++-- .../google/cel-go/interpreter/runtimecost.go | 63 +- .../google/cel-go/parser/BUILD.bazel | 9 +- .../github.com/google/cel-go/parser/errors.go | 17 +- .../google/cel-go/parser/gen/BUILD.bazel | 2 +- .../cel-go/parser/gen/cel_base_listener.go | 4 +- .../cel-go/parser/gen/cel_base_visitor.go | 5 +- .../google/cel-go/parser/gen/cel_lexer.go | 603 ++-- .../google/cel-go/parser/gen/cel_listener.go | 5 +- .../google/cel-go/parser/gen/cel_parser.go | 2478 +++++++++++------ .../google/cel-go/parser/gen/cel_visitor.go | 8 +- .../google/cel-go/parser/gen/generate.sh | 2 +- .../github.com/google/cel-go/parser/helper.go | 645 ++--- .../github.com/google/cel-go/parser/input.go | 4 +- .../github.com/google/cel-go/parser/macro.go | 201 +- .../google/cel-go/parser/options.go | 13 + .../github.com/google/cel-go/parser/parser.go | 220 +- .../google/cel-go/parser/unparser.go | 222 +- vendor/github.com/onsi/ginkgo/v2/CHANGELOG.md | 6 + .../ginkgo/v2/ginkgo/internal/test_suite.go | 6 +- .../ginkgo/v2/ginkgo/watch/dependencies.go | 2 +- .../ginkgo/v2/ginkgo/watch/package_hash.go | 4 +- .../onsi/ginkgo/v2/reporters/json_report.go | 6 +- .../onsi/ginkgo/v2/types/code_location.go | 2 +- .../onsi/ginkgo/v2/types/version.go | 2 +- vendor/golang.org/x/crypto/cryptobyte/asn1.go | 13 +- vendor/golang.org/x/crypto/ssh/client_auth.go | 20 +- vendor/golang.org/x/crypto/ssh/common.go | 8 + vendor/golang.org/x/crypto/ssh/server.go | 5 +- vendor/golang.org/x/crypto/ssh/tcpip.go | 35 + vendor/golang.org/x/sys/unix/fcntl.go | 2 +- vendor/golang.org/x/sys/unix/ioctl_linux.go | 5 + vendor/golang.org/x/sys/unix/mkerrors.sh | 3 +- vendor/golang.org/x/sys/unix/syscall_bsd.go | 2 +- vendor/golang.org/x/sys/unix/syscall_linux.go | 28 +- .../golang.org/x/sys/unix/syscall_openbsd.go | 14 + .../golang.org/x/sys/unix/syscall_solaris.go | 2 +- .../x/sys/unix/syscall_zos_s390x.go | 2 +- vendor/golang.org/x/sys/unix/zerrors_linux.go | 2 +- .../golang.org/x/sys/unix/zsyscall_linux.go | 15 + .../x/sys/unix/zsyscall_openbsd_386.go | 26 + .../x/sys/unix/zsyscall_openbsd_386.s | 5 + .../x/sys/unix/zsyscall_openbsd_amd64.go | 26 + .../x/sys/unix/zsyscall_openbsd_amd64.s | 5 + .../x/sys/unix/zsyscall_openbsd_arm.go | 26 + .../x/sys/unix/zsyscall_openbsd_arm.s | 5 + .../x/sys/unix/zsyscall_openbsd_arm64.go | 26 + .../x/sys/unix/zsyscall_openbsd_arm64.s | 5 + .../x/sys/unix/zsyscall_openbsd_mips64.go | 26 + .../x/sys/unix/zsyscall_openbsd_mips64.s | 5 + .../x/sys/unix/zsyscall_openbsd_ppc64.go | 26 + .../x/sys/unix/zsyscall_openbsd_ppc64.s | 6 + 
.../x/sys/unix/zsyscall_openbsd_riscv64.go | 26 + .../x/sys/unix/zsyscall_openbsd_riscv64.s | 5 + vendor/golang.org/x/sys/unix/ztypes_linux.go | 32 + .../x/sys/windows/syscall_windows.go | 2 + .../x/sys/windows/zsyscall_windows.go | 19 + vendor/golang.org/x/time/rate/rate.go | 2 + vendor/golang.org/x/xerrors/LICENSE | 27 - vendor/golang.org/x/xerrors/PATENTS | 22 - vendor/golang.org/x/xerrors/README | 2 - vendor/golang.org/x/xerrors/adaptor.go | 193 -- vendor/golang.org/x/xerrors/codereview.cfg | 1 - vendor/golang.org/x/xerrors/doc.go | 23 - vendor/golang.org/x/xerrors/errors.go | 33 - vendor/golang.org/x/xerrors/fmt.go | 190 -- vendor/golang.org/x/xerrors/format.go | 34 - vendor/golang.org/x/xerrors/frame.go | 56 - .../golang.org/x/xerrors/internal/internal.go | 8 - vendor/golang.org/x/xerrors/wrap.go | 112 - .../google.golang.org/api/internal/version.go | 2 +- vendor/modules.txt | 34 +- 234 files changed, 20771 insertions(+), 12356 deletions(-) delete mode 100644 vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/LICENSE delete mode 100644 vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/antlrdoc.go delete mode 100644 vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/atn_config.go delete mode 100644 vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/atn_config_set.go delete mode 100644 vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/input_stream.go delete mode 100644 vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/jcollect.go delete mode 100644 vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/prediction_context.go delete mode 100644 vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/prediction_mode.go delete mode 100644 vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/rule_context.go delete mode 100644 vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/utils_set.go create mode 100644 vendor/github.com/antlr4-go/antlr/v4/.gitignore create mode 100644 vendor/github.com/antlr4-go/antlr/v4/LICENSE create mode 100644 vendor/github.com/antlr4-go/antlr/v4/README.md create mode 100644 vendor/github.com/antlr4-go/antlr/v4/antlrdoc.go rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/atn.go (94%) create mode 100644 vendor/github.com/antlr4-go/antlr/v4/atn_config.go create mode 100644 vendor/github.com/antlr4-go/antlr/v4/atn_config_set.go rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/atn_deserialization_options.go (86%) rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/atn_deserializer.go (97%) rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/atn_simulator.go (66%) rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/atn_state.go (65%) rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/atn_type.go (100%) rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/char_stream.go (89%) rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/common_token_factory.go (100%) rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/common_token_stream.go (88%) rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/comparators.go (82%) create mode 100644 vendor/github.com/antlr4-go/antlr/v4/configuration.go rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/dfa.go (76%) rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/dfa_serializer.go (97%) rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/dfa_state.go (81%) rename 
vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/diagnostic_error_listener.go (92%) rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/error_listener.go (62%) rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/error_strategy.go (58%) rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/errors.go (73%) rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/file_stream.go (52%) create mode 100644 vendor/github.com/antlr4-go/antlr/v4/input_stream.go rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/int_stream.go (100%) rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/interval_set.go (82%) create mode 100644 vendor/github.com/antlr4-go/antlr/v4/jcollect.go rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/lexer.go (78%) rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/lexer_action.go (78%) rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/lexer_action_executor.go (70%) rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/lexer_atn_simulator.go (80%) rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/ll1_analyzer.go (73%) create mode 100644 vendor/github.com/antlr4-go/antlr/v4/nostatistics.go rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/parser.go (80%) rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/parser_atn_simulator.go (64%) rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/parser_rule_context.go (77%) create mode 100644 vendor/github.com/antlr4-go/antlr/v4/prediction_context.go create mode 100644 vendor/github.com/antlr4-go/antlr/v4/prediction_context_cache.go create mode 100644 vendor/github.com/antlr4-go/antlr/v4/prediction_mode.go rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/recognizer.go (70%) create mode 100644 vendor/github.com/antlr4-go/antlr/v4/rule_context.go rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/semantic_context.go (92%) create mode 100644 vendor/github.com/antlr4-go/antlr/v4/statistics.go create mode 100644 vendor/github.com/antlr4-go/antlr/v4/stats_data.go rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/token.go (86%) rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/token_source.go (100%) rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/token_stream.go (90%) rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/tokenstream_rewriter.go (73%) rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/trace_listener.go (100%) rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/transition.go (67%) rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/tree.go (62%) rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/trees.go (81%) rename vendor/github.com/{antlr/antlr4/runtime/Go => antlr4-go}/antlr/v4/utils.go (85%) create mode 100644 vendor/github.com/google/cel-go/cel/folding.go create mode 100644 vendor/github.com/google/cel-go/cel/inlining.go create mode 100644 vendor/github.com/google/cel-go/cel/optimizer.go create mode 100644 vendor/github.com/google/cel-go/cel/validator.go create mode 100644 vendor/github.com/google/cel-go/checker/format.go rename vendor/github.com/google/cel-go/checker/{decls => }/scopes.go (81%) create 
mode 100644 vendor/github.com/google/cel-go/common/ast/BUILD.bazel create mode 100644 vendor/github.com/google/cel-go/common/ast/ast.go create mode 100644 vendor/github.com/google/cel-go/common/ast/conversion.go create mode 100644 vendor/github.com/google/cel-go/common/ast/expr.go create mode 100644 vendor/github.com/google/cel-go/common/ast/factory.go create mode 100644 vendor/github.com/google/cel-go/common/ast/navigable.go create mode 100644 vendor/github.com/google/cel-go/common/decls/BUILD.bazel create mode 100644 vendor/github.com/google/cel-go/common/decls/decls.go create mode 100644 vendor/github.com/google/cel-go/common/functions/BUILD.bazel create mode 100644 vendor/github.com/google/cel-go/common/functions/functions.go create mode 100644 vendor/github.com/google/cel-go/common/stdlib/BUILD.bazel create mode 100644 vendor/github.com/google/cel-go/common/stdlib/standard.go delete mode 100644 vendor/github.com/google/cel-go/common/types/type.go create mode 100644 vendor/github.com/google/cel-go/common/types/types.go create mode 100644 vendor/github.com/google/cel-go/ext/formatting.go create mode 100644 vendor/github.com/google/cel-go/ext/lists.go delete mode 100644 vendor/github.com/google/cel-go/interpreter/formatting.go delete mode 100644 vendor/github.com/google/cel-go/interpreter/functions/standard.go delete mode 100644 vendor/golang.org/x/xerrors/LICENSE delete mode 100644 vendor/golang.org/x/xerrors/PATENTS delete mode 100644 vendor/golang.org/x/xerrors/README delete mode 100644 vendor/golang.org/x/xerrors/adaptor.go delete mode 100644 vendor/golang.org/x/xerrors/codereview.cfg delete mode 100644 vendor/golang.org/x/xerrors/doc.go delete mode 100644 vendor/golang.org/x/xerrors/errors.go delete mode 100644 vendor/golang.org/x/xerrors/fmt.go delete mode 100644 vendor/golang.org/x/xerrors/format.go delete mode 100644 vendor/golang.org/x/xerrors/frame.go delete mode 100644 vendor/golang.org/x/xerrors/internal/internal.go delete mode 100644 vendor/golang.org/x/xerrors/wrap.go diff --git a/go.mod b/go.mod index e33510d5d..3cc87b0ad 100644 --- a/go.mod +++ b/go.mod @@ -12,11 +12,11 @@ require ( github.com/google/uuid v1.4.0 github.com/kubernetes-csi/csi-lib-utils v0.16.0 github.com/kubernetes-csi/csi-test/v5 v5.2.0 - github.com/onsi/ginkgo/v2 v2.13.1 + github.com/onsi/ginkgo/v2 v2.13.2 github.com/onsi/gomega v1.30.0 - golang.org/x/net v0.18.0 - golang.org/x/oauth2 v0.14.0 - google.golang.org/api v0.151.0 + golang.org/x/net v0.19.0 + golang.org/x/oauth2 v0.15.0 + google.golang.org/api v0.152.0 google.golang.org/grpc v1.59.0 gopkg.in/yaml.v3 v3.0.1 k8s.io/api v0.28.4 @@ -32,12 +32,12 @@ require ( ) require ( - cloud.google.com/go v0.110.10 // indirect + cloud.google.com/go v0.111.0 // indirect cloud.google.com/go/compute v1.23.3 // indirect github.com/Masterminds/goutils v1.1.1 // indirect github.com/Masterminds/semver v1.5.0 // indirect github.com/NYTimes/gziphandler v1.1.1 // indirect - github.com/antlr/antlr4/runtime/Go/antlr/v4 v4.0.0-20230305170008-8188dc5388df // indirect + github.com/antlr4-go/antlr/v4 v4.13.0 // indirect github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/blang/semver/v4 v4.0.0 // indirect @@ -61,7 +61,7 @@ require ( github.com/gogo/protobuf v1.3.2 // indirect github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect github.com/golang/protobuf v1.5.3 // indirect - github.com/google/cel-go v0.16.1 // indirect + github.com/google/cel-go v0.18.2 // indirect 
github.com/google/gnostic-models v0.6.8 // indirect github.com/google/gofuzz v1.2.1-0.20210504230335-f78f29fc09ea // indirect github.com/google/pprof v0.0.0-20231101202521-4ca4178f5c7a // indirect @@ -111,14 +111,14 @@ require ( go.uber.org/multierr v1.11.0 // indirect go.uber.org/zap v1.26.0 // indirect go4.org v0.0.0-20230225012048-214862532bf5 // indirect - golang.org/x/crypto v0.15.0 // indirect + golang.org/x/crypto v0.16.0 // indirect golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa // indirect golang.org/x/sync v0.5.0 // indirect - golang.org/x/sys v0.14.0 // indirect - golang.org/x/term v0.14.0 // indirect + golang.org/x/sys v0.15.0 // indirect + golang.org/x/term v0.15.0 // indirect golang.org/x/text v0.14.0 // indirect - golang.org/x/time v0.4.0 // indirect - golang.org/x/tools v0.15.0 // indirect + golang.org/x/time v0.5.0 // indirect + golang.org/x/tools v0.16.0 // indirect golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 // indirect gomodules.xyz/jsonpatch/v2 v2.4.0 // indirect google.golang.org/appengine v1.6.8 // indirect diff --git a/go.sum b/go.sum index 3d2cf0f5d..5e3de9135 100755 --- a/go.sum +++ b/go.sum @@ -7,8 +7,8 @@ cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTj cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0= cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To= cloud.google.com/go v0.53.0/go.mod h1:fp/UouUEsRkN6ryDKNW/Upv/JBKnv6WDthjR6+vze6M= -cloud.google.com/go v0.110.10 h1:LXy9GEO+timppncPIAZoOj3l58LIU9k+kn48AN7IO3Y= -cloud.google.com/go v0.110.10/go.mod h1:v1OoFqYxiBkUrruItNM3eT4lLByNjxmJSV/xDKJNnic= +cloud.google.com/go v0.111.0 h1:YHLKNupSD1KqjDbQ3+LVdQ81h/UJbJyZG203cEfnQgM= +cloud.google.com/go v0.111.0/go.mod h1:0mibmpKP1TyOOFYQY5izo0LnT+ecvOQ0Sg3OdmMiNRU= cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE= cloud.google.com/go/compute v1.23.3 h1:6sVlXXBmbd7jNX0Ipq0trII3e4n1/MsADLK6a+aiVlk= @@ -33,8 +33,8 @@ github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3Q github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y= github.com/NYTimes/gziphandler v1.1.1 h1:ZUDjpQae29j0ryrS0u/B8HZfJBtBQHjqw2rQ2cqUQ3I= github.com/NYTimes/gziphandler v1.1.1/go.mod h1:n/CVRwUEOgIxrgPvAQhUUr9oeUtvrhMomdKFjzJNB0c= -github.com/antlr/antlr4/runtime/Go/antlr/v4 v4.0.0-20230305170008-8188dc5388df h1:7RFfzj4SSt6nnvCPbCqijJi1nWCd+TqAT3bYCStRC18= -github.com/antlr/antlr4/runtime/Go/antlr/v4 v4.0.0-20230305170008-8188dc5388df/go.mod h1:pSwJ0fSY5KhvocuWSx4fz3BA8OrA1bQn+K1Eli3BRwM= +github.com/antlr4-go/antlr/v4 v4.13.0 h1:lxCg3LAv+EUK6t1i0y1V6/SLeUi0eKEKdhQAlS8TVTI= +github.com/antlr4-go/antlr/v4 v4.13.0/go.mod h1:pfChB/xh/Unjila75QW7+VU4TSnWnnk9UTnmpPaOR2g= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so= @@ -142,8 +142,8 @@ github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Z github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= github.com/google/btree v1.0.1 h1:gK4Kx5IaGY9CD5sPJ36FHiBJ6ZXl0kilRiiCj+jdYp4= github.com/google/btree v1.0.1/go.mod 
h1:xXMiIv4Fb/0kKde4SpL7qlzvu5cMJDRkFDxJfI9uaxA= -github.com/google/cel-go v0.16.1 h1:3hZfSNiAU3KOiNtxuFXVp5WFy4hf/Ly3Sa4/7F8SXNo= -github.com/google/cel-go v0.16.1/go.mod h1:HXZKzB0LXqer5lHHgfWAnlYwJaQBDKMjxjulNQzhwhY= +github.com/google/cel-go v0.18.2 h1:L0B6sNBSVmt0OyECi8v6VOS74KOc9W/tLiWKfZABvf4= +github.com/google/cel-go v0.18.2/go.mod h1:kWcIzTsPX0zmQ+H3TirHstLLf9ep5QTsZBN9u4dOYLg= github.com/google/gnostic-models v0.6.8 h1:yo/ABAfM5IMRsS1VnXjTBvUb61tFIHozhlYvRgGre9I= github.com/google/gnostic-models v0.6.8/go.mod h1:5n7qKqH0f5wFt+aWF8CW6pZLLNOfYuF5OpfBSENuI8U= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= @@ -241,8 +241,8 @@ github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9G github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= -github.com/onsi/ginkgo/v2 v2.13.1 h1:LNGfMbR2OVGBfXjvRZIZ2YCTQdGKtPLvuI1rMCCj3OU= -github.com/onsi/ginkgo/v2 v2.13.1/go.mod h1:XStQ8QcGwLyF4HdfcZB8SFOS/MWCgDuXMSBe6zrvLgM= +github.com/onsi/ginkgo/v2 v2.13.2 h1:Bi2gGVkfn6gQcjNjZJVO8Gf0FHzMPf2phUei9tejVMs= +github.com/onsi/ginkgo/v2 v2.13.2/go.mod h1:XStQ8QcGwLyF4HdfcZB8SFOS/MWCgDuXMSBe6zrvLgM= github.com/onsi/gomega v1.30.0 h1:hvMK7xYz4D3HapigLTeGdId/NcfQx1VHMJc60ew99+8= github.com/onsi/gomega v1.30.0/go.mod h1:9sxs+SwGrKI0+PWe4Fxa9tFQQBG5xSsSbMXOI8PPpoQ= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= @@ -349,8 +349,8 @@ golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8U golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.15.0 h1:frVn1TEaCEaZcn3Tmd7Y2b5KKPaZ+I32Q2OA3kYp5TA= -golang.org/x/crypto v0.15.0/go.mod h1:4ChreQoLWfG3xLDer1WdlH5NdlQ3+mwnQq1YTKY+72g= +golang.org/x/crypto v0.16.0 h1:mMMrFzRSCF0GvB7Ne27XVtVAaXLrPmgPC7/v0tkwHaY= +golang.org/x/crypto v0.16.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -401,15 +401,15 @@ golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwY golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.7.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= -golang.org/x/net v0.18.0 h1:mIYleuAkSbHh0tCv7RvjL3F6ZVbLjq4+R7zbOn3Kokg= -golang.org/x/net v0.18.0/go.mod h1:/czyP5RqHAH4odGYxBJ1qz0+CE5WZ+2j1YgoEo8F2jQ= +golang.org/x/net v0.19.0 h1:zTwKpTd2XuCqf8huc7Fo2iSy+4RHPd10s4KzeTnVr1c= +golang.org/x/net v0.19.0/go.mod h1:CfAk/cbD4CthTvqiEl8NpboMuiuOYsAr/7NOjZJtv1U= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod 
h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= -golang.org/x/oauth2 v0.14.0 h1:P0Vrf/2538nmC0H+pEQ3MNFRRnVR7RlqyVw+bvm26z0= -golang.org/x/oauth2 v0.14.0/go.mod h1:lAtNWgaWfL4cm7j2OV8TxGi9Qb7ECORx8DktCY74OwM= +golang.org/x/oauth2 v0.15.0 h1:s8pnnxNVzjWyrvYdFUQq5llS1PX2zhPXmccZv99h7uQ= +golang.org/x/oauth2 v0.15.0/go.mod h1:q48ptWNTY5XWf+JNten23lcvHpLJ0ZSxF5ttTHKVCAM= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= @@ -439,13 +439,13 @@ golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.14.0 h1:Vz7Qs629MkJkGyHxUlRHizWJRG2j8fbQKjELVSNhy7Q= -golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc= +golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= -golang.org/x/term v0.14.0 h1:LGK9IlZ8T9jvdy6cTdfKUCltatMFOehAQo9SRC46UQ8= -golang.org/x/term v0.14.0/go.mod h1:TySc+nGkYR6qt8km8wUhuFRTVSMIX3XPR58y2lC8vww= +golang.org/x/term v0.15.0 h1:y/Oo/a/q3IXu26lQgl04j/gjuBDOBlx7X6Om1j2CPW4= +golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -458,8 +458,8 @@ golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/time v0.4.0 h1:Z81tqI5ddIoXDPvVQ7/7CC9TnLM7ubaFG2qXYd5BbYY= -golang.org/x/time v0.4.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= +golang.org/x/time v0.5.0 h1:o7cqy6amK/52YcAKIPlM3a+Fpj35zvRj2TP+e1xFSfk= +golang.org/x/time v0.5.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod 
h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= @@ -487,8 +487,8 @@ golang.org/x/tools v0.0.0-20200212150539-ea181f53ac56/go.mod h1:TB2adYChydJhpapK golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/tools v0.15.0 h1:zdAyfUGbYmuVokhzVmghFl2ZJh5QhcfebBgmVPFYA+8= -golang.org/x/tools v0.15.0/go.mod h1:hpksKq4dtpQWS1uQ61JkdqWM3LscIS6Slf+VVkm+wQk= +golang.org/x/tools v0.16.0 h1:GO788SKMRunPIBCXiQyo2AaexLstOrVhuAL5YwsckQM= +golang.org/x/tools v0.16.0/go.mod h1:kYVVN6I1mBNoB1OX+noeBjbRk4IUEPa7JJ+TJMEooJ0= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -505,8 +505,8 @@ google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsb google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI= google.golang.org/api v0.17.0/go.mod h1:BwFmGc8tA3vsd7r/7kR8DY7iEEGSU04BFxCo5jP/sfE= -google.golang.org/api v0.151.0 h1:FhfXLO/NFdJIzQtCqjpysWwqKk8AzGWBUhMIx67cVDU= -google.golang.org/api v0.151.0/go.mod h1:ccy+MJ6nrYFgE3WgRx/AMXOxOmU8Q4hSa+jjibzhxcg= +google.golang.org/api v0.152.0 h1:t0r1vPnfMc260S2Ci+en7kfCZaLOPs5KI0sVV/6jZrY= +google.golang.org/api v0.152.0/go.mod h1:3qNJX5eOmhiWYc67jRA/3GsDw97UFb5ivv7Y2PrriAY= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= diff --git a/vendor/cloud.google.com/go/internal/.repo-metadata-full.json b/vendor/cloud.google.com/go/internal/.repo-metadata-full.json index 31d172047..46c4094d3 100644 --- a/vendor/cloud.google.com/go/internal/.repo-metadata-full.json +++ b/vendor/cloud.google.com/go/internal/.repo-metadata-full.json @@ -619,6 +619,16 @@ "release_level": "stable", "library_type": "GAPIC_AUTO" }, + "cloud.google.com/go/cloudprofiler/apiv2": { + "api_shortname": "cloudprofiler", + "distribution_name": "cloud.google.com/go/cloudprofiler/apiv2", + "description": "Cloud Profiler API", + "language": "go", + "client_library_type": "generated", + "client_documentation": "https://cloud.google.com/go/docs/reference/cloud.google.com/go/cloudprofiler/latest/apiv2", + "release_level": "preview", + "library_type": "GAPIC_AUTO" + }, "cloud.google.com/go/cloudtasks/apiv2": { "api_shortname": "cloudtasks", "distribution_name": "cloud.google.com/go/cloudtasks/apiv2", @@ -1019,6 +1029,16 @@ "release_level": "stable", "library_type": "GAPIC_AUTO" }, + "cloud.google.com/go/edgenetwork/apiv1": { + "api_shortname": "edgenetwork", + "distribution_name": "cloud.google.com/go/edgenetwork/apiv1", + "description": "Distributed Cloud Edge Network API", + "language": "go", + "client_library_type": 
"generated", + "client_documentation": "https://cloud.google.com/go/docs/reference/cloud.google.com/go/edgenetwork/latest/apiv1", + "release_level": "preview", + "library_type": "GAPIC_AUTO" + }, "cloud.google.com/go/errorreporting": { "api_shortname": "clouderrorreporting", "distribution_name": "cloud.google.com/go/errorreporting", @@ -1099,6 +1119,16 @@ "release_level": "stable", "library_type": "GAPIC_AUTO" }, + "cloud.google.com/go/firestore/apiv1/admin": { + "api_shortname": "firestore", + "distribution_name": "cloud.google.com/go/firestore/apiv1/admin", + "description": "Cloud Firestore API", + "language": "go", + "client_library_type": "generated", + "client_documentation": "https://cloud.google.com/go/docs/reference/cloud.google.com/go/firestore/latest/apiv1/admin", + "release_level": "stable", + "library_type": "GAPIC_AUTO" + }, "cloud.google.com/go/functions/apiv1": { "api_shortname": "cloudfunctions", "distribution_name": "cloud.google.com/go/functions/apiv1", @@ -2279,6 +2309,16 @@ "release_level": "preview", "library_type": "GAPIC_AUTO" }, + "cloud.google.com/go/telcoautomation/apiv1": { + "api_shortname": "telcoautomation", + "distribution_name": "cloud.google.com/go/telcoautomation/apiv1", + "description": "Telco Automation API", + "language": "go", + "client_library_type": "generated", + "client_documentation": "https://cloud.google.com/go/docs/reference/cloud.google.com/go/telcoautomation/latest/apiv1", + "release_level": "preview", + "library_type": "GAPIC_AUTO" + }, "cloud.google.com/go/texttospeech/apiv1": { "api_shortname": "texttospeech", "distribution_name": "cloud.google.com/go/texttospeech/apiv1", diff --git a/vendor/cloud.google.com/go/internal/trace/trace.go b/vendor/cloud.google.com/go/internal/trace/trace.go index c201d343e..f6b88253b 100644 --- a/vendor/cloud.google.com/go/internal/trace/trace.go +++ b/vendor/cloud.google.com/go/internal/trace/trace.go @@ -16,35 +16,94 @@ package trace import ( "context" + "errors" "fmt" + "os" + "strings" "go.opencensus.io/trace" - "golang.org/x/xerrors" + "go.opentelemetry.io/otel" + "go.opentelemetry.io/otel/attribute" + "go.opentelemetry.io/otel/codes" + ottrace "go.opentelemetry.io/otel/trace" "google.golang.org/api/googleapi" "google.golang.org/genproto/googleapis/rpc/code" "google.golang.org/grpc/status" ) -// StartSpan adds a span to the trace with the given name. +const ( + telemetryPlatformTracingOpenCensus = "opencensus" + telemetryPlatformTracingOpenTelemetry = "opentelemetry" + telemetryPlatformTracingVar = "GOOGLE_API_GO_EXPERIMENTAL_TELEMETRY_PLATFORM_TRACING" +) + +var ( + // TODO(chrisdsmith): Should the name of the OpenTelemetry tracer be public and mutable? + openTelemetryTracerName string = "cloud.google.com/go" + openTelemetryTracingEnabled bool = strings.EqualFold(strings.TrimSpace( + os.Getenv(telemetryPlatformTracingVar)), telemetryPlatformTracingOpenTelemetry) +) + +// IsOpenCensusTracingEnabled returns true if the environment variable +// GOOGLE_API_GO_EXPERIMENTAL_TELEMETRY_PLATFORM_TRACING is NOT set to the +// case-insensitive value "opentelemetry". +func IsOpenCensusTracingEnabled() bool { + return !IsOpenTelemetryTracingEnabled() +} + +// IsOpenTelemetryTracingEnabled returns true if the environment variable +// GOOGLE_API_GO_EXPERIMENTAL_TELEMETRY_PLATFORM_TRACING is set to the +// case-insensitive value "opentelemetry". +func IsOpenTelemetryTracingEnabled() bool { + return openTelemetryTracingEnabled +} + +// StartSpan adds a span to the trace with the given name. 
If IsOpenCensusTracingEnabled +// returns true, the span will be an OpenCensus span. If IsOpenTelemetryTracingEnabled +// returns true, the span will be an OpenTelemetry span. Set the environment variable +// GOOGLE_API_GO_EXPERIMENTAL_TELEMETRY_PLATFORM_TRACING to the case-insensitive +// value "opentelemetry" before loading the package to use OpenTelemetry tracing. +// The default will remain OpenCensus until [TBD], at which time the default will +// switch to "opentelemetry" and explicitly setting the environment variable to +// "opencensus" will be required to continue using OpenCensus tracing. func StartSpan(ctx context.Context, name string) context.Context { - ctx, _ = trace.StartSpan(ctx, name) + if IsOpenTelemetryTracingEnabled() { + ctx, _ = otel.GetTracerProvider().Tracer(openTelemetryTracerName).Start(ctx, name) + } else { + ctx, _ = trace.StartSpan(ctx, name) + } return ctx } -// EndSpan ends a span with the given error. +// EndSpan ends a span with the given error. If IsOpenCensusTracingEnabled +// returns true, the span will be an OpenCensus span. If IsOpenTelemetryTracingEnabled +// returns true, the span will be an OpenTelemetry span. Set the environment variable +// GOOGLE_API_GO_EXPERIMENTAL_TELEMETRY_PLATFORM_TRACING to the case-insensitive +// value "opentelemetry" before loading the package to use OpenTelemetry tracing. +// The default will remain OpenCensus until [TBD], at which time the default will +// switch to "opentelemetry" and explicitly setting the environment variable to +// "opencensus" will be required to continue using OpenCensus tracing. func EndSpan(ctx context.Context, err error) { - span := trace.FromContext(ctx) - if err != nil { - span.SetStatus(toStatus(err)) + if IsOpenTelemetryTracingEnabled() { + span := ottrace.SpanFromContext(ctx) + if err != nil { + span.SetStatus(codes.Error, toOpenTelemetryStatusDescription(err)) + span.RecordError(err) + } + span.End() + } else { + span := trace.FromContext(ctx) + if err != nil { + span.SetStatus(toStatus(err)) + } + span.End() } - span.End() } -// toStatus interrogates an error and converts it to an appropriate -// OpenCensus status. +// toStatus converts an error to an equivalent OpenCensus status. func toStatus(err error) trace.Status { var err2 *googleapi.Error - if ok := xerrors.As(err, &err2); ok { + if ok := errors.As(err, &err2); ok { return trace.Status{Code: httpStatusCodeToOCCode(err2.Code), Message: err2.Message} } else if s, ok := status.FromError(err); ok { return trace.Status{Code: int32(s.Code()), Message: s.Message()} @@ -53,6 +112,18 @@ func toStatus(err error) trace.Status { } } +// toOpenTelemetryStatus converts an error to an equivalent OpenTelemetry status description. +func toOpenTelemetryStatusDescription(err error) string { + var err2 *googleapi.Error + if ok := errors.As(err, &err2); ok { + return err2.Message + } else if s, ok := status.FromError(err); ok { + return s.Message() + } else { + return err.Error() + } +} + // TODO(deklerk): switch to using OpenCensus function when it becomes available. // Reference: https://github.com/googleapis/googleapis/blob/26b634d2724ac5dd30ae0b0cbfb01f07f2e4050e/google/rpc/code.proto func httpStatusCodeToOCCode(httpStatusCode int) int32 { @@ -86,10 +157,33 @@ func httpStatusCodeToOCCode(httpStatusCode int) int32 { } } -// TODO: (odeke-em): perhaps just pass around spans due to the cost -// incurred from using trace.FromContext(ctx) yet we could avoid -// throwing away the work done by ctx, span := trace.StartSpan. 
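As an aside on the StartSpan/EndSpan switch above, here is a minimal, hypothetical sketch of how an application might opt into the OpenTelemetry path. Per the doc comments, the environment variable has to be present before this package is loaded (so set it in the process environment rather than with os.Setenv at runtime), and a global TracerProvider has to be registered, otherwise otel.GetTracerProvider() hands back a no-op provider. The stdouttrace exporter is only an illustrative choice; nothing below is part of this patch.

```go
// Assumed usage sketch, not part of this change. Run with:
//   GOOGLE_API_GO_EXPERIMENTAL_TELEMETRY_PLATFORM_TRACING=opentelemetry go run .
package main

import (
	"context"
	"log"

	"go.opentelemetry.io/otel"
	"go.opentelemetry.io/otel/exporters/stdout/stdouttrace"
	sdktrace "go.opentelemetry.io/otel/sdk/trace"
)

func main() {
	// Without a registered provider, otel.GetTracerProvider() is a no-op and
	// the spans created by the library's internal StartSpan calls are dropped.
	exporter, err := stdouttrace.New()
	if err != nil {
		log.Fatal(err)
	}
	tp := sdktrace.NewTracerProvider(sdktrace.WithBatcher(exporter))
	defer func() { _ = tp.Shutdown(context.Background()) }()
	otel.SetTracerProvider(tp)

	// ... construct and use a Google Cloud client here; its internal
	// trace.StartSpan/EndSpan calls now emit OpenTelemetry spans.
}
```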
+// TracePrintf retrieves the current OpenCensus or OpenTelemetry span from context, then: +// * calls Span.Annotatef if OpenCensus is enabled; or +// * calls Span.AddEvent if OpenTelemetry is enabled. +// +// If IsOpenCensusTracingEnabled returns true, the expected span must be an +// OpenCensus span. If IsOpenTelemetryTracingEnabled returns true, the expected +// span must be an OpenTelemetry span. Set the environment variable +// GOOGLE_API_GO_EXPERIMENTAL_TELEMETRY_PLATFORM_TRACING to the case-insensitive +// value "opentelemetry" before loading the package to use OpenTelemetry tracing. +// The default will remain OpenCensus until [TBD], at which time the default will +// switch to "opentelemetry" and explicitly setting the environment variable to +// "opencensus" will be required to continue using OpenCensus tracing. func TracePrintf(ctx context.Context, attrMap map[string]interface{}, format string, args ...interface{}) { + if IsOpenTelemetryTracingEnabled() { + attrs := otAttrs(attrMap) + ottrace.SpanFromContext(ctx).AddEvent(fmt.Sprintf(format, args...), ottrace.WithAttributes(attrs...)) + } else { + attrs := ocAttrs(attrMap) + // TODO: (odeke-em): perhaps just pass around spans due to the cost + // incurred from using trace.FromContext(ctx) yet we could avoid + // throwing away the work done by ctx, span := trace.StartSpan. + trace.FromContext(ctx).Annotatef(attrs, format, args...) + } +} + +// ocAttrs converts a generic map to OpenCensus attributes. +func ocAttrs(attrMap map[string]interface{}) []trace.Attribute { var attrs []trace.Attribute for k, v := range attrMap { var a trace.Attribute @@ -107,5 +201,27 @@ func TracePrintf(ctx context.Context, attrMap map[string]interface{}, format str } attrs = append(attrs, a) } - trace.FromContext(ctx).Annotatef(attrs, format, args...) + return attrs +} + +// otAttrs converts a generic map to OpenTelemetry attributes. +func otAttrs(attrMap map[string]interface{}) []attribute.KeyValue { + var attrs []attribute.KeyValue + for k, v := range attrMap { + var a attribute.KeyValue + switch v := v.(type) { + case string: + a = attribute.Key(k).String(v) + case bool: + a = attribute.Key(k).Bool(v) + case int: + a = attribute.Key(k).Int(v) + case int64: + a = attribute.Key(k).Int64(v) + default: + a = attribute.Key(k).String(fmt.Sprintf("%#v", v)) + } + attrs = append(attrs, a) + } + return attrs } diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/LICENSE b/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/LICENSE deleted file mode 100644 index 52cf18e42..000000000 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/LICENSE +++ /dev/null @@ -1,26 +0,0 @@ -Copyright 2021 The ANTLR Project - -Redistribution and use in source and binary forms, with or without modification, -are permitted provided that the following conditions are met: - - 1. Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - - 2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - - 3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from this - software without specific prior written permission. 
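Side note on the TracePrintf/otAttrs changes above: the following standalone sketch (hypothetical function name and attribute values, using only the public OpenTelemetry API) shows the same map-to-attribute conversion and AddEvent annotation pattern outside the internal package.

```go
// Hypothetical helper mirroring the otAttrs + TracePrintf pattern above.
package main

import (
	"context"
	"fmt"

	"go.opentelemetry.io/otel/attribute"
	"go.opentelemetry.io/otel/trace"
)

// annotate converts a generic map into OpenTelemetry attributes and attaches
// them to the span in ctx as an event, falling back to %#v formatting for
// unsupported value types, just as the patched code does.
func annotate(ctx context.Context, attrMap map[string]interface{}, format string, args ...interface{}) {
	var attrs []attribute.KeyValue
	for k, v := range attrMap {
		switch v := v.(type) {
		case string:
			attrs = append(attrs, attribute.Key(k).String(v))
		case bool:
			attrs = append(attrs, attribute.Key(k).Bool(v))
		case int:
			attrs = append(attrs, attribute.Key(k).Int(v))
		case int64:
			attrs = append(attrs, attribute.Key(k).Int64(v))
		default:
			attrs = append(attrs, attribute.Key(k).String(fmt.Sprintf("%#v", v)))
		}
	}
	trace.SpanFromContext(ctx).AddEvent(fmt.Sprintf(format, args...), trace.WithAttributes(attrs...))
}

func main() {
	// With no span in the context this is a no-op, so the example is safe to run.
	annotate(context.Background(), map[string]interface{}{"rows": 42, "cached": true}, "fetched page %d", 1)
}
```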
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/antlrdoc.go b/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/antlrdoc.go deleted file mode 100644 index ab5121267..000000000 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/antlrdoc.go +++ /dev/null @@ -1,68 +0,0 @@ -/* -Package antlr implements the Go version of the ANTLR 4 runtime. - -# The ANTLR Tool - -ANTLR (ANother Tool for Language Recognition) is a powerful parser generator for reading, processing, executing, -or translating structured text or binary files. It's widely used to build languages, tools, and frameworks. -From a grammar, ANTLR generates a parser that can build parse trees and also generates a listener interface -(or visitor) that makes it easy to respond to the recognition of phrases of interest. - -# Code Generation - -ANTLR supports the generation of code in a number of [target languages], and the generated code is supported by a -runtime library, written specifically to support the generated code in the target language. This library is the -runtime for the Go target. - -To generate code for the go target, it is generally recommended to place the source grammar files in a package of -their own, and use the `.sh` script method of generating code, using the go generate directive. In that same directory -it is usual, though not required, to place the antlr tool that should be used to generate the code. That does mean -that the antlr tool JAR file will be checked in to your source code control though, so you are free to use any other -way of specifying the version of the ANTLR tool to use, such as aliasing in `.zshrc` or equivalent, or a profile in -your IDE, or configuration in your CI system. - -Here is a general template for an ANTLR based recognizer in Go: - - . - ├── myproject - ├── parser - │ ├── mygrammar.g4 - │ ├── antlr-4.12.0-complete.jar - │ ├── error_listeners.go - │ ├── generate.go - │ ├── generate.sh - ├── go.mod - ├── go.sum - ├── main.go - └── main_test.go - -Make sure that the package statement in your grammar file(s) reflects the go package they exist in. -The generate.go file then looks like this: - - package parser - - //go:generate ./generate.sh - -And the generate.sh file will look similar to this: - - #!/bin/sh - - alias antlr4='java -Xmx500M -cp "./antlr4-4.12.0-complete.jar:$CLASSPATH" org.antlr.v4.Tool' - antlr4 -Dlanguage=Go -no-visitor -package parser *.g4 - -depending on whether you want visitors or listeners or any other ANTLR options. - -From the command line at the root of your package “myproject” you can then simply issue the command: - - go generate ./... - -# Copyright Notice - -Copyright (c) 2012-2022 The ANTLR Project. All rights reserved. 
- -Use of this file is governed by the BSD 3-clause license, which can be found in the [LICENSE.txt] file in the project root. - -[target languages]: https://github.com/antlr/antlr4/tree/master/runtime -[LICENSE.txt]: https://github.com/antlr/antlr4/blob/master/LICENSE.txt -*/ -package antlr diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/atn_config.go b/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/atn_config.go deleted file mode 100644 index 7619fa172..000000000 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/atn_config.go +++ /dev/null @@ -1,303 +0,0 @@ -// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved. -// Use of this file is governed by the BSD 3-clause license that -// can be found in the LICENSE.txt file in the project root. - -package antlr - -import ( - "fmt" -) - -// ATNConfig is a tuple: (ATN state, predicted alt, syntactic, semantic -// context). The syntactic context is a graph-structured stack node whose -// path(s) to the root is the rule invocation(s) chain used to arrive at the -// state. The semantic context is the tree of semantic predicates encountered -// before reaching an ATN state. -type ATNConfig interface { - Equals(o Collectable[ATNConfig]) bool - Hash() int - - GetState() ATNState - GetAlt() int - GetSemanticContext() SemanticContext - - GetContext() PredictionContext - SetContext(PredictionContext) - - GetReachesIntoOuterContext() int - SetReachesIntoOuterContext(int) - - String() string - - getPrecedenceFilterSuppressed() bool - setPrecedenceFilterSuppressed(bool) -} - -type BaseATNConfig struct { - precedenceFilterSuppressed bool - state ATNState - alt int - context PredictionContext - semanticContext SemanticContext - reachesIntoOuterContext int -} - -func NewBaseATNConfig7(old *BaseATNConfig) ATNConfig { // TODO: Dup - return &BaseATNConfig{ - state: old.state, - alt: old.alt, - context: old.context, - semanticContext: old.semanticContext, - reachesIntoOuterContext: old.reachesIntoOuterContext, - } -} - -func NewBaseATNConfig6(state ATNState, alt int, context PredictionContext) *BaseATNConfig { - return NewBaseATNConfig5(state, alt, context, SemanticContextNone) -} - -func NewBaseATNConfig5(state ATNState, alt int, context PredictionContext, semanticContext SemanticContext) *BaseATNConfig { - if semanticContext == nil { - panic("semanticContext cannot be nil") // TODO: Necessary? 
- } - - return &BaseATNConfig{state: state, alt: alt, context: context, semanticContext: semanticContext} -} - -func NewBaseATNConfig4(c ATNConfig, state ATNState) *BaseATNConfig { - return NewBaseATNConfig(c, state, c.GetContext(), c.GetSemanticContext()) -} - -func NewBaseATNConfig3(c ATNConfig, state ATNState, semanticContext SemanticContext) *BaseATNConfig { - return NewBaseATNConfig(c, state, c.GetContext(), semanticContext) -} - -func NewBaseATNConfig2(c ATNConfig, semanticContext SemanticContext) *BaseATNConfig { - return NewBaseATNConfig(c, c.GetState(), c.GetContext(), semanticContext) -} - -func NewBaseATNConfig1(c ATNConfig, state ATNState, context PredictionContext) *BaseATNConfig { - return NewBaseATNConfig(c, state, context, c.GetSemanticContext()) -} - -func NewBaseATNConfig(c ATNConfig, state ATNState, context PredictionContext, semanticContext SemanticContext) *BaseATNConfig { - if semanticContext == nil { - panic("semanticContext cannot be nil") - } - - return &BaseATNConfig{ - state: state, - alt: c.GetAlt(), - context: context, - semanticContext: semanticContext, - reachesIntoOuterContext: c.GetReachesIntoOuterContext(), - precedenceFilterSuppressed: c.getPrecedenceFilterSuppressed(), - } -} - -func (b *BaseATNConfig) getPrecedenceFilterSuppressed() bool { - return b.precedenceFilterSuppressed -} - -func (b *BaseATNConfig) setPrecedenceFilterSuppressed(v bool) { - b.precedenceFilterSuppressed = v -} - -func (b *BaseATNConfig) GetState() ATNState { - return b.state -} - -func (b *BaseATNConfig) GetAlt() int { - return b.alt -} - -func (b *BaseATNConfig) SetContext(v PredictionContext) { - b.context = v -} -func (b *BaseATNConfig) GetContext() PredictionContext { - return b.context -} - -func (b *BaseATNConfig) GetSemanticContext() SemanticContext { - return b.semanticContext -} - -func (b *BaseATNConfig) GetReachesIntoOuterContext() int { - return b.reachesIntoOuterContext -} - -func (b *BaseATNConfig) SetReachesIntoOuterContext(v int) { - b.reachesIntoOuterContext = v -} - -// Equals is the default comparison function for an ATNConfig when no specialist implementation is required -// for a collection. -// -// An ATN configuration is equal to another if both have the same state, they -// predict the same alternative, and syntactic/semantic contexts are the same. 
-func (b *BaseATNConfig) Equals(o Collectable[ATNConfig]) bool { - if b == o { - return true - } else if o == nil { - return false - } - - var other, ok = o.(*BaseATNConfig) - - if !ok { - return false - } - - var equal bool - - if b.context == nil { - equal = other.context == nil - } else { - equal = b.context.Equals(other.context) - } - - var ( - nums = b.state.GetStateNumber() == other.state.GetStateNumber() - alts = b.alt == other.alt - cons = b.semanticContext.Equals(other.semanticContext) - sups = b.precedenceFilterSuppressed == other.precedenceFilterSuppressed - ) - - return nums && alts && cons && sups && equal -} - -// Hash is the default hash function for BaseATNConfig, when no specialist hash function -// is required for a collection -func (b *BaseATNConfig) Hash() int { - var c int - if b.context != nil { - c = b.context.Hash() - } - - h := murmurInit(7) - h = murmurUpdate(h, b.state.GetStateNumber()) - h = murmurUpdate(h, b.alt) - h = murmurUpdate(h, c) - h = murmurUpdate(h, b.semanticContext.Hash()) - return murmurFinish(h, 4) -} - -func (b *BaseATNConfig) String() string { - var s1, s2, s3 string - - if b.context != nil { - s1 = ",[" + fmt.Sprint(b.context) + "]" - } - - if b.semanticContext != SemanticContextNone { - s2 = "," + fmt.Sprint(b.semanticContext) - } - - if b.reachesIntoOuterContext > 0 { - s3 = ",up=" + fmt.Sprint(b.reachesIntoOuterContext) - } - - return fmt.Sprintf("(%v,%v%v%v%v)", b.state, b.alt, s1, s2, s3) -} - -type LexerATNConfig struct { - *BaseATNConfig - lexerActionExecutor *LexerActionExecutor - passedThroughNonGreedyDecision bool -} - -func NewLexerATNConfig6(state ATNState, alt int, context PredictionContext) *LexerATNConfig { - return &LexerATNConfig{BaseATNConfig: NewBaseATNConfig5(state, alt, context, SemanticContextNone)} -} - -func NewLexerATNConfig5(state ATNState, alt int, context PredictionContext, lexerActionExecutor *LexerActionExecutor) *LexerATNConfig { - return &LexerATNConfig{ - BaseATNConfig: NewBaseATNConfig5(state, alt, context, SemanticContextNone), - lexerActionExecutor: lexerActionExecutor, - } -} - -func NewLexerATNConfig4(c *LexerATNConfig, state ATNState) *LexerATNConfig { - return &LexerATNConfig{ - BaseATNConfig: NewBaseATNConfig(c, state, c.GetContext(), c.GetSemanticContext()), - lexerActionExecutor: c.lexerActionExecutor, - passedThroughNonGreedyDecision: checkNonGreedyDecision(c, state), - } -} - -func NewLexerATNConfig3(c *LexerATNConfig, state ATNState, lexerActionExecutor *LexerActionExecutor) *LexerATNConfig { - return &LexerATNConfig{ - BaseATNConfig: NewBaseATNConfig(c, state, c.GetContext(), c.GetSemanticContext()), - lexerActionExecutor: lexerActionExecutor, - passedThroughNonGreedyDecision: checkNonGreedyDecision(c, state), - } -} - -func NewLexerATNConfig2(c *LexerATNConfig, state ATNState, context PredictionContext) *LexerATNConfig { - return &LexerATNConfig{ - BaseATNConfig: NewBaseATNConfig(c, state, context, c.GetSemanticContext()), - lexerActionExecutor: c.lexerActionExecutor, - passedThroughNonGreedyDecision: checkNonGreedyDecision(c, state), - } -} - -func NewLexerATNConfig1(state ATNState, alt int, context PredictionContext) *LexerATNConfig { - return &LexerATNConfig{BaseATNConfig: NewBaseATNConfig5(state, alt, context, SemanticContextNone)} -} - -// Hash is the default hash function for LexerATNConfig objects, it can be used directly or via -// the default comparator [ObjEqComparator]. 
-func (l *LexerATNConfig) Hash() int { - var f int - if l.passedThroughNonGreedyDecision { - f = 1 - } else { - f = 0 - } - h := murmurInit(7) - h = murmurUpdate(h, l.state.GetStateNumber()) - h = murmurUpdate(h, l.alt) - h = murmurUpdate(h, l.context.Hash()) - h = murmurUpdate(h, l.semanticContext.Hash()) - h = murmurUpdate(h, f) - h = murmurUpdate(h, l.lexerActionExecutor.Hash()) - h = murmurFinish(h, 6) - return h -} - -// Equals is the default comparison function for LexerATNConfig objects, it can be used directly or via -// the default comparator [ObjEqComparator]. -func (l *LexerATNConfig) Equals(other Collectable[ATNConfig]) bool { - if l == other { - return true - } - var othert, ok = other.(*LexerATNConfig) - - if l == other { - return true - } else if !ok { - return false - } else if l.passedThroughNonGreedyDecision != othert.passedThroughNonGreedyDecision { - return false - } - - var b bool - - if l.lexerActionExecutor != nil { - b = !l.lexerActionExecutor.Equals(othert.lexerActionExecutor) - } else { - b = othert.lexerActionExecutor != nil - } - - if b { - return false - } - - return l.BaseATNConfig.Equals(othert.BaseATNConfig) -} - -func checkNonGreedyDecision(source *LexerATNConfig, target ATNState) bool { - var ds, ok = target.(DecisionState) - - return source.passedThroughNonGreedyDecision || (ok && ds.getNonGreedy()) -} diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/atn_config_set.go b/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/atn_config_set.go deleted file mode 100644 index 43e9b33f3..000000000 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/atn_config_set.go +++ /dev/null @@ -1,441 +0,0 @@ -// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved. -// Use of this file is governed by the BSD 3-clause license that -// can be found in the LICENSE.txt file in the project root. - -package antlr - -import ( - "fmt" -) - -type ATNConfigSet interface { - Hash() int - Equals(o Collectable[ATNConfig]) bool - Add(ATNConfig, *DoubleDict) bool - AddAll([]ATNConfig) bool - - GetStates() *JStore[ATNState, Comparator[ATNState]] - GetPredicates() []SemanticContext - GetItems() []ATNConfig - - OptimizeConfigs(interpreter *BaseATNSimulator) - - Length() int - IsEmpty() bool - Contains(ATNConfig) bool - ContainsFast(ATNConfig) bool - Clear() - String() string - - HasSemanticContext() bool - SetHasSemanticContext(v bool) - - ReadOnly() bool - SetReadOnly(bool) - - GetConflictingAlts() *BitSet - SetConflictingAlts(*BitSet) - - Alts() *BitSet - - FullContext() bool - - GetUniqueAlt() int - SetUniqueAlt(int) - - GetDipsIntoOuterContext() bool - SetDipsIntoOuterContext(bool) -} - -// BaseATNConfigSet is a specialized set of ATNConfig that tracks information -// about its elements and can combine similar configurations using a -// graph-structured stack. -type BaseATNConfigSet struct { - cachedHash int - - // configLookup is used to determine whether two BaseATNConfigSets are equal. We - // need all configurations with the same (s, i, _, semctx) to be equal. A key - // effectively doubles the number of objects associated with ATNConfigs. All - // keys are hashed by (s, i, _, pi), not including the context. Wiped out when - // read-only because a set becomes a DFA state. - configLookup *JStore[ATNConfig, Comparator[ATNConfig]] - - // configs is the added elements. - configs []ATNConfig - - // TODO: These fields make me pretty uncomfortable, but it is nice to pack up - // info together because it saves recomputation. 
Can we track conflicts as they - // are added to save scanning configs later? - conflictingAlts *BitSet - - // dipsIntoOuterContext is used by parsers and lexers. In a lexer, it indicates - // we hit a pred while computing a closure operation. Do not make a DFA state - // from the BaseATNConfigSet in this case. TODO: How is this used by parsers? - dipsIntoOuterContext bool - - // fullCtx is whether it is part of a full context LL prediction. Used to - // determine how to merge $. It is a wildcard with SLL, but not for an LL - // context merge. - fullCtx bool - - // Used in parser and lexer. In lexer, it indicates we hit a pred - // while computing a closure operation. Don't make a DFA state from a. - hasSemanticContext bool - - // readOnly is whether it is read-only. Do not - // allow any code to manipulate the set if true because DFA states will point at - // sets and those must not change. It not, protect other fields; conflictingAlts - // in particular, which is assigned after readOnly. - readOnly bool - - // TODO: These fields make me pretty uncomfortable, but it is nice to pack up - // info together because it saves recomputation. Can we track conflicts as they - // are added to save scanning configs later? - uniqueAlt int -} - -func (b *BaseATNConfigSet) Alts() *BitSet { - alts := NewBitSet() - for _, it := range b.configs { - alts.add(it.GetAlt()) - } - return alts -} - -func NewBaseATNConfigSet(fullCtx bool) *BaseATNConfigSet { - return &BaseATNConfigSet{ - cachedHash: -1, - configLookup: NewJStore[ATNConfig, Comparator[ATNConfig]](aConfCompInst), - fullCtx: fullCtx, - } -} - -// Add merges contexts with existing configs for (s, i, pi, _), where s is the -// ATNConfig.state, i is the ATNConfig.alt, and pi is the -// ATNConfig.semanticContext. We use (s,i,pi) as the key. Updates -// dipsIntoOuterContext and hasSemanticContext when necessary. -func (b *BaseATNConfigSet) Add(config ATNConfig, mergeCache *DoubleDict) bool { - if b.readOnly { - panic("set is read-only") - } - - if config.GetSemanticContext() != SemanticContextNone { - b.hasSemanticContext = true - } - - if config.GetReachesIntoOuterContext() > 0 { - b.dipsIntoOuterContext = true - } - - existing, present := b.configLookup.Put(config) - - // The config was not already in the set - // - if !present { - b.cachedHash = -1 - b.configs = append(b.configs, config) // Track order here - return true - } - - // Merge a previous (s, i, pi, _) with it and save the result - rootIsWildcard := !b.fullCtx - merged := merge(existing.GetContext(), config.GetContext(), rootIsWildcard, mergeCache) - - // No need to check for existing.context because config.context is in the cache, - // since the only way to create new graphs is the "call rule" and here. We cache - // at both places. 
- existing.SetReachesIntoOuterContext(intMax(existing.GetReachesIntoOuterContext(), config.GetReachesIntoOuterContext())) - - // Preserve the precedence filter suppression during the merge - if config.getPrecedenceFilterSuppressed() { - existing.setPrecedenceFilterSuppressed(true) - } - - // Replace the context because there is no need to do alt mapping - existing.SetContext(merged) - - return true -} - -func (b *BaseATNConfigSet) GetStates() *JStore[ATNState, Comparator[ATNState]] { - - // states uses the standard comparator provided by the ATNState instance - // - states := NewJStore[ATNState, Comparator[ATNState]](aStateEqInst) - - for i := 0; i < len(b.configs); i++ { - states.Put(b.configs[i].GetState()) - } - - return states -} - -func (b *BaseATNConfigSet) HasSemanticContext() bool { - return b.hasSemanticContext -} - -func (b *BaseATNConfigSet) SetHasSemanticContext(v bool) { - b.hasSemanticContext = v -} - -func (b *BaseATNConfigSet) GetPredicates() []SemanticContext { - preds := make([]SemanticContext, 0) - - for i := 0; i < len(b.configs); i++ { - c := b.configs[i].GetSemanticContext() - - if c != SemanticContextNone { - preds = append(preds, c) - } - } - - return preds -} - -func (b *BaseATNConfigSet) GetItems() []ATNConfig { - return b.configs -} - -func (b *BaseATNConfigSet) OptimizeConfigs(interpreter *BaseATNSimulator) { - if b.readOnly { - panic("set is read-only") - } - - if b.configLookup.Len() == 0 { - return - } - - for i := 0; i < len(b.configs); i++ { - config := b.configs[i] - - config.SetContext(interpreter.getCachedContext(config.GetContext())) - } -} - -func (b *BaseATNConfigSet) AddAll(coll []ATNConfig) bool { - for i := 0; i < len(coll); i++ { - b.Add(coll[i], nil) - } - - return false -} - -// Compare is a hack function just to verify that adding DFAstares to the known -// set works, so long as comparison of ATNConfigSet s works. For that to work, we -// need to make sure that the set of ATNConfigs in two sets are equivalent. We can't -// know the order, so we do this inefficient hack. If this proves the point, then -// we can change the config set to a better structure. 
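A minimal, self-contained sketch of the order-insensitive check described above, with plain strings standing in for ATN configurations; sameElements is a hypothetical helper written for illustration, not part of the runtime:

package main

import "fmt"

// sameElements performs the quadratic "equal lengths, and every element of
// a is present somewhere in b" scan described above, ignoring order.
func sameElements(a, b []string) bool {
	if len(a) != len(b) {
		return false
	}
	for _, x := range a {
		found := false
		for _, y := range b {
			if x == y {
				found = true
				break
			}
		}
		if !found {
			return false
		}
	}
	return true
}

func main() {
	fmt.Println(sameElements([]string{"s1", "s2"}, []string{"s2", "s1"})) // true
	fmt.Println(sameElements([]string{"s1", "s2"}, []string{"s1", "s3"})) // false
}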
-func (b *BaseATNConfigSet) Compare(bs *BaseATNConfigSet) bool { - if len(b.configs) != len(bs.configs) { - return false - } - - for _, c := range b.configs { - found := false - for _, c2 := range bs.configs { - if c.Equals(c2) { - found = true - break - } - } - - if !found { - return false - } - - } - return true -} - -func (b *BaseATNConfigSet) Equals(other Collectable[ATNConfig]) bool { - if b == other { - return true - } else if _, ok := other.(*BaseATNConfigSet); !ok { - return false - } - - other2 := other.(*BaseATNConfigSet) - - return b.configs != nil && - b.fullCtx == other2.fullCtx && - b.uniqueAlt == other2.uniqueAlt && - b.conflictingAlts == other2.conflictingAlts && - b.hasSemanticContext == other2.hasSemanticContext && - b.dipsIntoOuterContext == other2.dipsIntoOuterContext && - b.Compare(other2) -} - -func (b *BaseATNConfigSet) Hash() int { - if b.readOnly { - if b.cachedHash == -1 { - b.cachedHash = b.hashCodeConfigs() - } - - return b.cachedHash - } - - return b.hashCodeConfigs() -} - -func (b *BaseATNConfigSet) hashCodeConfigs() int { - h := 1 - for _, config := range b.configs { - h = 31*h + config.Hash() - } - return h -} - -func (b *BaseATNConfigSet) Length() int { - return len(b.configs) -} - -func (b *BaseATNConfigSet) IsEmpty() bool { - return len(b.configs) == 0 -} - -func (b *BaseATNConfigSet) Contains(item ATNConfig) bool { - if b.configLookup == nil { - panic("not implemented for read-only sets") - } - - return b.configLookup.Contains(item) -} - -func (b *BaseATNConfigSet) ContainsFast(item ATNConfig) bool { - if b.configLookup == nil { - panic("not implemented for read-only sets") - } - - return b.configLookup.Contains(item) // TODO: containsFast is not implemented for Set -} - -func (b *BaseATNConfigSet) Clear() { - if b.readOnly { - panic("set is read-only") - } - - b.configs = make([]ATNConfig, 0) - b.cachedHash = -1 - b.configLookup = NewJStore[ATNConfig, Comparator[ATNConfig]](atnConfCompInst) -} - -func (b *BaseATNConfigSet) FullContext() bool { - return b.fullCtx -} - -func (b *BaseATNConfigSet) GetDipsIntoOuterContext() bool { - return b.dipsIntoOuterContext -} - -func (b *BaseATNConfigSet) SetDipsIntoOuterContext(v bool) { - b.dipsIntoOuterContext = v -} - -func (b *BaseATNConfigSet) GetUniqueAlt() int { - return b.uniqueAlt -} - -func (b *BaseATNConfigSet) SetUniqueAlt(v int) { - b.uniqueAlt = v -} - -func (b *BaseATNConfigSet) GetConflictingAlts() *BitSet { - return b.conflictingAlts -} - -func (b *BaseATNConfigSet) SetConflictingAlts(v *BitSet) { - b.conflictingAlts = v -} - -func (b *BaseATNConfigSet) ReadOnly() bool { - return b.readOnly -} - -func (b *BaseATNConfigSet) SetReadOnly(readOnly bool) { - b.readOnly = readOnly - - if readOnly { - b.configLookup = nil // Read only, so no need for the lookup cache - } -} - -func (b *BaseATNConfigSet) String() string { - s := "[" - - for i, c := range b.configs { - s += c.String() - - if i != len(b.configs)-1 { - s += ", " - } - } - - s += "]" - - if b.hasSemanticContext { - s += ",hasSemanticContext=" + fmt.Sprint(b.hasSemanticContext) - } - - if b.uniqueAlt != ATNInvalidAltNumber { - s += ",uniqueAlt=" + fmt.Sprint(b.uniqueAlt) - } - - if b.conflictingAlts != nil { - s += ",conflictingAlts=" + b.conflictingAlts.String() - } - - if b.dipsIntoOuterContext { - s += ",dipsIntoOuterContext" - } - - return s -} - -type OrderedATNConfigSet struct { - *BaseATNConfigSet -} - -func NewOrderedATNConfigSet() *OrderedATNConfigSet { - b := NewBaseATNConfigSet(false) - - // This set uses the standard Hash() and 
Equals() from ATNConfig - b.configLookup = NewJStore[ATNConfig, Comparator[ATNConfig]](aConfEqInst) - - return &OrderedATNConfigSet{BaseATNConfigSet: b} -} - -func hashATNConfig(i interface{}) int { - o := i.(ATNConfig) - hash := 7 - hash = 31*hash + o.GetState().GetStateNumber() - hash = 31*hash + o.GetAlt() - hash = 31*hash + o.GetSemanticContext().Hash() - return hash -} - -func equalATNConfigs(a, b interface{}) bool { - if a == nil || b == nil { - return false - } - - if a == b { - return true - } - - var ai, ok = a.(ATNConfig) - var bi, ok1 = b.(ATNConfig) - - if !ok || !ok1 { - return false - } - - if ai.GetState().GetStateNumber() != bi.GetState().GetStateNumber() { - return false - } - - if ai.GetAlt() != bi.GetAlt() { - return false - } - - return ai.GetSemanticContext().Equals(bi.GetSemanticContext()) -} diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/input_stream.go b/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/input_stream.go deleted file mode 100644 index a8b889ced..000000000 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/input_stream.go +++ /dev/null @@ -1,113 +0,0 @@ -// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved. -// Use of this file is governed by the BSD 3-clause license that -// can be found in the LICENSE.txt file in the project root. - -package antlr - -type InputStream struct { - name string - index int - data []rune - size int -} - -func NewInputStream(data string) *InputStream { - - is := new(InputStream) - - is.name = "" - is.index = 0 - is.data = []rune(data) - is.size = len(is.data) // number of runes - - return is -} - -func (is *InputStream) reset() { - is.index = 0 -} - -func (is *InputStream) Consume() { - if is.index >= is.size { - // assert is.LA(1) == TokenEOF - panic("cannot consume EOF") - } - is.index++ -} - -func (is *InputStream) LA(offset int) int { - - if offset == 0 { - return 0 // nil - } - if offset < 0 { - offset++ // e.g., translate LA(-1) to use offset=0 - } - pos := is.index + offset - 1 - - if pos < 0 || pos >= is.size { // invalid - return TokenEOF - } - - return int(is.data[pos]) -} - -func (is *InputStream) LT(offset int) int { - return is.LA(offset) -} - -func (is *InputStream) Index() int { - return is.index -} - -func (is *InputStream) Size() int { - return is.size -} - -// mark/release do nothing we have entire buffer -func (is *InputStream) Mark() int { - return -1 -} - -func (is *InputStream) Release(marker int) { -} - -func (is *InputStream) Seek(index int) { - if index <= is.index { - is.index = index // just jump don't update stream state (line,...) 
- return - } - // seek forward - is.index = intMin(index, is.size) -} - -func (is *InputStream) GetText(start int, stop int) string { - if stop >= is.size { - stop = is.size - 1 - } - if start >= is.size { - return "" - } - - return string(is.data[start : stop+1]) -} - -func (is *InputStream) GetTextFromTokens(start, stop Token) string { - if start != nil && stop != nil { - return is.GetTextFromInterval(NewInterval(start.GetTokenIndex(), stop.GetTokenIndex())) - } - - return "" -} - -func (is *InputStream) GetTextFromInterval(i *Interval) string { - return is.GetText(i.Start, i.Stop) -} - -func (*InputStream) GetSourceName() string { - return "Obtained from string" -} - -func (is *InputStream) String() string { - return string(is.data) -} diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/jcollect.go b/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/jcollect.go deleted file mode 100644 index e5a74f0c6..000000000 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/jcollect.go +++ /dev/null @@ -1,198 +0,0 @@ -package antlr - -// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved. -// Use of this file is governed by the BSD 3-clause license that -// can be found in the LICENSE.txt file in the project root. - -import ( - "sort" -) - -// Collectable is an interface that a struct should implement if it is to be -// usable as a key in these collections. -type Collectable[T any] interface { - Hash() int - Equals(other Collectable[T]) bool -} - -type Comparator[T any] interface { - Hash1(o T) int - Equals2(T, T) bool -} - -// JStore implements a container that allows the use of a struct to calculate the key -// for a collection of values akin to map. This is not meant to be a full-blown HashMap but just -// serve the needs of the ANTLR Go runtime. -// -// For ease of porting the logic of the runtime from the master target (Java), this collection -// operates in a similar way to Java, in that it can use any struct that supplies a Hash() and Equals() -// function as the key. The values are stored in a standard go map which internally is a form of hashmap -// itself, the key for the go map is the hash supplied by the key object. The collection is able to deal with -// hash conflicts by using a simple slice of values associated with the hash code indexed bucket. That isn't -// particularly efficient, but it is simple, and it works. As this is specifically for the ANTLR runtime, and -// we understand the requirements, then this is fine - this is not a general purpose collection. -type JStore[T any, C Comparator[T]] struct { - store map[int][]T - len int - comparator Comparator[T] -} - -func NewJStore[T any, C Comparator[T]](comparator Comparator[T]) *JStore[T, C] { - - if comparator == nil { - panic("comparator cannot be nil") - } - - s := &JStore[T, C]{ - store: make(map[int][]T, 1), - comparator: comparator, - } - return s -} - -// Put will store given value in the collection. Note that the key for storage is generated from -// the value itself - this is specifically because that is what ANTLR needs - this would not be useful -// as any kind of general collection. -// -// If the key has a hash conflict, then the value will be added to the slice of values associated with the -// hash, unless the value is already in the slice, in which case the existing value is returned. Value equivalence is -// tested by calling the equals() method on the key. 
-// -// # If the given value is already present in the store, then the existing value is returned as v and exists is set to true -// -// If the given value is not present in the store, then the value is added to the store and returned as v and exists is set to false. -func (s *JStore[T, C]) Put(value T) (v T, exists bool) { //nolint:ireturn - - kh := s.comparator.Hash1(value) - - for _, v1 := range s.store[kh] { - if s.comparator.Equals2(value, v1) { - return v1, true - } - } - s.store[kh] = append(s.store[kh], value) - s.len++ - return value, false -} - -// Get will return the value associated with the key - the type of the key is the same type as the value -// which would not generally be useful, but this is a specific thing for ANTLR where the key is -// generated using the object we are going to store. -func (s *JStore[T, C]) Get(key T) (T, bool) { //nolint:ireturn - - kh := s.comparator.Hash1(key) - - for _, v := range s.store[kh] { - if s.comparator.Equals2(key, v) { - return v, true - } - } - return key, false -} - -// Contains returns true if the given key is present in the store -func (s *JStore[T, C]) Contains(key T) bool { //nolint:ireturn - - _, present := s.Get(key) - return present -} - -func (s *JStore[T, C]) SortedSlice(less func(i, j T) bool) []T { - vs := make([]T, 0, len(s.store)) - for _, v := range s.store { - vs = append(vs, v...) - } - sort.Slice(vs, func(i, j int) bool { - return less(vs[i], vs[j]) - }) - - return vs -} - -func (s *JStore[T, C]) Each(f func(T) bool) { - for _, e := range s.store { - for _, v := range e { - f(v) - } - } -} - -func (s *JStore[T, C]) Len() int { - return s.len -} - -func (s *JStore[T, C]) Values() []T { - vs := make([]T, 0, len(s.store)) - for _, e := range s.store { - for _, v := range e { - vs = append(vs, v) - } - } - return vs -} - -type entry[K, V any] struct { - key K - val V -} - -type JMap[K, V any, C Comparator[K]] struct { - store map[int][]*entry[K, V] - len int - comparator Comparator[K] -} - -func NewJMap[K, V any, C Comparator[K]](comparator Comparator[K]) *JMap[K, V, C] { - return &JMap[K, V, C]{ - store: make(map[int][]*entry[K, V], 1), - comparator: comparator, - } -} - -func (m *JMap[K, V, C]) Put(key K, val V) { - kh := m.comparator.Hash1(key) - - m.store[kh] = append(m.store[kh], &entry[K, V]{key, val}) - m.len++ -} - -func (m *JMap[K, V, C]) Values() []V { - vs := make([]V, 0, len(m.store)) - for _, e := range m.store { - for _, v := range e { - vs = append(vs, v.val) - } - } - return vs -} - -func (m *JMap[K, V, C]) Get(key K) (V, bool) { - - var none V - kh := m.comparator.Hash1(key) - for _, e := range m.store[kh] { - if m.comparator.Equals2(e.key, key) { - return e.val, true - } - } - return none, false -} - -func (m *JMap[K, V, C]) Len() int { - return len(m.store) -} - -func (m *JMap[K, V, C]) Delete(key K) { - kh := m.comparator.Hash1(key) - for i, e := range m.store[kh] { - if m.comparator.Equals2(e.key, key) { - m.store[kh] = append(m.store[kh][:i], m.store[kh][i+1:]...) - m.len-- - return - } - } -} - -func (m *JMap[K, V, C]) Clear() { - m.store = make(map[int][]*entry[K, V]) -} diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/prediction_context.go b/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/prediction_context.go deleted file mode 100644 index ba62af361..000000000 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/prediction_context.go +++ /dev/null @@ -1,806 +0,0 @@ -// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved. 
-// Use of this file is governed by the BSD 3-clause license that -// can be found in the LICENSE.txt file in the project root. - -package antlr - -import ( - "fmt" - "golang.org/x/exp/slices" - "strconv" -) - -// Represents {@code $} in local context prediction, which means wildcard. -// {@code//+x =//}. -// / -const ( - BasePredictionContextEmptyReturnState = 0x7FFFFFFF -) - -// Represents {@code $} in an array in full context mode, when {@code $} -// doesn't mean wildcard: {@code $ + x = [$,x]}. Here, -// {@code $} = {@link //EmptyReturnState}. -// / - -var ( - BasePredictionContextglobalNodeCount = 1 - BasePredictionContextid = BasePredictionContextglobalNodeCount -) - -type PredictionContext interface { - Hash() int - Equals(interface{}) bool - GetParent(int) PredictionContext - getReturnState(int) int - length() int - isEmpty() bool - hasEmptyPath() bool - String() string -} - -type BasePredictionContext struct { - cachedHash int -} - -func NewBasePredictionContext(cachedHash int) *BasePredictionContext { - pc := new(BasePredictionContext) - pc.cachedHash = cachedHash - - return pc -} - -func (b *BasePredictionContext) isEmpty() bool { - return false -} - -func calculateHash(parent PredictionContext, returnState int) int { - h := murmurInit(1) - h = murmurUpdate(h, parent.Hash()) - h = murmurUpdate(h, returnState) - return murmurFinish(h, 2) -} - -var _emptyPredictionContextHash int - -func init() { - _emptyPredictionContextHash = murmurInit(1) - _emptyPredictionContextHash = murmurFinish(_emptyPredictionContextHash, 0) -} - -func calculateEmptyHash() int { - return _emptyPredictionContextHash -} - -// Used to cache {@link BasePredictionContext} objects. Its used for the shared -// context cash associated with contexts in DFA states. This cache -// can be used for both lexers and parsers. - -type PredictionContextCache struct { - cache map[PredictionContext]PredictionContext -} - -func NewPredictionContextCache() *PredictionContextCache { - t := new(PredictionContextCache) - t.cache = make(map[PredictionContext]PredictionContext) - return t -} - -// Add a context to the cache and return it. If the context already exists, -// return that one instead and do not add a Newcontext to the cache. -// Protect shared cache from unsafe thread access. 
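A minimal, self-contained sketch of the add-or-return-existing behaviour described above; the cache type here is a hypothetical stand-in keyed by strings rather than prediction contexts:

package main

import "fmt"

// cache returns the already-stored entry when one exists, so callers end up
// sharing a single instance instead of adding a duplicate.
type cache struct{ entries map[string]string }

func (c *cache) add(ctx string) string {
	if existing, ok := c.entries[ctx]; ok {
		return existing // reuse the shared entry, do not add a new one
	}
	c.entries[ctx] = ctx
	return ctx
}

func main() {
	c := &cache{entries: map[string]string{}}
	fmt.Println(c.add("$"), c.add("$"), len(c.entries)) // $ $ 1
}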
-func (p *PredictionContextCache) add(ctx PredictionContext) PredictionContext { - if ctx == BasePredictionContextEMPTY { - return BasePredictionContextEMPTY - } - existing := p.cache[ctx] - if existing != nil { - return existing - } - p.cache[ctx] = ctx - return ctx -} - -func (p *PredictionContextCache) Get(ctx PredictionContext) PredictionContext { - return p.cache[ctx] -} - -func (p *PredictionContextCache) length() int { - return len(p.cache) -} - -type SingletonPredictionContext interface { - PredictionContext -} - -type BaseSingletonPredictionContext struct { - *BasePredictionContext - - parentCtx PredictionContext - returnState int -} - -func NewBaseSingletonPredictionContext(parent PredictionContext, returnState int) *BaseSingletonPredictionContext { - var cachedHash int - if parent != nil { - cachedHash = calculateHash(parent, returnState) - } else { - cachedHash = calculateEmptyHash() - } - - s := new(BaseSingletonPredictionContext) - s.BasePredictionContext = NewBasePredictionContext(cachedHash) - - s.parentCtx = parent - s.returnState = returnState - - return s -} - -func SingletonBasePredictionContextCreate(parent PredictionContext, returnState int) PredictionContext { - if returnState == BasePredictionContextEmptyReturnState && parent == nil { - // someone can pass in the bits of an array ctx that mean $ - return BasePredictionContextEMPTY - } - - return NewBaseSingletonPredictionContext(parent, returnState) -} - -func (b *BaseSingletonPredictionContext) length() int { - return 1 -} - -func (b *BaseSingletonPredictionContext) GetParent(index int) PredictionContext { - return b.parentCtx -} - -func (b *BaseSingletonPredictionContext) getReturnState(index int) int { - return b.returnState -} - -func (b *BaseSingletonPredictionContext) hasEmptyPath() bool { - return b.returnState == BasePredictionContextEmptyReturnState -} - -func (b *BaseSingletonPredictionContext) Hash() int { - return b.cachedHash -} - -func (b *BaseSingletonPredictionContext) Equals(other interface{}) bool { - if b == other { - return true - } - if _, ok := other.(*BaseSingletonPredictionContext); !ok { - return false - } - - otherP := other.(*BaseSingletonPredictionContext) - - if b.returnState != otherP.getReturnState(0) { - return false - } - if b.parentCtx == nil { - return otherP.parentCtx == nil - } - - return b.parentCtx.Equals(otherP.parentCtx) -} - -func (b *BaseSingletonPredictionContext) String() string { - var up string - - if b.parentCtx == nil { - up = "" - } else { - up = b.parentCtx.String() - } - - if len(up) == 0 { - if b.returnState == BasePredictionContextEmptyReturnState { - return "$" - } - - return strconv.Itoa(b.returnState) - } - - return strconv.Itoa(b.returnState) + " " + up -} - -var BasePredictionContextEMPTY = NewEmptyPredictionContext() - -type EmptyPredictionContext struct { - *BaseSingletonPredictionContext -} - -func NewEmptyPredictionContext() *EmptyPredictionContext { - - p := new(EmptyPredictionContext) - - p.BaseSingletonPredictionContext = NewBaseSingletonPredictionContext(nil, BasePredictionContextEmptyReturnState) - p.cachedHash = calculateEmptyHash() - return p -} - -func (e *EmptyPredictionContext) isEmpty() bool { - return true -} - -func (e *EmptyPredictionContext) GetParent(index int) PredictionContext { - return nil -} - -func (e *EmptyPredictionContext) getReturnState(index int) int { - return e.returnState -} - -func (e *EmptyPredictionContext) Hash() int { - return e.cachedHash -} - -func (e *EmptyPredictionContext) Equals(other interface{}) bool { - return 
e == other -} - -func (e *EmptyPredictionContext) String() string { - return "$" -} - -type ArrayPredictionContext struct { - *BasePredictionContext - - parents []PredictionContext - returnStates []int -} - -func NewArrayPredictionContext(parents []PredictionContext, returnStates []int) *ArrayPredictionContext { - // Parent can be nil only if full ctx mode and we make an array - // from {@link //EMPTY} and non-empty. We merge {@link //EMPTY} by using - // nil parent and - // returnState == {@link //EmptyReturnState}. - hash := murmurInit(1) - - for _, parent := range parents { - hash = murmurUpdate(hash, parent.Hash()) - } - - for _, returnState := range returnStates { - hash = murmurUpdate(hash, returnState) - } - - hash = murmurFinish(hash, len(parents)<<1) - - c := new(ArrayPredictionContext) - c.BasePredictionContext = NewBasePredictionContext(hash) - - c.parents = parents - c.returnStates = returnStates - - return c -} - -func (a *ArrayPredictionContext) GetReturnStates() []int { - return a.returnStates -} - -func (a *ArrayPredictionContext) hasEmptyPath() bool { - return a.getReturnState(a.length()-1) == BasePredictionContextEmptyReturnState -} - -func (a *ArrayPredictionContext) isEmpty() bool { - // since EmptyReturnState can only appear in the last position, we - // don't need to verify that size==1 - return a.returnStates[0] == BasePredictionContextEmptyReturnState -} - -func (a *ArrayPredictionContext) length() int { - return len(a.returnStates) -} - -func (a *ArrayPredictionContext) GetParent(index int) PredictionContext { - return a.parents[index] -} - -func (a *ArrayPredictionContext) getReturnState(index int) int { - return a.returnStates[index] -} - -// Equals is the default comparison function for ArrayPredictionContext when no specialized -// implementation is needed for a collection -func (a *ArrayPredictionContext) Equals(o interface{}) bool { - if a == o { - return true - } - other, ok := o.(*ArrayPredictionContext) - if !ok { - return false - } - if a.cachedHash != other.Hash() { - return false // can't be same if hash is different - } - - // Must compare the actual array elements and not just the array address - // - return slices.Equal(a.returnStates, other.returnStates) && - slices.EqualFunc(a.parents, other.parents, func(x, y PredictionContext) bool { - return x.Equals(y) - }) -} - -// Hash is the default hash function for ArrayPredictionContext when no specialized -// implementation is needed for a collection -func (a *ArrayPredictionContext) Hash() int { - return a.BasePredictionContext.cachedHash -} - -func (a *ArrayPredictionContext) String() string { - if a.isEmpty() { - return "[]" - } - - s := "[" - for i := 0; i < len(a.returnStates); i++ { - if i > 0 { - s = s + ", " - } - if a.returnStates[i] == BasePredictionContextEmptyReturnState { - s = s + "$" - continue - } - s = s + strconv.Itoa(a.returnStates[i]) - if a.parents[i] != nil { - s = s + " " + a.parents[i].String() - } else { - s = s + "nil" - } - } - - return s + "]" -} - -// Convert a {@link RuleContext} tree to a {@link BasePredictionContext} graph. -// Return {@link //EMPTY} if {@code outerContext} is empty or nil. -// / -func predictionContextFromRuleContext(a *ATN, outerContext RuleContext) PredictionContext { - if outerContext == nil { - outerContext = ParserRuleContextEmpty - } - // if we are in RuleContext of start rule, s, then BasePredictionContext - // is EMPTY. Nobody called us. 
(if we are empty, return empty) - if outerContext.GetParent() == nil || outerContext == ParserRuleContextEmpty { - return BasePredictionContextEMPTY - } - // If we have a parent, convert it to a BasePredictionContext graph - parent := predictionContextFromRuleContext(a, outerContext.GetParent().(RuleContext)) - state := a.states[outerContext.GetInvokingState()] - transition := state.GetTransitions()[0] - - return SingletonBasePredictionContextCreate(parent, transition.(*RuleTransition).followState.GetStateNumber()) -} - -func merge(a, b PredictionContext, rootIsWildcard bool, mergeCache *DoubleDict) PredictionContext { - - // Share same graph if both same - // - if a == b || a.Equals(b) { - return a - } - - // In Java, EmptyPredictionContext inherits from SingletonPredictionContext, and so the test - // in java for SingletonPredictionContext will succeed and a new ArrayPredictionContext will be created - // from it. - // In go, EmptyPredictionContext does not equate to SingletonPredictionContext and so that conversion - // will fail. We need to test for both Empty and Singleton and create an ArrayPredictionContext from - // either of them. - - ac, ok1 := a.(*BaseSingletonPredictionContext) - bc, ok2 := b.(*BaseSingletonPredictionContext) - - if ok1 && ok2 { - return mergeSingletons(ac, bc, rootIsWildcard, mergeCache) - } - // At least one of a or b is array - // If one is $ and rootIsWildcard, return $ as// wildcard - if rootIsWildcard { - if _, ok := a.(*EmptyPredictionContext); ok { - return a - } - if _, ok := b.(*EmptyPredictionContext); ok { - return b - } - } - - // Convert Singleton or Empty so both are arrays to normalize - We should not use the existing parameters - // here. - // - // TODO: I think that maybe the Prediction Context structs should be redone as there is a chance we will see this mess again - maybe redo the logic here - - var arp, arb *ArrayPredictionContext - var ok bool - if arp, ok = a.(*ArrayPredictionContext); ok { - } else if _, ok = a.(*BaseSingletonPredictionContext); ok { - arp = NewArrayPredictionContext([]PredictionContext{a.GetParent(0)}, []int{a.getReturnState(0)}) - } else if _, ok = a.(*EmptyPredictionContext); ok { - arp = NewArrayPredictionContext([]PredictionContext{}, []int{}) - } - - if arb, ok = b.(*ArrayPredictionContext); ok { - } else if _, ok = b.(*BaseSingletonPredictionContext); ok { - arb = NewArrayPredictionContext([]PredictionContext{b.GetParent(0)}, []int{b.getReturnState(0)}) - } else if _, ok = b.(*EmptyPredictionContext); ok { - arb = NewArrayPredictionContext([]PredictionContext{}, []int{}) - } - - // Both arp and arb - return mergeArrays(arp, arb, rootIsWildcard, mergeCache) -} - -// Merge two {@link SingletonBasePredictionContext} instances. -// -//

-// Stack tops equal, parents merge is same return left graph.
-//
-// Same stack top, parents differ merge parents giving array node, then
-// remainders of those graphs. A new root node is created to point to the
-// merged parents.
-//
-// Different stack tops pointing to same parent. Make array node for the
-// root where both elements in the root point to the same (original)
-// parent.
-//
-// Different stack tops pointing to different parents. Make array node for
-// the root where each element points to the corresponding original
-// parent.
-// -// @param a the first {@link SingletonBasePredictionContext} -// @param b the second {@link SingletonBasePredictionContext} -// @param rootIsWildcard {@code true} if this is a local-context merge, -// otherwise false to indicate a full-context merge -// @param mergeCache -// / -func mergeSingletons(a, b *BaseSingletonPredictionContext, rootIsWildcard bool, mergeCache *DoubleDict) PredictionContext { - if mergeCache != nil { - previous := mergeCache.Get(a.Hash(), b.Hash()) - if previous != nil { - return previous.(PredictionContext) - } - previous = mergeCache.Get(b.Hash(), a.Hash()) - if previous != nil { - return previous.(PredictionContext) - } - } - - rootMerge := mergeRoot(a, b, rootIsWildcard) - if rootMerge != nil { - if mergeCache != nil { - mergeCache.set(a.Hash(), b.Hash(), rootMerge) - } - return rootMerge - } - if a.returnState == b.returnState { - parent := merge(a.parentCtx, b.parentCtx, rootIsWildcard, mergeCache) - // if parent is same as existing a or b parent or reduced to a parent, - // return it - if parent == a.parentCtx { - return a // ax + bx = ax, if a=b - } - if parent == b.parentCtx { - return b // ax + bx = bx, if a=b - } - // else: ax + ay = a'[x,y] - // merge parents x and y, giving array node with x,y then remainders - // of those graphs. dup a, a' points at merged array - // Newjoined parent so create Newsingleton pointing to it, a' - spc := SingletonBasePredictionContextCreate(parent, a.returnState) - if mergeCache != nil { - mergeCache.set(a.Hash(), b.Hash(), spc) - } - return spc - } - // a != b payloads differ - // see if we can collapse parents due to $+x parents if local ctx - var singleParent PredictionContext - if a == b || (a.parentCtx != nil && a.parentCtx == b.parentCtx) { // ax + - // bx = - // [a,b]x - singleParent = a.parentCtx - } - if singleParent != nil { // parents are same - // sort payloads and use same parent - payloads := []int{a.returnState, b.returnState} - if a.returnState > b.returnState { - payloads[0] = b.returnState - payloads[1] = a.returnState - } - parents := []PredictionContext{singleParent, singleParent} - apc := NewArrayPredictionContext(parents, payloads) - if mergeCache != nil { - mergeCache.set(a.Hash(), b.Hash(), apc) - } - return apc - } - // parents differ and can't merge them. Just pack together - // into array can't merge. - // ax + by = [ax,by] - payloads := []int{a.returnState, b.returnState} - parents := []PredictionContext{a.parentCtx, b.parentCtx} - if a.returnState > b.returnState { // sort by payload - payloads[0] = b.returnState - payloads[1] = a.returnState - parents = []PredictionContext{b.parentCtx, a.parentCtx} - } - apc := NewArrayPredictionContext(parents, payloads) - if mergeCache != nil { - mergeCache.set(a.Hash(), b.Hash(), apc) - } - return apc -} - -// Handle case where at least one of {@code a} or {@code b} is -// {@link //EMPTY}. In the following diagrams, the symbol {@code $} is used -// to represent {@link //EMPTY}. -// -//

-// Local-Context Merges
-//
-// These local-context merge operations are used when {@code rootIsWildcard}
-// is true.
-//
-// {@link //EMPTY} is superset of any graph return {@link //EMPTY}.
-//
-// {@link //EMPTY} and anything is {@code //EMPTY}, so merged parent is
-// {@code //EMPTY} return left graph.
-//
-// Special case of last merge if local context.
-//
-// Full-Context Merges
-//
-// These full-context merge operations are used when {@code rootIsWildcard}
-// is false.
-//
-// Must keep all contexts {@link //EMPTY} in array is a special value (and
-// nil parent).
-// -// @param a the first {@link SingletonBasePredictionContext} -// @param b the second {@link SingletonBasePredictionContext} -// @param rootIsWildcard {@code true} if this is a local-context merge, -// otherwise false to indicate a full-context merge -// / -func mergeRoot(a, b SingletonPredictionContext, rootIsWildcard bool) PredictionContext { - if rootIsWildcard { - if a == BasePredictionContextEMPTY { - return BasePredictionContextEMPTY // // + b =// - } - if b == BasePredictionContextEMPTY { - return BasePredictionContextEMPTY // a +// =// - } - } else { - if a == BasePredictionContextEMPTY && b == BasePredictionContextEMPTY { - return BasePredictionContextEMPTY // $ + $ = $ - } else if a == BasePredictionContextEMPTY { // $ + x = [$,x] - payloads := []int{b.getReturnState(-1), BasePredictionContextEmptyReturnState} - parents := []PredictionContext{b.GetParent(-1), nil} - return NewArrayPredictionContext(parents, payloads) - } else if b == BasePredictionContextEMPTY { // x + $ = [$,x] ($ is always first if present) - payloads := []int{a.getReturnState(-1), BasePredictionContextEmptyReturnState} - parents := []PredictionContext{a.GetParent(-1), nil} - return NewArrayPredictionContext(parents, payloads) - } - } - return nil -} - -// Merge two {@link ArrayBasePredictionContext} instances. -// -//

-// Different tops, different parents.
-//
-// Shared top, same parents.
-//
-// Shared top, different parents.
-//
-// Shared top, all shared parents.
-//
-// Equal tops, merge parents and reduce top to
-// {@link SingletonBasePredictionContext}.
-// / -func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *DoubleDict) PredictionContext { - if mergeCache != nil { - previous := mergeCache.Get(a.Hash(), b.Hash()) - if previous != nil { - if ParserATNSimulatorTraceATNSim { - fmt.Println("mergeArrays a=" + a.String() + ",b=" + b.String() + " -> previous") - } - return previous.(PredictionContext) - } - previous = mergeCache.Get(b.Hash(), a.Hash()) - if previous != nil { - if ParserATNSimulatorTraceATNSim { - fmt.Println("mergeArrays a=" + a.String() + ",b=" + b.String() + " -> previous") - } - return previous.(PredictionContext) - } - } - // merge sorted payloads a + b => M - i := 0 // walks a - j := 0 // walks b - k := 0 // walks target M array - - mergedReturnStates := make([]int, len(a.returnStates)+len(b.returnStates)) - mergedParents := make([]PredictionContext, len(a.returnStates)+len(b.returnStates)) - // walk and merge to yield mergedParents, mergedReturnStates - for i < len(a.returnStates) && j < len(b.returnStates) { - aParent := a.parents[i] - bParent := b.parents[j] - if a.returnStates[i] == b.returnStates[j] { - // same payload (stack tops are equal), must yield merged singleton - payload := a.returnStates[i] - // $+$ = $ - bothDollars := payload == BasePredictionContextEmptyReturnState && aParent == nil && bParent == nil - axAX := aParent != nil && bParent != nil && aParent == bParent // ax+ax - // -> - // ax - if bothDollars || axAX { - mergedParents[k] = aParent // choose left - mergedReturnStates[k] = payload - } else { // ax+ay -> a'[x,y] - mergedParent := merge(aParent, bParent, rootIsWildcard, mergeCache) - mergedParents[k] = mergedParent - mergedReturnStates[k] = payload - } - i++ // hop over left one as usual - j++ // but also Skip one in right side since we merge - } else if a.returnStates[i] < b.returnStates[j] { // copy a[i] to M - mergedParents[k] = aParent - mergedReturnStates[k] = a.returnStates[i] - i++ - } else { // b > a, copy b[j] to M - mergedParents[k] = bParent - mergedReturnStates[k] = b.returnStates[j] - j++ - } - k++ - } - // copy over any payloads remaining in either array - if i < len(a.returnStates) { - for p := i; p < len(a.returnStates); p++ { - mergedParents[k] = a.parents[p] - mergedReturnStates[k] = a.returnStates[p] - k++ - } - } else { - for p := j; p < len(b.returnStates); p++ { - mergedParents[k] = b.parents[p] - mergedReturnStates[k] = b.returnStates[p] - k++ - } - } - // trim merged if we combined a few that had same stack tops - if k < len(mergedParents) { // write index < last position trim - if k == 1 { // for just one merged element, return singleton top - pc := SingletonBasePredictionContextCreate(mergedParents[0], mergedReturnStates[0]) - if mergeCache != nil { - mergeCache.set(a.Hash(), b.Hash(), pc) - } - return pc - } - mergedParents = mergedParents[0:k] - mergedReturnStates = mergedReturnStates[0:k] - } - - M := NewArrayPredictionContext(mergedParents, mergedReturnStates) - - // if we created same array as a or b, return that instead - // TODO: track whether this is possible above during merge sort for speed - // TODO: In go, I do not think we can just do M == xx as M is a brand new allocation. 
This could be causing allocation problems - if M == a { - if mergeCache != nil { - mergeCache.set(a.Hash(), b.Hash(), a) - } - if ParserATNSimulatorTraceATNSim { - fmt.Println("mergeArrays a=" + a.String() + ",b=" + b.String() + " -> a") - } - return a - } - if M == b { - if mergeCache != nil { - mergeCache.set(a.Hash(), b.Hash(), b) - } - if ParserATNSimulatorTraceATNSim { - fmt.Println("mergeArrays a=" + a.String() + ",b=" + b.String() + " -> b") - } - return b - } - combineCommonParents(mergedParents) - - if mergeCache != nil { - mergeCache.set(a.Hash(), b.Hash(), M) - } - if ParserATNSimulatorTraceATNSim { - fmt.Println("mergeArrays a=" + a.String() + ",b=" + b.String() + " -> " + M.String()) - } - return M -} - -// Make pass over all M {@code parents} merge any {@code equals()} -// ones. -// / -func combineCommonParents(parents []PredictionContext) { - uniqueParents := make(map[PredictionContext]PredictionContext) - - for p := 0; p < len(parents); p++ { - parent := parents[p] - if uniqueParents[parent] == nil { - uniqueParents[parent] = parent - } - } - for q := 0; q < len(parents); q++ { - parents[q] = uniqueParents[parents[q]] - } -} - -func getCachedBasePredictionContext(context PredictionContext, contextCache *PredictionContextCache, visited map[PredictionContext]PredictionContext) PredictionContext { - - if context.isEmpty() { - return context - } - existing := visited[context] - if existing != nil { - return existing - } - existing = contextCache.Get(context) - if existing != nil { - visited[context] = existing - return existing - } - changed := false - parents := make([]PredictionContext, context.length()) - for i := 0; i < len(parents); i++ { - parent := getCachedBasePredictionContext(context.GetParent(i), contextCache, visited) - if changed || parent != context.GetParent(i) { - if !changed { - parents = make([]PredictionContext, context.length()) - for j := 0; j < context.length(); j++ { - parents[j] = context.GetParent(j) - } - changed = true - } - parents[i] = parent - } - } - if !changed { - contextCache.add(context) - visited[context] = context - return context - } - var updated PredictionContext - if len(parents) == 0 { - updated = BasePredictionContextEMPTY - } else if len(parents) == 1 { - updated = SingletonBasePredictionContextCreate(parents[0], context.getReturnState(0)) - } else { - updated = NewArrayPredictionContext(parents, context.(*ArrayPredictionContext).GetReturnStates()) - } - contextCache.add(updated) - visited[updated] = updated - visited[context] = updated - - return updated -} diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/prediction_mode.go b/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/prediction_mode.go deleted file mode 100644 index 7b9b72fab..000000000 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/prediction_mode.go +++ /dev/null @@ -1,529 +0,0 @@ -// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved. -// Use of this file is governed by the BSD 3-clause license that -// can be found in the LICENSE.txt file in the project root. - -package antlr - -// This enumeration defines the prediction modes available in ANTLR 4 along with -// utility methods for analyzing configuration sets for conflicts and/or -// ambiguities. - -const ( - // - // The SLL(*) prediction mode. This prediction mode ignores the current - // parser context when making predictions. This is the fastest prediction - // mode, and provides correct results for many grammars. 
This prediction - // mode is more powerful than the prediction mode provided by ANTLR 3, but - // may result in syntax errors for grammar and input combinations which are - // not SLL. - // - //

- // When using this prediction mode, the parser will either return a correct
- // parse tree (i.e. the same parse tree that would be returned with the
- // {@link //LL} prediction mode), or it will Report a syntax error. If a
- // syntax error is encountered when using the {@link //SLL} prediction mode,
- // it may be due to either an actual syntax error in the input or indicate
- // that the particular combination of grammar and input requires the more
- // powerful {@link //LL} prediction abilities to complete successfully.
- //
- // This prediction mode does not provide any guarantees for prediction
- // behavior for syntactically-incorrect inputs.
- //
- PredictionModeSLL = 0
- //
- // The LL(*) prediction mode. This prediction mode allows the current parser
- // context to be used for resolving SLL conflicts that occur during
- // prediction. This is the fastest prediction mode that guarantees correct
- // parse results for all combinations of grammars with syntactically correct
- // inputs.
- //
- // When using this prediction mode, the parser will make correct decisions
- // for all syntactically-correct grammar and input combinations. However, in
- // cases where the grammar is truly ambiguous this prediction mode might not
- // Report a precise answer for exactly which alternatives are
- // ambiguous.
- //
- // This prediction mode does not provide any guarantees for prediction
- // behavior for syntactically-incorrect inputs.
- //
- PredictionModeLL = 1
- //
- // The LL(*) prediction mode with exact ambiguity detection. In addition to
- // the correctness guarantees provided by the {@link //LL} prediction mode,
- // this prediction mode instructs the prediction algorithm to determine the
- // complete and exact set of ambiguous alternatives for every ambiguous
- // decision encountered while parsing.
- //
- // This prediction mode may be used for diagnosing ambiguities during
- // grammar development. Due to the performance overhead of calculating sets
- // of ambiguous alternatives, this prediction mode should be avoided when
- // the exact results are not necessary.
- //
- // This prediction mode does not provide any guarantees for prediction
- // behavior for syntactically-incorrect inputs.
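As an illustrative aside, the SLL/LL trade-off described above is what makes two-stage parsing attractive: try the cheap SLL mode first and rerun with full LL only if SLL reports an error. A minimal, self-contained Go sketch of that control flow follows; twoStageParse and the parse callback are hypothetical stand-ins, not the runtime's API, and only the mode values 0 and 1 mirror the constants defined here:

package main

import (
	"errors"
	"fmt"
)

const (
	PredictionModeSLL = 0
	PredictionModeLL  = 1
)

// twoStageParse tries the cheap SLL mode first and falls back to full LL
// only when SLL reports a syntax error. parse is a hypothetical stand-in
// for a real parse run configured with the given prediction mode.
func twoStageParse(parse func(mode int) (string, error)) (string, error) {
	if tree, err := parse(PredictionModeSLL); err == nil {
		return tree, nil
	}
	// SLL failed: either a genuine syntax error or an SLL weakness,
	// so retry with the stronger (and slower) LL mode to find out.
	return parse(PredictionModeLL)
}

func main() {
	// A fake parse that only succeeds under LL, to exercise the fallback.
	fake := func(mode int) (string, error) {
		if mode == PredictionModeLL {
			return "(expr (term 1))", nil
		}
		return "", errors.New("syntax error under SLL")
	}
	fmt.Println(twoStageParse(fake))
}

In practice the first pass is typically paired with a bail-out error strategy so that an SLL failure is cheap to detect before the LL retry.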

- // - PredictionModeLLExactAmbigDetection = 2 -) - -// Computes the SLL prediction termination condition. -// -//

-// This method computes the SLL prediction termination condition for both of -// the following cases.

-// -// -// -//

COMBINED SLL+LL PARSING

-// -//

When LL-fallback is enabled upon SLL conflict, correct predictions are -// ensured regardless of how the termination condition is computed by this -// method. Due to the substantially higher cost of LL prediction, the -// prediction should only fall back to LL when the additional lookahead -// cannot lead to a unique SLL prediction.

-// -//

Assuming combined SLL+LL parsing, an SLL configuration set with only -// conflicting subsets should fall back to full LL, even if the -// configuration sets don't resolve to the same alternative (e.g. -// {@code {1,2}} and {@code {3,4}}. If there is at least one non-conflicting -// configuration, SLL could continue with the hopes that more lookahead will -// resolve via one of those non-conflicting configurations.

-// -//

Here's the prediction termination rule then: SLL (for SLL+LL parsing) -// stops when it sees only conflicting configuration subsets. In contrast, -// full LL keeps going when there is uncertainty.

-// -//

HEURISTIC

-// -//

As a heuristic, we stop prediction when we see any conflicting subset -// unless we see a state that only has one alternative associated with it. -// The single-alt-state thing lets prediction continue upon rules like -// (otherwise, it would admit defeat too soon):

-// -//

{@code [12|1|[], 6|2|[], 12|2|[]]. s : (ID | ID ID?) ” }

-// -//

When the ATN simulation reaches the state before {@code ”}, it has a -// DFA state that looks like: {@code [12|1|[], 6|2|[], 12|2|[]]}. Naturally -// {@code 12|1|[]} and {@code 12|2|[]} conflict, but we cannot stop -// processing this node because alternative two has another way to continue, -// via {@code [6|2|[]]}.

-// -//

It also lets us continue for this rule:

-// -//

{@code [1|1|[], 1|2|[], 8|3|[]] a : A | A | A B }

-// -//

After Matching input A, we reach the stop state for rule A, state 1. -// State 8 is the state right before B. Clearly alternatives 1 and 2 -// conflict and no amount of further lookahead will separate the two. -// However, alternative 3 will be able to continue and so we do not stop -// working on this state. In the previous example, we're concerned with -// states associated with the conflicting alternatives. Here alt 3 is not -// associated with the conflicting configs, but since we can continue -// looking for input reasonably, don't declare the state done.

-// -//

PURE SLL PARSING

-// -//

To handle pure SLL parsing, all we have to do is make sure that we -// combine stack contexts for configurations that differ only by semantic -// predicate. From there, we can do the usual SLL termination heuristic.

-// -//

PREDICATES IN SLL+LL PARSING

-// -//

SLL decisions don't evaluate predicates until after they reach DFA stop -// states because they need to create the DFA cache that works in all -// semantic situations. In contrast, full LL evaluates predicates collected -// during start state computation so it can ignore predicates thereafter. -// This means that SLL termination detection can totally ignore semantic -// predicates.

-// -//

Implementation-wise, {@link ATNConfigSet} combines stack contexts but not -// semantic predicate contexts so we might see two configurations like the -// following.

-// -//

{@code (s, 1, x, {}), (s, 1, x', {p})}

-// -//

Before testing these configurations against others, we have to merge -// {@code x} and {@code x'} (without modifying the existing configurations). -// For example, we test {@code (x+x')==x”} when looking for conflicts in -// the following configurations.

-// -//

{@code (s, 1, x, {}), (s, 1, x', {p}), (s, 2, x”, {})}

-// -//

If the configuration set has predicates (as indicated by -// {@link ATNConfigSet//hasSemanticContext}), this algorithm makes a copy of -// the configurations to strip out all of the predicates so that a standard -// {@link ATNConfigSet} will merge everything ignoring predicates.

-func PredictionModehasSLLConflictTerminatingPrediction(mode int, configs ATNConfigSet) bool { - // Configs in rule stop states indicate reaching the end of the decision - // rule (local context) or end of start rule (full context). If all - // configs meet this condition, then none of the configurations is able - // to Match additional input so we terminate prediction. - // - if PredictionModeallConfigsInRuleStopStates(configs) { - return true - } - // pure SLL mode parsing - if mode == PredictionModeSLL { - // Don't bother with combining configs from different semantic - // contexts if we can fail over to full LL costs more time - // since we'll often fail over anyway. - if configs.HasSemanticContext() { - // dup configs, tossing out semantic predicates - dup := NewBaseATNConfigSet(false) - for _, c := range configs.GetItems() { - - // NewBaseATNConfig({semanticContext:}, c) - c = NewBaseATNConfig2(c, SemanticContextNone) - dup.Add(c, nil) - } - configs = dup - } - // now we have combined contexts for configs with dissimilar preds - } - // pure SLL or combined SLL+LL mode parsing - altsets := PredictionModegetConflictingAltSubsets(configs) - return PredictionModehasConflictingAltSet(altsets) && !PredictionModehasStateAssociatedWithOneAlt(configs) -} - -// Checks if any configuration in {@code configs} is in a -// {@link RuleStopState}. Configurations meeting this condition have reached -// the end of the decision rule (local context) or end of start rule (full -// context). -// -// @param configs the configuration set to test -// @return {@code true} if any configuration in {@code configs} is in a -// {@link RuleStopState}, otherwise {@code false} -func PredictionModehasConfigInRuleStopState(configs ATNConfigSet) bool { - for _, c := range configs.GetItems() { - if _, ok := c.GetState().(*RuleStopState); ok { - return true - } - } - return false -} - -// Checks if all configurations in {@code configs} are in a -// {@link RuleStopState}. Configurations meeting this condition have reached -// the end of the decision rule (local context) or end of start rule (full -// context). -// -// @param configs the configuration set to test -// @return {@code true} if all configurations in {@code configs} are in a -// {@link RuleStopState}, otherwise {@code false} -func PredictionModeallConfigsInRuleStopStates(configs ATNConfigSet) bool { - - for _, c := range configs.GetItems() { - if _, ok := c.GetState().(*RuleStopState); !ok { - return false - } - } - return true -} - -// Full LL prediction termination. -// -//

-// Can we stop looking ahead during ATN simulation or is there some
-// uncertainty as to which alternative we will ultimately pick, after
-// consuming more input? Even if there are partial conflicts, we might know
-// that everything is going to resolve to the same minimum alternative. That
-// means we can stop since no more lookahead will change that fact. On the
-// other hand, there might be multiple conflicts that resolve to different
-// minimums. That means we need more look ahead to decide which of those
-// alternatives we should predict.


-// The basic idea is to split the set of configurations {@code C}, into
-// conflicting subsets {@code (s, _, ctx, _)} and singleton subsets with
-// non-conflicting configurations. Two configurations conflict if they have
-// identical {@link ATNConfig//state} and {@link ATNConfig//context} values
-// but different {@link ATNConfig//alt} value, e.g. {@code (s, i, ctx, _)}
-// and {@code (s, j, ctx, _)} for {@code i!=j}.


-// Reduce these configuration subsets to the set of possible alternatives.
-// You can compute the alternative subsets in one pass as follows:


-// {@code A_s,ctx = {i | (s, i, ctx, _)}} for each configuration in
-// {@code C} holding {@code s} and {@code ctx} fixed.


-// Or in pseudo-code, for each configuration {@code c} in {@code C}:

-// map[c] U= c.{@link ATNConfig//alt alt} // map hash/equals uses s and x, not
-// alt and not pred

-// The values in {@code map} are the set of {@code A_s,ctx} sets.
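Continuing the toy sketch from above, the pass described by the pseudo-code can be written as a single loop that unions alternatives into a set keyed by (state, context), deliberately ignoring both the alternative and the predicate in the key:

```go
// conflictingAltSubsets groups the alternatives of each configuration by
// (state, context); each resulting set is one of the A_s,ctx subsets
// described above.
func conflictingAltSubsets(configs []config) []map[int]bool {
	type key struct {
		state int
		ctx   string
	}
	byKey := make(map[key]map[int]bool)
	var order []key
	for _, c := range configs {
		k := key{c.state, c.ctx}
		if byKey[k] == nil {
			byKey[k] = make(map[int]bool)
			order = append(order, k)
		}
		byKey[k][c.alt] = true // map[c] U= c.alt
	}
	subsets := make([]map[int]bool, 0, len(order))
	for _, k := range order {
		subsets = append(subsets, byKey[k])
	}
	return subsets
}
```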


-// If {@code |A_s,ctx|=1} then there is no conflict associated with
-// {@code s} and {@code ctx}.


-// Reduce the subsets to singletons by choosing a minimum of each subset. If
-// the union of these alternative subsets is a singleton, then no amount of
-// more lookahead will help us. We will always pick that alternative. If,
-// however, there is more than one alternative, then we are uncertain which
-// alternative to predict and must continue looking for resolution. We may
-// or may not discover an ambiguity in the future, even if there are no
-// conflicting subsets this round.
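On the toy types, that stop rule reduces to: take the minimum alternative of every subset and stop only when all the minima agree. A rough sketch:

```go
// resolvesToSingleAlt picks the minimum alternative of every subset and
// reports that single alternative only if every subset resolves to the same
// one; otherwise prediction must keep consuming lookahead.
func resolvesToSingleAlt(subsets []map[int]bool) (int, bool) {
	viable := -1
	for _, s := range subsets {
		lowest := -1
		for a := range s {
			if lowest == -1 || a < lowest {
				lowest = a
			}
		}
		if viable == -1 {
			viable = lowest
		} else if viable != lowest {
			return -1, false // more than one minimum: keep looking ahead
		}
	}
	return viable, viable != -1
}
```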


-// The biggest sin is to terminate early because it means we've made a
-// decision but were uncertain as to the eventual outcome. We haven't used
-// enough lookahead. On the other hand, announcing a conflict too late is no
-// big deal; you will still have the conflict. It's just inefficient. It
-// might even look until the end of file.


-// No special consideration for semantic predicates is required because
-// predicates are evaluated on-the-fly for full LL prediction, ensuring that
-// no configuration contains a semantic context during the termination
-// check.


-// CONFLICTING CONFIGS


-// Two configurations {@code (s, i, x)} and {@code (s, j, x')}, conflict
-// when {@code i!=j} but {@code x=x'}. Because we merge all
-// {@code (s, i, _)} configurations together, that means that there are at
-// most {@code n} configurations associated with state {@code s} for
-// {@code n} possible alternatives in the decision. The merged stacks
-// complicate the comparison of configuration contexts {@code x} and
-// {@code x'}. Sam checks to see if one is a subset of the other by calling
-// merge and checking to see if the merged result is either {@code x} or
-// {@code x'}. If the {@code x} associated with lowest alternative {@code i}
-// is the superset, then {@code i} is the only possible prediction since the
-// others resolve to {@code min(i)} as well. However, if {@code x} is
-// associated with {@code j>i} then at least one stack configuration for
-// {@code j} is not in conflict with alternative {@code i}. The algorithm
-// should keep going, looking for more lookahead due to the uncertainty.


-// For simplicity, I'm doing an equality check between {@code x} and
-// {@code x'} that lets the algorithm continue to consume lookahead longer
-// than necessary. The reason I like the equality is of course the
-// simplicity but also because that is the test you need to detect the
-// alternatives that are actually in conflict.
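In the toy model the equality-based conflict test is just a field comparison; the subset-via-merge check described above would need the full context-merge machinery, which is why the simpler test is preferred even though it can make the algorithm look ahead a little longer than strictly necessary:

```go
// conflict mirrors the simple test used here: two configurations conflict
// when they share a state and an identical context but predict different
// alternatives.
func conflict(a, b config) bool {
	return a.state == b.state && a.ctx == b.ctx && a.alt != b.alt
}
```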


-// CONTINUE/STOP RULE


-// Continue if union of resolved alternative sets from non-conflicting and
-// conflicting alternative subsets has more than one alternative. We are
-// uncertain about which alternative to predict.


-// The complete set of alternatives, {@code [i for (_,i,_)]}, tells us which
-// alternatives are still in the running for the amount of input we've
-// consumed at this point. The conflicting sets let us strip away
-// configurations that won't lead to more states because we resolve
-// conflicts to the configuration with a minimum alternate for the
-// conflicting set.
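Putting the toy pieces together gives a small worked example of the continue/stop rule; this main function completes the sketch file started earlier in this section:

```go
func main() {
	// Alternatives 1 and 2 share (state, context) and therefore conflict;
	// alternative 3 sits in its own non-conflicting singleton subset.
	configs := []config{
		{state: 5, alt: 1, ctx: "x", pred: ""},
		{state: 5, alt: 2, ctx: "x", pred: "p"},
		{state: 5, alt: 3, ctx: "y", pred: ""},
	}
	subsets := conflictingAltSubsets(stripPredicates(configs))
	fmt.Println(subsets) // [map[1:true 2:true] map[3:true]]

	// Union of the resolved minima is {1, 3}: still uncertain, so continue.
	fmt.Println(resolvesToSingleAlt(subsets)) // -1 false

	// If every subset resolved to alternative 1, prediction could stop.
	fmt.Println(resolvesToSingleAlt([]map[int]bool{{1: true, 2: true}, {1: true, 3: true}})) // 1 true
}
```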


-// CASES


-// EXACT AMBIGUITY DETECTION


-// If all states report the same conflicting set of alternatives, then we
-// know we have the exact ambiguity set.


-// |A_i|>1 and
-// A_i = A_j for all i, j.


-// In other words, we continue examining lookahead until all {@code A_i}
-// have more than one alternative and all {@code A_i} are the same. If
-// {@code A={{1,2}, {1,3}}}, then regular LL prediction would terminate
-// because the resolved set is {@code {1}}. To determine what the real
-// ambiguity is, we have to know whether the ambiguity is between one and
-// two or one and three so we keep going. We can only stop prediction when
-// we need exact ambiguity detection when the sets look like
-// {@code A={{1,2}}} or {@code {{1,2},{1,2}}}, etc...
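For exact ambiguity reporting, the toy check becomes: every subset has more than one alternative and all subsets are the same set. A sketch, again extending the toy file above:

```go
// exactAmbiguity mirrors |A_i| > 1 and A_i = A_j for all i, j: report a true
// ambiguity only when every subset conflicts and all subsets are identical.
func exactAmbiguity(subsets []map[int]bool) bool {
	if len(subsets) == 0 {
		return false
	}
	first := subsets[0]
	for _, s := range subsets {
		if len(s) < 2 || len(s) != len(first) {
			return false
		}
		for a := range first {
			if !s[a] {
				return false
			}
		}
	}
	return true
}
```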

-func PredictionModeresolvesToJustOneViableAlt(altsets []*BitSet) int { - return PredictionModegetSingleViableAlt(altsets) -} - -// Determines if every alternative subset in {@code altsets} contains more -// than one alternative. -// -// @param altsets a collection of alternative subsets -// @return {@code true} if every {@link BitSet} in {@code altsets} has -// {@link BitSet//cardinality cardinality} > 1, otherwise {@code false} -func PredictionModeallSubsetsConflict(altsets []*BitSet) bool { - return !PredictionModehasNonConflictingAltSet(altsets) -} - -// Determines if any single alternative subset in {@code altsets} contains -// exactly one alternative. -// -// @param altsets a collection of alternative subsets -// @return {@code true} if {@code altsets} contains a {@link BitSet} with -// {@link BitSet//cardinality cardinality} 1, otherwise {@code false} -func PredictionModehasNonConflictingAltSet(altsets []*BitSet) bool { - for i := 0; i < len(altsets); i++ { - alts := altsets[i] - if alts.length() == 1 { - return true - } - } - return false -} - -// Determines if any single alternative subset in {@code altsets} contains -// more than one alternative. -// -// @param altsets a collection of alternative subsets -// @return {@code true} if {@code altsets} contains a {@link BitSet} with -// {@link BitSet//cardinality cardinality} > 1, otherwise {@code false} -func PredictionModehasConflictingAltSet(altsets []*BitSet) bool { - for i := 0; i < len(altsets); i++ { - alts := altsets[i] - if alts.length() > 1 { - return true - } - } - return false -} - -// Determines if every alternative subset in {@code altsets} is equivalent. -// -// @param altsets a collection of alternative subsets -// @return {@code true} if every member of {@code altsets} is equal to the -// others, otherwise {@code false} -func PredictionModeallSubsetsEqual(altsets []*BitSet) bool { - var first *BitSet - - for i := 0; i < len(altsets); i++ { - alts := altsets[i] - if first == nil { - first = alts - } else if alts != first { - return false - } - } - - return true -} - -// Returns the unique alternative predicted by all alternative subsets in -// {@code altsets}. If no such alternative exists, this method returns -// {@link ATN//INVALID_ALT_NUMBER}. -// -// @param altsets a collection of alternative subsets -func PredictionModegetUniqueAlt(altsets []*BitSet) int { - all := PredictionModeGetAlts(altsets) - if all.length() == 1 { - return all.minValue() - } - - return ATNInvalidAltNumber -} - -// Gets the complete set of represented alternatives for a collection of -// alternative subsets. This method returns the union of each {@link BitSet} -// in {@code altsets}. -// -// @param altsets a collection of alternative subsets -// @return the set of represented alternatives in {@code altsets} -func PredictionModeGetAlts(altsets []*BitSet) *BitSet { - all := NewBitSet() - for _, alts := range altsets { - all.or(alts) - } - return all -} - -// PredictionModegetConflictingAltSubsets gets the conflicting alt subsets from a configuration set. -// For each configuration {@code c} in {@code configs}: -// -//
-// map[c] U= c.{@link ATNConfig//alt alt} // map hash/equals uses s and x, not
-// alt and not pred
-func PredictionModegetConflictingAltSubsets(configs ATNConfigSet) []*BitSet { - configToAlts := NewJMap[ATNConfig, *BitSet, *ATNAltConfigComparator[ATNConfig]](atnAltCfgEqInst) - - for _, c := range configs.GetItems() { - - alts, ok := configToAlts.Get(c) - if !ok { - alts = NewBitSet() - configToAlts.Put(c, alts) - } - alts.add(c.GetAlt()) - } - - return configToAlts.Values() -} - -// PredictionModeGetStateToAltMap gets a map from state to alt subset from a configuration set. For each -// configuration {@code c} in {@code configs}: -// -//
-// map[c.{@link ATNConfig//state state}] U= c.{@link ATNConfig//alt alt}
-func PredictionModeGetStateToAltMap(configs ATNConfigSet) *AltDict { - m := NewAltDict() - - for _, c := range configs.GetItems() { - alts := m.Get(c.GetState().String()) - if alts == nil { - alts = NewBitSet() - m.put(c.GetState().String(), alts) - } - alts.(*BitSet).add(c.GetAlt()) - } - return m -} - -func PredictionModehasStateAssociatedWithOneAlt(configs ATNConfigSet) bool { - values := PredictionModeGetStateToAltMap(configs).values() - for i := 0; i < len(values); i++ { - if values[i].(*BitSet).length() == 1 { - return true - } - } - return false -} - -func PredictionModegetSingleViableAlt(altsets []*BitSet) int { - result := ATNInvalidAltNumber - - for i := 0; i < len(altsets); i++ { - alts := altsets[i] - minAlt := alts.minValue() - if result == ATNInvalidAltNumber { - result = minAlt - } else if result != minAlt { // more than 1 viable alt - return ATNInvalidAltNumber - } - } - return result -} diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/rule_context.go b/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/rule_context.go deleted file mode 100644 index 210699ba2..000000000 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/rule_context.go +++ /dev/null @@ -1,114 +0,0 @@ -// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved. -// Use of this file is governed by the BSD 3-clause license that -// can be found in the LICENSE.txt file in the project root. - -package antlr - -// A rule context is a record of a single rule invocation. It knows -// which context invoked it, if any. If there is no parent context, then -// naturally the invoking state is not valid. The parent link -// provides a chain upwards from the current rule invocation to the root -// of the invocation tree, forming a stack. We actually carry no -// information about the rule associated with b context (except -// when parsing). We keep only the state number of the invoking state from -// the ATN submachine that invoked b. Contrast b with the s -// pointer inside ParserRuleContext that tracks the current state -// being "executed" for the current rule. -// -// The parent contexts are useful for computing lookahead sets and -// getting error information. -// -// These objects are used during parsing and prediction. -// For the special case of parsers, we use the subclass -// ParserRuleContext. -// -// @see ParserRuleContext -// - -type RuleContext interface { - RuleNode - - GetInvokingState() int - SetInvokingState(int) - - GetRuleIndex() int - IsEmpty() bool - - GetAltNumber() int - SetAltNumber(altNumber int) - - String([]string, RuleContext) string -} - -type BaseRuleContext struct { - parentCtx RuleContext - invokingState int - RuleIndex int -} - -func NewBaseRuleContext(parent RuleContext, invokingState int) *BaseRuleContext { - - rn := new(BaseRuleContext) - - // What context invoked b rule? - rn.parentCtx = parent - - // What state invoked the rule associated with b context? - // The "return address" is the followState of invokingState - // If parent is nil, b should be -1. 
- if parent == nil { - rn.invokingState = -1 - } else { - rn.invokingState = invokingState - } - - return rn -} - -func (b *BaseRuleContext) GetBaseRuleContext() *BaseRuleContext { - return b -} - -func (b *BaseRuleContext) SetParent(v Tree) { - if v == nil { - b.parentCtx = nil - } else { - b.parentCtx = v.(RuleContext) - } -} - -func (b *BaseRuleContext) GetInvokingState() int { - return b.invokingState -} - -func (b *BaseRuleContext) SetInvokingState(t int) { - b.invokingState = t -} - -func (b *BaseRuleContext) GetRuleIndex() int { - return b.RuleIndex -} - -func (b *BaseRuleContext) GetAltNumber() int { - return ATNInvalidAltNumber -} - -func (b *BaseRuleContext) SetAltNumber(altNumber int) {} - -// A context is empty if there is no invoking state meaning nobody call -// current context. -func (b *BaseRuleContext) IsEmpty() bool { - return b.invokingState == -1 -} - -// Return the combined text of all child nodes. This method only considers -// tokens which have been added to the parse tree. -//

-// Since tokens on hidden channels (e.g. whitespace or comments) are not -// added to the parse trees, they will not appear in the output of b -// method. -// - -func (b *BaseRuleContext) GetParent() Tree { - return b.parentCtx -} diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/utils_set.go b/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/utils_set.go deleted file mode 100644 index c9bd6751e..000000000 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/utils_set.go +++ /dev/null @@ -1,235 +0,0 @@ -package antlr - -import "math" - -const ( - _initalCapacity = 16 - _initalBucketCapacity = 8 - _loadFactor = 0.75 -) - -type Set interface { - Add(value interface{}) (added interface{}) - Len() int - Get(value interface{}) (found interface{}) - Contains(value interface{}) bool - Values() []interface{} - Each(f func(interface{}) bool) -} - -type array2DHashSet struct { - buckets [][]Collectable[any] - hashcodeFunction func(interface{}) int - equalsFunction func(Collectable[any], Collectable[any]) bool - - n int // How many elements in set - threshold int // when to expand - - currentPrime int // jump by 4 primes each expand or whatever - initialBucketCapacity int -} - -func (as *array2DHashSet) Each(f func(interface{}) bool) { - if as.Len() < 1 { - return - } - - for _, bucket := range as.buckets { - for _, o := range bucket { - if o == nil { - break - } - if !f(o) { - return - } - } - } -} - -func (as *array2DHashSet) Values() []interface{} { - if as.Len() < 1 { - return nil - } - - values := make([]interface{}, 0, as.Len()) - as.Each(func(i interface{}) bool { - values = append(values, i) - return true - }) - return values -} - -func (as *array2DHashSet) Contains(value Collectable[any]) bool { - return as.Get(value) != nil -} - -func (as *array2DHashSet) Add(value Collectable[any]) interface{} { - if as.n > as.threshold { - as.expand() - } - return as.innerAdd(value) -} - -func (as *array2DHashSet) expand() { - old := as.buckets - - as.currentPrime += 4 - - var ( - newCapacity = len(as.buckets) << 1 - newTable = as.createBuckets(newCapacity) - newBucketLengths = make([]int, len(newTable)) - ) - - as.buckets = newTable - as.threshold = int(float64(newCapacity) * _loadFactor) - - for _, bucket := range old { - if bucket == nil { - continue - } - - for _, o := range bucket { - if o == nil { - break - } - - b := as.getBuckets(o) - bucketLength := newBucketLengths[b] - var newBucket []Collectable[any] - if bucketLength == 0 { - // new bucket - newBucket = as.createBucket(as.initialBucketCapacity) - newTable[b] = newBucket - } else { - newBucket = newTable[b] - if bucketLength == len(newBucket) { - // expand - newBucketCopy := make([]Collectable[any], len(newBucket)<<1) - copy(newBucketCopy[:bucketLength], newBucket) - newBucket = newBucketCopy - newTable[b] = newBucket - } - } - - newBucket[bucketLength] = o - newBucketLengths[b]++ - } - } -} - -func (as *array2DHashSet) Len() int { - return as.n -} - -func (as *array2DHashSet) Get(o Collectable[any]) interface{} { - if o == nil { - return nil - } - - b := as.getBuckets(o) - bucket := as.buckets[b] - if bucket == nil { // no bucket - return nil - } - - for _, e := range bucket { - if e == nil { - return nil // empty slot; not there - } - if as.equalsFunction(e, o) { - return e - } - } - - return nil -} - -func (as *array2DHashSet) innerAdd(o Collectable[any]) interface{} { - b := as.getBuckets(o) - - bucket := as.buckets[b] - - // new bucket - if bucket == nil { - bucket = as.createBucket(as.initialBucketCapacity) - 
bucket[0] = o - - as.buckets[b] = bucket - as.n++ - return o - } - - // look for it in bucket - for i := 0; i < len(bucket); i++ { - existing := bucket[i] - if existing == nil { // empty slot; not there, add. - bucket[i] = o - as.n++ - return o - } - - if as.equalsFunction(existing, o) { // found existing, quit - return existing - } - } - - // full bucket, expand and add to end - oldLength := len(bucket) - bucketCopy := make([]Collectable[any], oldLength<<1) - copy(bucketCopy[:oldLength], bucket) - bucket = bucketCopy - as.buckets[b] = bucket - bucket[oldLength] = o - as.n++ - return o -} - -func (as *array2DHashSet) getBuckets(value Collectable[any]) int { - hash := as.hashcodeFunction(value) - return hash & (len(as.buckets) - 1) -} - -func (as *array2DHashSet) createBuckets(cap int) [][]Collectable[any] { - return make([][]Collectable[any], cap) -} - -func (as *array2DHashSet) createBucket(cap int) []Collectable[any] { - return make([]Collectable[any], cap) -} - -func newArray2DHashSetWithCap( - hashcodeFunction func(interface{}) int, - equalsFunction func(Collectable[any], Collectable[any]) bool, - initCap int, - initBucketCap int, -) *array2DHashSet { - if hashcodeFunction == nil { - hashcodeFunction = standardHashFunction - } - - if equalsFunction == nil { - equalsFunction = standardEqualsFunction - } - - ret := &array2DHashSet{ - hashcodeFunction: hashcodeFunction, - equalsFunction: equalsFunction, - - n: 0, - threshold: int(math.Floor(_initalCapacity * _loadFactor)), - - currentPrime: 1, - initialBucketCapacity: initBucketCap, - } - - ret.buckets = ret.createBuckets(initCap) - return ret -} - -func newArray2DHashSet( - hashcodeFunction func(interface{}) int, - equalsFunction func(Collectable[any], Collectable[any]) bool, -) *array2DHashSet { - return newArray2DHashSetWithCap(hashcodeFunction, equalsFunction, _initalCapacity, _initalBucketCapacity) -} diff --git a/vendor/github.com/antlr4-go/antlr/v4/.gitignore b/vendor/github.com/antlr4-go/antlr/v4/.gitignore new file mode 100644 index 000000000..38ea34ff5 --- /dev/null +++ b/vendor/github.com/antlr4-go/antlr/v4/.gitignore @@ -0,0 +1,18 @@ +### Go template + +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, built with `go test -c` +*.test + + +# Go workspace file +go.work + +# No Goland stuff in this repo +.idea diff --git a/vendor/github.com/antlr4-go/antlr/v4/LICENSE b/vendor/github.com/antlr4-go/antlr/v4/LICENSE new file mode 100644 index 000000000..a22292eb5 --- /dev/null +++ b/vendor/github.com/antlr4-go/antlr/v4/LICENSE @@ -0,0 +1,28 @@ +Copyright (c) 2012-2023 The ANTLR Project. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +1. Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright +notice, this list of conditions and the following disclaimer in the +documentation and/or other materials provided with the distribution. + +3. Neither name of copyright holders nor the names of its contributors +may be used to endorse or promote products derived from this software +without specific prior written permission. 
+ +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/antlr4-go/antlr/v4/README.md b/vendor/github.com/antlr4-go/antlr/v4/README.md new file mode 100644 index 000000000..03e5b83eb --- /dev/null +++ b/vendor/github.com/antlr4-go/antlr/v4/README.md @@ -0,0 +1,54 @@ +[![Go Report Card](https://goreportcard.com/badge/github.com/antlr4-go/antlr?style=flat-square)](https://goreportcard.com/report/github.com/antlr4-go/antlr) +[![PkgGoDev](https://pkg.go.dev/badge/github.com/github.com/antlr4-go/antlr)](https://pkg.go.dev/github.com/antlr4-go/antlr) +[![Release](https://img.shields.io/github/v/release/antlr4-go/antlr?sort=semver&style=flat-square)](https://github.com/antlr4-go/antlr/releases/latest) +[![Release](https://img.shields.io/github/go-mod/go-version/antlr4-go/antlr?style=flat-square)](https://github.com/antlr4-go/antlr/releases/latest) +[![Maintenance](https://img.shields.io/badge/Maintained%3F-yes-green.svg?style=flat-square)](https://github.com/antlr4-go/antlr/commit-activity) +[![License](https://img.shields.io/badge/License-BSD_3--Clause-blue.svg)](https://opensource.org/licenses/BSD-3-Clause) +[![GitHub stars](https://img.shields.io/github/stars/antlr4-go/antlr?style=flat-square&label=Star&maxAge=2592000)](https://GitHub.com/Naereen/StrapDown.js/stargazers/) +# ANTLR4 Go Runtime Module Repo + +IMPORTANT: Please submit PRs via a clone of the https://github.com/antlr/antlr4 repo, and not here. + + - Do not submit PRs or any change requests to this repo + - This repo is read only and is updated by the ANTLR team to create a new release of the Go Runtime for ANTLR + - This repo contains the Go runtime that your generated projects should import + +## Introduction + +This repo contains the official modules for the Go Runtime for ANTLR. It is a copy of the runtime maintained +at: https://github.com/antlr/antlr4/tree/master/runtime/Go/antlr and is automatically updated by the ANTLR team to create +the official Go runtime release only. No development work is carried out in this repo and PRs are not accepted here. + +The dev branch of this repo is kept in sync with the dev branch of the main ANTLR repo and is updated periodically. + +### Why? + +The `go get` command is unable to retrieve the Go runtime when it is embedded so +deeply in the main repo. A `go get` against the `antlr/antlr4` repo, while retrieving the correct source code for the runtime, +does not correctly resolve tags and will create a reference in your `go.mod` file that is unclear, will not upgrade smoothly and +causes confusion. 
+ +For instance, the current Go runtime release, which is tagged with v4.13.0 in `antlr/antlr4` is retrieved by go get as: + +```sh +require ( + github.com/antlr/antlr4/runtime/Go/antlr/v4 v4.0.0-20230219212500-1f9a474cc2dc +) +``` + +Where you would expect to see: + +```sh +require ( + github.com/antlr/antlr4/runtime/Go/antlr/v4 v4.13.0 +) +``` + +The decision was taken to create a separate org in a separate repo to hold the official Go runtime for ANTLR and +from whence users can expect `go get` to behave as expected. + + +# Documentation +Please read the official documentation at: https://github.com/antlr/antlr4/blob/master/doc/index.md for tips on +migrating existing projects to use the new module location and for information on how to use the Go runtime in +general. diff --git a/vendor/github.com/antlr4-go/antlr/v4/antlrdoc.go b/vendor/github.com/antlr4-go/antlr/v4/antlrdoc.go new file mode 100644 index 000000000..3bb4fd7c4 --- /dev/null +++ b/vendor/github.com/antlr4-go/antlr/v4/antlrdoc.go @@ -0,0 +1,102 @@ +/* +Package antlr implements the Go version of the ANTLR 4 runtime. + +# The ANTLR Tool + +ANTLR (ANother Tool for Language Recognition) is a powerful parser generator for reading, processing, executing, +or translating structured text or binary files. It's widely used to build languages, tools, and frameworks. +From a grammar, ANTLR generates a parser that can build parse trees and also generates a listener interface +(or visitor) that makes it easy to respond to the recognition of phrases of interest. + +# Go Runtime + +At version 4.11.x and prior, the Go runtime was not properly versioned for go modules. After this point, the runtime +source code to be imported was held in the `runtime/Go/antlr/v4` directory, and the go.mod file was updated to reflect the version of +ANTLR4 that it is compatible with (I.E. uses the /v4 path). + +However, this was found to be problematic, as it meant that with the runtime embedded so far underneath the root +of the repo, the `go get` and related commands could not properly resolve the location of the go runtime source code. +This meant that the reference to the runtime in your `go.mod` file would refer to the correct source code, but would not +list the release tag such as @4.12.0 - this was confusing, to say the least. + +As of 4.12.1, the runtime is now available as a go module in its own repo, and can be imported as `github.com/antlr4-go/antlr` +(the go get command should also be used with this path). See the main documentation for the ANTLR4 project for more information, +which is available at [ANTLR docs]. The documentation for using the Go runtime is available at [Go runtime docs]. + +This means that if you are using the source code without modules, you should also use the source code in the [new repo]. +Though we highly recommend that you use go modules, as they are now idiomatic for Go. + +I am aware that this change will prove Hyrum's Law, but am prepared to live with it for the common good. + +Go runtime author: [Jim Idle] jimi@idle.ws + +# Code Generation + +ANTLR supports the generation of code in a number of [target languages], and the generated code is supported by a +runtime library, written specifically to support the generated code in the target language. This library is the +runtime for the Go target. + +To generate code for the go target, it is generally recommended to place the source grammar files in a package of +their own, and use the `.sh` script method of generating code, using the go generate directive. 
In that same directory +it is usual, though not required, to place the antlr tool that should be used to generate the code. That does mean +that the antlr tool JAR file will be checked in to your source code control though, so you are, of course, free to use any other +way of specifying the version of the ANTLR tool to use, such as aliasing in `.zshrc` or equivalent, or a profile in +your IDE, or configuration in your CI system. Checking in the jar does mean that it is easy to reproduce the build as +it was at any point in its history. + +Here is a general/recommended template for an ANTLR based recognizer in Go: + + . + ├── parser + │ ├── mygrammar.g4 + │ ├── antlr-4.12.1-complete.jar + │ ├── generate.go + │ └── generate.sh + ├── parsing - generated code goes here + │ └── error_listeners.go + ├── go.mod + ├── go.sum + ├── main.go + └── main_test.go + +Make sure that the package statement in your grammar file(s) reflects the go package the generated code will exist in. + +The generate.go file then looks like this: + + package parser + + //go:generate ./generate.sh + +And the generate.sh file will look similar to this: + + #!/bin/sh + + alias antlr4='java -Xmx500M -cp "./antlr4-4.12.1-complete.jar:$CLASSPATH" org.antlr.v4.Tool' + antlr4 -Dlanguage=Go -no-visitor -package parsing *.g4 + +depending on whether you want visitors or listeners or any other ANTLR options. Not that another option here +is to generate the code into a + +From the command line at the root of your source package (location of go.mo)d) you can then simply issue the command: + + go generate ./... + +Which will generate the code for the parser, and place it in the parsing package. You can then use the generated code +by importing the parsing package. + +There are no hard and fast rules on this. It is just a recommendation. You can generate the code in any way and to anywhere you like. + +# Copyright Notice + +Copyright (c) 2012-2023 The ANTLR Project. All rights reserved. + +Use of this file is governed by the BSD 3-clause license, which can be found in the [LICENSE.txt] file in the project root. + +[target languages]: https://github.com/antlr/antlr4/tree/master/runtime +[LICENSE.txt]: https://github.com/antlr/antlr4/blob/master/LICENSE.txt +[ANTLR docs]: https://github.com/antlr/antlr4/blob/master/doc/index.md +[new repo]: https://github.com/antlr4-go/antlr +[Jim Idle]: https://github.com/jimidle +[Go runtime docs]: https://github.com/antlr/antlr4/blob/master/doc/go-target.md +*/ +package antlr diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/atn.go b/vendor/github.com/antlr4-go/antlr/v4/atn.go similarity index 94% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/atn.go rename to vendor/github.com/antlr4-go/antlr/v4/atn.go index 98010d2e6..cdeefed24 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/atn.go +++ b/vendor/github.com/antlr4-go/antlr/v4/atn.go @@ -20,10 +20,11 @@ var ATNInvalidAltNumber int // [ALL(*)]: https://www.antlr.org/papers/allstar-techreport.pdf // [Recursive Transition Network]: https://en.wikipedia.org/wiki/Recursive_transition_network type ATN struct { - // DecisionToState is the decision points for all rules, subrules, optional - // blocks, ()+, ()*, etc. Each subrule/rule is a decision point, and we must track them so we + + // DecisionToState is the decision points for all rules, sub-rules, optional + // blocks, ()+, ()*, etc. Each sub-rule/rule is a decision point, and we must track them, so we // can go back later and build DFA predictors for them. 
This includes - // all the rules, subrules, optional blocks, ()+, ()* etc... + // all the rules, sub-rules, optional blocks, ()+, ()* etc... DecisionToState []DecisionState // grammarType is the ATN type and is used for deserializing ATNs from strings. @@ -51,6 +52,8 @@ type ATN struct { // specified, and otherwise is nil. ruleToTokenType []int + // ATNStates is a list of all states in the ATN, ordered by state number. + // states []ATNState mu sync.Mutex diff --git a/vendor/github.com/antlr4-go/antlr/v4/atn_config.go b/vendor/github.com/antlr4-go/antlr/v4/atn_config.go new file mode 100644 index 000000000..a83f25d34 --- /dev/null +++ b/vendor/github.com/antlr4-go/antlr/v4/atn_config.go @@ -0,0 +1,335 @@ +// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved. +// Use of this file is governed by the BSD 3-clause license that +// can be found in the LICENSE.txt file in the project root. + +package antlr + +import ( + "fmt" +) + +const ( + lexerConfig = iota // Indicates that this ATNConfig is for a lexer + parserConfig // Indicates that this ATNConfig is for a parser +) + +// ATNConfig is a tuple: (ATN state, predicted alt, syntactic, semantic +// context). The syntactic context is a graph-structured stack node whose +// path(s) to the root is the rule invocation(s) chain used to arrive in the +// state. The semantic context is the tree of semantic predicates encountered +// before reaching an ATN state. +type ATNConfig struct { + precedenceFilterSuppressed bool + state ATNState + alt int + context *PredictionContext + semanticContext SemanticContext + reachesIntoOuterContext int + cType int // lexerConfig or parserConfig + lexerActionExecutor *LexerActionExecutor + passedThroughNonGreedyDecision bool +} + +// NewATNConfig6 creates a new ATNConfig instance given a state, alt and context only +func NewATNConfig6(state ATNState, alt int, context *PredictionContext) *ATNConfig { + return NewATNConfig5(state, alt, context, SemanticContextNone) +} + +// NewATNConfig5 creates a new ATNConfig instance given a state, alt, context and semantic context +func NewATNConfig5(state ATNState, alt int, context *PredictionContext, semanticContext SemanticContext) *ATNConfig { + if semanticContext == nil { + panic("semanticContext cannot be nil") // TODO: Necessary? 
+ } + + pac := &ATNConfig{} + pac.state = state + pac.alt = alt + pac.context = context + pac.semanticContext = semanticContext + pac.cType = parserConfig + return pac +} + +// NewATNConfig4 creates a new ATNConfig instance given an existing config, and a state only +func NewATNConfig4(c *ATNConfig, state ATNState) *ATNConfig { + return NewATNConfig(c, state, c.GetContext(), c.GetSemanticContext()) +} + +// NewATNConfig3 creates a new ATNConfig instance given an existing config, a state and a semantic context +func NewATNConfig3(c *ATNConfig, state ATNState, semanticContext SemanticContext) *ATNConfig { + return NewATNConfig(c, state, c.GetContext(), semanticContext) +} + +// NewATNConfig2 creates a new ATNConfig instance given an existing config, and a context only +func NewATNConfig2(c *ATNConfig, semanticContext SemanticContext) *ATNConfig { + return NewATNConfig(c, c.GetState(), c.GetContext(), semanticContext) +} + +// NewATNConfig1 creates a new ATNConfig instance given an existing config, a state, and a context only +func NewATNConfig1(c *ATNConfig, state ATNState, context *PredictionContext) *ATNConfig { + return NewATNConfig(c, state, context, c.GetSemanticContext()) +} + +// NewATNConfig creates a new ATNConfig instance given an existing config, a state, a context and a semantic context, other 'constructors' +// are just wrappers around this one. +func NewATNConfig(c *ATNConfig, state ATNState, context *PredictionContext, semanticContext SemanticContext) *ATNConfig { + if semanticContext == nil { + panic("semanticContext cannot be nil") // TODO: Remove this - probably put here for some bug that is now fixed + } + b := &ATNConfig{} + b.InitATNConfig(c, state, c.GetAlt(), context, semanticContext) + b.cType = parserConfig + return b +} + +func (a *ATNConfig) InitATNConfig(c *ATNConfig, state ATNState, alt int, context *PredictionContext, semanticContext SemanticContext) { + + a.state = state + a.alt = alt + a.context = context + a.semanticContext = semanticContext + a.reachesIntoOuterContext = c.GetReachesIntoOuterContext() + a.precedenceFilterSuppressed = c.getPrecedenceFilterSuppressed() +} + +func (a *ATNConfig) getPrecedenceFilterSuppressed() bool { + return a.precedenceFilterSuppressed +} + +func (a *ATNConfig) setPrecedenceFilterSuppressed(v bool) { + a.precedenceFilterSuppressed = v +} + +// GetState returns the ATN state associated with this configuration +func (a *ATNConfig) GetState() ATNState { + return a.state +} + +// GetAlt returns the alternative associated with this configuration +func (a *ATNConfig) GetAlt() int { + return a.alt +} + +// SetContext sets the rule invocation stack associated with this configuration +func (a *ATNConfig) SetContext(v *PredictionContext) { + a.context = v +} + +// GetContext returns the rule invocation stack associated with this configuration +func (a *ATNConfig) GetContext() *PredictionContext { + return a.context +} + +// GetSemanticContext returns the semantic context associated with this configuration +func (a *ATNConfig) GetSemanticContext() SemanticContext { + return a.semanticContext +} + +// GetReachesIntoOuterContext returns the count of references to an outer context from this configuration +func (a *ATNConfig) GetReachesIntoOuterContext() int { + return a.reachesIntoOuterContext +} + +// SetReachesIntoOuterContext sets the count of references to an outer context from this configuration +func (a *ATNConfig) SetReachesIntoOuterContext(v int) { + a.reachesIntoOuterContext = v +} + +// Equals is the default comparison function 
for an ATNConfig when no specialist implementation is required +// for a collection. +// +// An ATN configuration is equal to another if both have the same state, they +// predict the same alternative, and syntactic/semantic contexts are the same. +func (a *ATNConfig) Equals(o Collectable[*ATNConfig]) bool { + switch a.cType { + case lexerConfig: + return a.LEquals(o) + case parserConfig: + return a.PEquals(o) + default: + panic("Invalid ATNConfig type") + } +} + +// PEquals is the default comparison function for a Parser ATNConfig when no specialist implementation is required +// for a collection. +// +// An ATN configuration is equal to another if both have the same state, they +// predict the same alternative, and syntactic/semantic contexts are the same. +func (a *ATNConfig) PEquals(o Collectable[*ATNConfig]) bool { + var other, ok = o.(*ATNConfig) + + if !ok { + return false + } + if a == other { + return true + } else if other == nil { + return false + } + + var equal bool + + if a.context == nil { + equal = other.context == nil + } else { + equal = a.context.Equals(other.context) + } + + var ( + nums = a.state.GetStateNumber() == other.state.GetStateNumber() + alts = a.alt == other.alt + cons = a.semanticContext.Equals(other.semanticContext) + sups = a.precedenceFilterSuppressed == other.precedenceFilterSuppressed + ) + + return nums && alts && cons && sups && equal +} + +// Hash is the default hash function for a parser ATNConfig, when no specialist hash function +// is required for a collection +func (a *ATNConfig) Hash() int { + switch a.cType { + case lexerConfig: + return a.LHash() + case parserConfig: + return a.PHash() + default: + panic("Invalid ATNConfig type") + } +} + +// PHash is the default hash function for a parser ATNConfig, when no specialist hash function +// is required for a collection +func (a *ATNConfig) PHash() int { + var c int + if a.context != nil { + c = a.context.Hash() + } + + h := murmurInit(7) + h = murmurUpdate(h, a.state.GetStateNumber()) + h = murmurUpdate(h, a.alt) + h = murmurUpdate(h, c) + h = murmurUpdate(h, a.semanticContext.Hash()) + return murmurFinish(h, 4) +} + +// String returns a string representation of the ATNConfig, usually used for debugging purposes +func (a *ATNConfig) String() string { + var s1, s2, s3 string + + if a.context != nil { + s1 = ",[" + fmt.Sprint(a.context) + "]" + } + + if a.semanticContext != SemanticContextNone { + s2 = "," + fmt.Sprint(a.semanticContext) + } + + if a.reachesIntoOuterContext > 0 { + s3 = ",up=" + fmt.Sprint(a.reachesIntoOuterContext) + } + + return fmt.Sprintf("(%v,%v%v%v%v)", a.state, a.alt, s1, s2, s3) +} + +func NewLexerATNConfig6(state ATNState, alt int, context *PredictionContext) *ATNConfig { + lac := &ATNConfig{} + lac.state = state + lac.alt = alt + lac.context = context + lac.semanticContext = SemanticContextNone + lac.cType = lexerConfig + return lac +} + +func NewLexerATNConfig4(c *ATNConfig, state ATNState) *ATNConfig { + lac := &ATNConfig{} + lac.lexerActionExecutor = c.lexerActionExecutor + lac.passedThroughNonGreedyDecision = checkNonGreedyDecision(c, state) + lac.InitATNConfig(c, state, c.GetAlt(), c.GetContext(), c.GetSemanticContext()) + lac.cType = lexerConfig + return lac +} + +func NewLexerATNConfig3(c *ATNConfig, state ATNState, lexerActionExecutor *LexerActionExecutor) *ATNConfig { + lac := &ATNConfig{} + lac.lexerActionExecutor = lexerActionExecutor + lac.passedThroughNonGreedyDecision = checkNonGreedyDecision(c, state) + lac.InitATNConfig(c, state, c.GetAlt(), 
c.GetContext(), c.GetSemanticContext()) + lac.cType = lexerConfig + return lac +} + +func NewLexerATNConfig2(c *ATNConfig, state ATNState, context *PredictionContext) *ATNConfig { + lac := &ATNConfig{} + lac.lexerActionExecutor = c.lexerActionExecutor + lac.passedThroughNonGreedyDecision = checkNonGreedyDecision(c, state) + lac.InitATNConfig(c, state, c.GetAlt(), context, c.GetSemanticContext()) + lac.cType = lexerConfig + return lac +} + +//goland:noinspection GoUnusedExportedFunction +func NewLexerATNConfig1(state ATNState, alt int, context *PredictionContext) *ATNConfig { + lac := &ATNConfig{} + lac.state = state + lac.alt = alt + lac.context = context + lac.semanticContext = SemanticContextNone + lac.cType = lexerConfig + return lac +} + +// LHash is the default hash function for Lexer ATNConfig objects, it can be used directly or via +// the default comparator [ObjEqComparator]. +func (a *ATNConfig) LHash() int { + var f int + if a.passedThroughNonGreedyDecision { + f = 1 + } else { + f = 0 + } + h := murmurInit(7) + h = murmurUpdate(h, a.state.GetStateNumber()) + h = murmurUpdate(h, a.alt) + h = murmurUpdate(h, a.context.Hash()) + h = murmurUpdate(h, a.semanticContext.Hash()) + h = murmurUpdate(h, f) + h = murmurUpdate(h, a.lexerActionExecutor.Hash()) + h = murmurFinish(h, 6) + return h +} + +// LEquals is the default comparison function for Lexer ATNConfig objects, it can be used directly or via +// the default comparator [ObjEqComparator]. +func (a *ATNConfig) LEquals(other Collectable[*ATNConfig]) bool { + var otherT, ok = other.(*ATNConfig) + if !ok { + return false + } else if a == otherT { + return true + } else if a.passedThroughNonGreedyDecision != otherT.passedThroughNonGreedyDecision { + return false + } + + switch { + case a.lexerActionExecutor == nil && otherT.lexerActionExecutor == nil: + return true + case a.lexerActionExecutor != nil && otherT.lexerActionExecutor != nil: + if !a.lexerActionExecutor.Equals(otherT.lexerActionExecutor) { + return false + } + default: + return false // One but not both, are nil + } + + return a.PEquals(otherT) +} + +func checkNonGreedyDecision(source *ATNConfig, target ATNState) bool { + var ds, ok = target.(DecisionState) + + return source.passedThroughNonGreedyDecision || (ok && ds.getNonGreedy()) +} diff --git a/vendor/github.com/antlr4-go/antlr/v4/atn_config_set.go b/vendor/github.com/antlr4-go/antlr/v4/atn_config_set.go new file mode 100644 index 000000000..52dbaf806 --- /dev/null +++ b/vendor/github.com/antlr4-go/antlr/v4/atn_config_set.go @@ -0,0 +1,301 @@ +// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved. +// Use of this file is governed by the BSD 3-clause license that +// can be found in the LICENSE.txt file in the project root. + +package antlr + +import ( + "fmt" +) + +// ATNConfigSet is a specialized set of ATNConfig that tracks information +// about its elements and can combine similar configurations using a +// graph-structured stack. +type ATNConfigSet struct { + cachedHash int + + // configLookup is used to determine whether two ATNConfigSets are equal. We + // need all configurations with the same (s, i, _, semctx) to be equal. A key + // effectively doubles the number of objects associated with ATNConfigs. All + // keys are hashed by (s, i, _, pi), not including the context. Wiped out when + // read-only because a set becomes a DFA state. 
+ configLookup *JStore[*ATNConfig, Comparator[*ATNConfig]] + + // configs is the added elements that did not match an existing key in configLookup + configs []*ATNConfig + + // TODO: These fields make me pretty uncomfortable, but it is nice to pack up + // info together because it saves re-computation. Can we track conflicts as they + // are added to save scanning configs later? + conflictingAlts *BitSet + + // dipsIntoOuterContext is used by parsers and lexers. In a lexer, it indicates + // we hit a pred while computing a closure operation. Do not make a DFA state + // from the ATNConfigSet in this case. TODO: How is this used by parsers? + dipsIntoOuterContext bool + + // fullCtx is whether it is part of a full context LL prediction. Used to + // determine how to merge $. It is a wildcard with SLL, but not for an LL + // context merge. + fullCtx bool + + // Used in parser and lexer. In lexer, it indicates we hit a pred + // while computing a closure operation. Don't make a DFA state from this set. + hasSemanticContext bool + + // readOnly is whether it is read-only. Do not + // allow any code to manipulate the set if true because DFA states will point at + // sets and those must not change. It not, protect other fields; conflictingAlts + // in particular, which is assigned after readOnly. + readOnly bool + + // TODO: These fields make me pretty uncomfortable, but it is nice to pack up + // info together because it saves re-computation. Can we track conflicts as they + // are added to save scanning configs later? + uniqueAlt int +} + +// Alts returns the combined set of alts for all the configurations in this set. +func (b *ATNConfigSet) Alts() *BitSet { + alts := NewBitSet() + for _, it := range b.configs { + alts.add(it.GetAlt()) + } + return alts +} + +// NewATNConfigSet creates a new ATNConfigSet instance. +func NewATNConfigSet(fullCtx bool) *ATNConfigSet { + return &ATNConfigSet{ + cachedHash: -1, + configLookup: NewJStore[*ATNConfig, Comparator[*ATNConfig]](aConfCompInst, ATNConfigLookupCollection, "NewATNConfigSet()"), + fullCtx: fullCtx, + } +} + +// Add merges contexts with existing configs for (s, i, pi, _), +// where 's' is the ATNConfig.state, 'i' is the ATNConfig.alt, and +// 'pi' is the [ATNConfig].semanticContext. +// +// We use (s,i,pi) as the key. +// Updates dipsIntoOuterContext and hasSemanticContext when necessary. +func (b *ATNConfigSet) Add(config *ATNConfig, mergeCache *JPCMap) bool { + if b.readOnly { + panic("set is read-only") + } + + if config.GetSemanticContext() != SemanticContextNone { + b.hasSemanticContext = true + } + + if config.GetReachesIntoOuterContext() > 0 { + b.dipsIntoOuterContext = true + } + + existing, present := b.configLookup.Put(config) + + // The config was not already in the set + // + if !present { + b.cachedHash = -1 + b.configs = append(b.configs, config) // Track order here + return true + } + + // Merge a previous (s, i, pi, _) with it and save the result + rootIsWildcard := !b.fullCtx + merged := merge(existing.GetContext(), config.GetContext(), rootIsWildcard, mergeCache) + + // No need to check for existing.context because config.context is in the cache, + // since the only way to create new graphs is the "call rule" and here. We cache + // at both places. 
+ existing.SetReachesIntoOuterContext(intMax(existing.GetReachesIntoOuterContext(), config.GetReachesIntoOuterContext())) + + // Preserve the precedence filter suppression during the merge + if config.getPrecedenceFilterSuppressed() { + existing.setPrecedenceFilterSuppressed(true) + } + + // Replace the context because there is no need to do alt mapping + existing.SetContext(merged) + + return true +} + +// GetStates returns the set of states represented by all configurations in this config set +func (b *ATNConfigSet) GetStates() *JStore[ATNState, Comparator[ATNState]] { + + // states uses the standard comparator and Hash() provided by the ATNState instance + // + states := NewJStore[ATNState, Comparator[ATNState]](aStateEqInst, ATNStateCollection, "ATNConfigSet.GetStates()") + + for i := 0; i < len(b.configs); i++ { + states.Put(b.configs[i].GetState()) + } + + return states +} + +func (b *ATNConfigSet) GetPredicates() []SemanticContext { + predicates := make([]SemanticContext, 0) + + for i := 0; i < len(b.configs); i++ { + c := b.configs[i].GetSemanticContext() + + if c != SemanticContextNone { + predicates = append(predicates, c) + } + } + + return predicates +} + +func (b *ATNConfigSet) OptimizeConfigs(interpreter *BaseATNSimulator) { + if b.readOnly { + panic("set is read-only") + } + + // Empty indicate no optimization is possible + if b.configLookup == nil || b.configLookup.Len() == 0 { + return + } + + for i := 0; i < len(b.configs); i++ { + config := b.configs[i] + config.SetContext(interpreter.getCachedContext(config.GetContext())) + } +} + +func (b *ATNConfigSet) AddAll(coll []*ATNConfig) bool { + for i := 0; i < len(coll); i++ { + b.Add(coll[i], nil) + } + + return false +} + +// Compare The configs are only equal if they are in the same order and their Equals function returns true. +// Java uses ArrayList.equals(), which requires the same order. 
+func (b *ATNConfigSet) Compare(bs *ATNConfigSet) bool { + if len(b.configs) != len(bs.configs) { + return false + } + for i := 0; i < len(b.configs); i++ { + if !b.configs[i].Equals(bs.configs[i]) { + return false + } + } + + return true +} + +func (b *ATNConfigSet) Equals(other Collectable[ATNConfig]) bool { + if b == other { + return true + } else if _, ok := other.(*ATNConfigSet); !ok { + return false + } + + other2 := other.(*ATNConfigSet) + var eca bool + switch { + case b.conflictingAlts == nil && other2.conflictingAlts == nil: + eca = true + case b.conflictingAlts != nil && other2.conflictingAlts != nil: + eca = b.conflictingAlts.equals(other2.conflictingAlts) + } + return b.configs != nil && + b.fullCtx == other2.fullCtx && + b.uniqueAlt == other2.uniqueAlt && + eca && + b.hasSemanticContext == other2.hasSemanticContext && + b.dipsIntoOuterContext == other2.dipsIntoOuterContext && + b.Compare(other2) +} + +func (b *ATNConfigSet) Hash() int { + if b.readOnly { + if b.cachedHash == -1 { + b.cachedHash = b.hashCodeConfigs() + } + + return b.cachedHash + } + + return b.hashCodeConfigs() +} + +func (b *ATNConfigSet) hashCodeConfigs() int { + h := 1 + for _, config := range b.configs { + h = 31*h + config.Hash() + } + return h +} + +func (b *ATNConfigSet) Contains(item *ATNConfig) bool { + if b.readOnly { + panic("not implemented for read-only sets") + } + if b.configLookup == nil { + return false + } + return b.configLookup.Contains(item) +} + +func (b *ATNConfigSet) ContainsFast(item *ATNConfig) bool { + return b.Contains(item) +} + +func (b *ATNConfigSet) Clear() { + if b.readOnly { + panic("set is read-only") + } + b.configs = make([]*ATNConfig, 0) + b.cachedHash = -1 + b.configLookup = NewJStore[*ATNConfig, Comparator[*ATNConfig]](aConfCompInst, ATNConfigLookupCollection, "NewATNConfigSet()") +} + +func (b *ATNConfigSet) String() string { + + s := "[" + + for i, c := range b.configs { + s += c.String() + + if i != len(b.configs)-1 { + s += ", " + } + } + + s += "]" + + if b.hasSemanticContext { + s += ",hasSemanticContext=" + fmt.Sprint(b.hasSemanticContext) + } + + if b.uniqueAlt != ATNInvalidAltNumber { + s += ",uniqueAlt=" + fmt.Sprint(b.uniqueAlt) + } + + if b.conflictingAlts != nil { + s += ",conflictingAlts=" + b.conflictingAlts.String() + } + + if b.dipsIntoOuterContext { + s += ",dipsIntoOuterContext" + } + + return s +} + +// NewOrderedATNConfigSet creates a config set with a slightly different Hash/Equal pair +// for use in lexers. 
+func NewOrderedATNConfigSet() *ATNConfigSet { + return &ATNConfigSet{ + cachedHash: -1, + // This set uses the standard Hash() and Equals() from ATNConfig + configLookup: NewJStore[*ATNConfig, Comparator[*ATNConfig]](aConfEqInst, ATNConfigCollection, "ATNConfigSet.NewOrderedATNConfigSet()"), + fullCtx: false, + } +} diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/atn_deserialization_options.go b/vendor/github.com/antlr4-go/antlr/v4/atn_deserialization_options.go similarity index 86% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/atn_deserialization_options.go rename to vendor/github.com/antlr4-go/antlr/v4/atn_deserialization_options.go index 3c975ec7b..bdb30b362 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/atn_deserialization_options.go +++ b/vendor/github.com/antlr4-go/antlr/v4/atn_deserialization_options.go @@ -20,7 +20,7 @@ func (opts *ATNDeserializationOptions) ReadOnly() bool { func (opts *ATNDeserializationOptions) SetReadOnly(readOnly bool) { if opts.readOnly { - panic(errors.New("Cannot mutate read only ATNDeserializationOptions")) + panic(errors.New("cannot mutate read only ATNDeserializationOptions")) } opts.readOnly = readOnly } @@ -31,7 +31,7 @@ func (opts *ATNDeserializationOptions) VerifyATN() bool { func (opts *ATNDeserializationOptions) SetVerifyATN(verifyATN bool) { if opts.readOnly { - panic(errors.New("Cannot mutate read only ATNDeserializationOptions")) + panic(errors.New("cannot mutate read only ATNDeserializationOptions")) } opts.verifyATN = verifyATN } @@ -42,11 +42,12 @@ func (opts *ATNDeserializationOptions) GenerateRuleBypassTransitions() bool { func (opts *ATNDeserializationOptions) SetGenerateRuleBypassTransitions(generateRuleBypassTransitions bool) { if opts.readOnly { - panic(errors.New("Cannot mutate read only ATNDeserializationOptions")) + panic(errors.New("cannot mutate read only ATNDeserializationOptions")) } opts.generateRuleBypassTransitions = generateRuleBypassTransitions } +//goland:noinspection GoUnusedExportedFunction func DefaultATNDeserializationOptions() *ATNDeserializationOptions { return NewATNDeserializationOptions(&defaultATNDeserializationOptions) } diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/atn_deserializer.go b/vendor/github.com/antlr4-go/antlr/v4/atn_deserializer.go similarity index 97% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/atn_deserializer.go rename to vendor/github.com/antlr4-go/antlr/v4/atn_deserializer.go index 3888856b4..2dcb9ae11 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/atn_deserializer.go +++ b/vendor/github.com/antlr4-go/antlr/v4/atn_deserializer.go @@ -35,6 +35,7 @@ func NewATNDeserializer(options *ATNDeserializationOptions) *ATNDeserializer { return &ATNDeserializer{options: options} } +//goland:noinspection GoUnusedFunction func stringInSlice(a string, list []string) int { for i, b := range list { if b == a { @@ -193,7 +194,7 @@ func (a *ATNDeserializer) readModes(atn *ATN) { } } -func (a *ATNDeserializer) readSets(atn *ATN, sets []*IntervalSet) []*IntervalSet { +func (a *ATNDeserializer) readSets(_ *ATN, sets []*IntervalSet) []*IntervalSet { m := a.readInt() // Preallocate the needed capacity. 
@@ -350,7 +351,7 @@ func (a *ATNDeserializer) generateRuleBypassTransition(atn *ATN, idx int) { bypassStart.endState = bypassStop - atn.defineDecisionState(bypassStart.BaseDecisionState) + atn.defineDecisionState(&bypassStart.BaseDecisionState) bypassStop.startState = bypassStart @@ -450,7 +451,7 @@ func (a *ATNDeserializer) markPrecedenceDecisions(atn *ATN) { continue } - // We analyze the ATN to determine if a ATN decision state is the + // We analyze the [ATN] to determine if an ATN decision state is the // decision for the closure block that determines whether a // precedence rule should continue or complete. if atn.ruleToStartState[state.GetRuleIndex()].isPrecedenceRule { @@ -553,7 +554,7 @@ func (a *ATNDeserializer) readInt() int { return int(v) // data is 32 bits but int is at least that big } -func (a *ATNDeserializer) edgeFactory(atn *ATN, typeIndex, src, trg, arg1, arg2, arg3 int, sets []*IntervalSet) Transition { +func (a *ATNDeserializer) edgeFactory(atn *ATN, typeIndex, _, trg, arg1, arg2, arg3 int, sets []*IntervalSet) Transition { target := atn.states[trg] switch typeIndex { diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/atn_simulator.go b/vendor/github.com/antlr4-go/antlr/v4/atn_simulator.go similarity index 66% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/atn_simulator.go rename to vendor/github.com/antlr4-go/antlr/v4/atn_simulator.go index 41529115f..afe6c9f80 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/atn_simulator.go +++ b/vendor/github.com/antlr4-go/antlr/v4/atn_simulator.go @@ -4,7 +4,7 @@ package antlr -var ATNSimulatorError = NewDFAState(0x7FFFFFFF, NewBaseATNConfigSet(false)) +var ATNSimulatorError = NewDFAState(0x7FFFFFFF, NewATNConfigSet(false)) type IATNSimulator interface { SharedContextCache() *PredictionContextCache @@ -18,22 +18,13 @@ type BaseATNSimulator struct { decisionToDFA []*DFA } -func NewBaseATNSimulator(atn *ATN, sharedContextCache *PredictionContextCache) *BaseATNSimulator { - b := new(BaseATNSimulator) - - b.atn = atn - b.sharedContextCache = sharedContextCache - - return b -} - -func (b *BaseATNSimulator) getCachedContext(context PredictionContext) PredictionContext { +func (b *BaseATNSimulator) getCachedContext(context *PredictionContext) *PredictionContext { if b.sharedContextCache == nil { return context } - visited := make(map[PredictionContext]PredictionContext) - + //visited := NewJMap[*PredictionContext, *PredictionContext, Comparator[*PredictionContext]](pContextEqInst, PredictionVisitedCollection, "Visit map in getCachedContext()") + visited := NewVisitRecord() return getCachedBasePredictionContext(context, b.sharedContextCache, visited) } diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/atn_state.go b/vendor/github.com/antlr4-go/antlr/v4/atn_state.go similarity index 65% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/atn_state.go rename to vendor/github.com/antlr4-go/antlr/v4/atn_state.go index 1f2a56bc3..2ae5807cd 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/atn_state.go +++ b/vendor/github.com/antlr4-go/antlr/v4/atn_state.go @@ -4,7 +4,11 @@ package antlr -import "strconv" +import ( + "fmt" + "os" + "strconv" +) // Constants for serialization. 
const ( @@ -25,6 +29,7 @@ const ( ATNStateInvalidStateNumber = -1 ) +//goland:noinspection GoUnusedGlobalVariable var ATNStateInitialNumTransitions = 4 type ATNState interface { @@ -73,7 +78,7 @@ type BaseATNState struct { transitions []Transition } -func NewBaseATNState() *BaseATNState { +func NewATNState() *BaseATNState { return &BaseATNState{stateNumber: ATNStateInvalidStateNumber, stateType: ATNStateInvalidType} } @@ -148,27 +153,46 @@ func (as *BaseATNState) AddTransition(trans Transition, index int) { if len(as.transitions) == 0 { as.epsilonOnlyTransitions = trans.getIsEpsilon() } else if as.epsilonOnlyTransitions != trans.getIsEpsilon() { + _, _ = fmt.Fprintf(os.Stdin, "ATN state %d has both epsilon and non-epsilon transitions.\n", as.stateNumber) as.epsilonOnlyTransitions = false } + // TODO: Check code for already present compared to the Java equivalent + //alreadyPresent := false + //for _, t := range as.transitions { + // if t.getTarget().GetStateNumber() == trans.getTarget().GetStateNumber() { + // if t.getLabel() != nil && trans.getLabel() != nil && trans.getLabel().Equals(t.getLabel()) { + // alreadyPresent = true + // break + // } + // } else if t.getIsEpsilon() && trans.getIsEpsilon() { + // alreadyPresent = true + // break + // } + //} + //if !alreadyPresent { if index == -1 { as.transitions = append(as.transitions, trans) } else { as.transitions = append(as.transitions[:index], append([]Transition{trans}, as.transitions[index:]...)...) // TODO: as.transitions.splice(index, 1, trans) } + //} else { + // _, _ = fmt.Fprintf(os.Stderr, "Transition already present in state %d\n", as.stateNumber) + //} } type BasicState struct { - *BaseATNState + BaseATNState } func NewBasicState() *BasicState { - b := NewBaseATNState() - - b.stateType = ATNStateBasic - - return &BasicState{BaseATNState: b} + return &BasicState{ + BaseATNState: BaseATNState{ + stateNumber: ATNStateInvalidStateNumber, + stateType: ATNStateBasic, + }, + } } type DecisionState interface { @@ -182,13 +206,19 @@ type DecisionState interface { } type BaseDecisionState struct { - *BaseATNState + BaseATNState decision int nonGreedy bool } func NewBaseDecisionState() *BaseDecisionState { - return &BaseDecisionState{BaseATNState: NewBaseATNState(), decision: -1} + return &BaseDecisionState{ + BaseATNState: BaseATNState{ + stateNumber: ATNStateInvalidStateNumber, + stateType: ATNStateBasic, + }, + decision: -1, + } } func (s *BaseDecisionState) getDecision() int { @@ -216,12 +246,20 @@ type BlockStartState interface { // BaseBlockStartState is the start of a regular (...) block. 
type BaseBlockStartState struct { - *BaseDecisionState + BaseDecisionState endState *BlockEndState } func NewBlockStartState() *BaseBlockStartState { - return &BaseBlockStartState{BaseDecisionState: NewBaseDecisionState()} + return &BaseBlockStartState{ + BaseDecisionState: BaseDecisionState{ + BaseATNState: BaseATNState{ + stateNumber: ATNStateInvalidStateNumber, + stateType: ATNStateBasic, + }, + decision: -1, + }, + } } func (s *BaseBlockStartState) getEndState() *BlockEndState { @@ -233,31 +271,38 @@ func (s *BaseBlockStartState) setEndState(b *BlockEndState) { } type BasicBlockStartState struct { - *BaseBlockStartState + BaseBlockStartState } func NewBasicBlockStartState() *BasicBlockStartState { - b := NewBlockStartState() - - b.stateType = ATNStateBlockStart - - return &BasicBlockStartState{BaseBlockStartState: b} + return &BasicBlockStartState{ + BaseBlockStartState: BaseBlockStartState{ + BaseDecisionState: BaseDecisionState{ + BaseATNState: BaseATNState{ + stateNumber: ATNStateInvalidStateNumber, + stateType: ATNStateBlockStart, + }, + }, + }, + } } var _ BlockStartState = &BasicBlockStartState{} // BlockEndState is a terminal node of a simple (a|b|c) block. type BlockEndState struct { - *BaseATNState + BaseATNState startState ATNState } func NewBlockEndState() *BlockEndState { - b := NewBaseATNState() - - b.stateType = ATNStateBlockEnd - - return &BlockEndState{BaseATNState: b} + return &BlockEndState{ + BaseATNState: BaseATNState{ + stateNumber: ATNStateInvalidStateNumber, + stateType: ATNStateBlockEnd, + }, + startState: nil, + } } // RuleStopState is the last node in the ATN for a rule, unless that rule is the @@ -265,43 +310,48 @@ func NewBlockEndState() *BlockEndState { // encode references to all calls to this rule to compute FOLLOW sets for error // handling. type RuleStopState struct { - *BaseATNState + BaseATNState } func NewRuleStopState() *RuleStopState { - b := NewBaseATNState() - - b.stateType = ATNStateRuleStop - - return &RuleStopState{BaseATNState: b} + return &RuleStopState{ + BaseATNState: BaseATNState{ + stateNumber: ATNStateInvalidStateNumber, + stateType: ATNStateRuleStop, + }, + } } type RuleStartState struct { - *BaseATNState + BaseATNState stopState ATNState isPrecedenceRule bool } func NewRuleStartState() *RuleStartState { - b := NewBaseATNState() - - b.stateType = ATNStateRuleStart - - return &RuleStartState{BaseATNState: b} + return &RuleStartState{ + BaseATNState: BaseATNState{ + stateNumber: ATNStateInvalidStateNumber, + stateType: ATNStateRuleStart, + }, + } } // PlusLoopbackState is a decision state for A+ and (A|B)+. It has two // transitions: one to the loop back to start of the block, and one to exit. type PlusLoopbackState struct { - *BaseDecisionState + BaseDecisionState } func NewPlusLoopbackState() *PlusLoopbackState { - b := NewBaseDecisionState() - - b.stateType = ATNStatePlusLoopBack - - return &PlusLoopbackState{BaseDecisionState: b} + return &PlusLoopbackState{ + BaseDecisionState: BaseDecisionState{ + BaseATNState: BaseATNState{ + stateNumber: ATNStateInvalidStateNumber, + stateType: ATNStatePlusLoopBack, + }, + }, + } } // PlusBlockStartState is the start of a (A|B|...)+ loop. Technically it is a @@ -309,85 +359,103 @@ func NewPlusLoopbackState() *PlusLoopbackState { // it is included for completeness. In reality, PlusLoopbackState is the real // decision-making node for A+. 
type PlusBlockStartState struct { - *BaseBlockStartState + BaseBlockStartState loopBackState ATNState } func NewPlusBlockStartState() *PlusBlockStartState { - b := NewBlockStartState() - - b.stateType = ATNStatePlusBlockStart - - return &PlusBlockStartState{BaseBlockStartState: b} + return &PlusBlockStartState{ + BaseBlockStartState: BaseBlockStartState{ + BaseDecisionState: BaseDecisionState{ + BaseATNState: BaseATNState{ + stateNumber: ATNStateInvalidStateNumber, + stateType: ATNStatePlusBlockStart, + }, + }, + }, + } } var _ BlockStartState = &PlusBlockStartState{} // StarBlockStartState is the block that begins a closure loop. type StarBlockStartState struct { - *BaseBlockStartState + BaseBlockStartState } func NewStarBlockStartState() *StarBlockStartState { - b := NewBlockStartState() - - b.stateType = ATNStateStarBlockStart - - return &StarBlockStartState{BaseBlockStartState: b} + return &StarBlockStartState{ + BaseBlockStartState: BaseBlockStartState{ + BaseDecisionState: BaseDecisionState{ + BaseATNState: BaseATNState{ + stateNumber: ATNStateInvalidStateNumber, + stateType: ATNStateStarBlockStart, + }, + }, + }, + } } var _ BlockStartState = &StarBlockStartState{} type StarLoopbackState struct { - *BaseATNState + BaseATNState } func NewStarLoopbackState() *StarLoopbackState { - b := NewBaseATNState() - - b.stateType = ATNStateStarLoopBack - - return &StarLoopbackState{BaseATNState: b} + return &StarLoopbackState{ + BaseATNState: BaseATNState{ + stateNumber: ATNStateInvalidStateNumber, + stateType: ATNStateStarLoopBack, + }, + } } type StarLoopEntryState struct { - *BaseDecisionState + BaseDecisionState loopBackState ATNState precedenceRuleDecision bool } func NewStarLoopEntryState() *StarLoopEntryState { - b := NewBaseDecisionState() - - b.stateType = ATNStateStarLoopEntry - // False precedenceRuleDecision indicates whether s state can benefit from a precedence DFA during SLL decision making. - return &StarLoopEntryState{BaseDecisionState: b} + return &StarLoopEntryState{ + BaseDecisionState: BaseDecisionState{ + BaseATNState: BaseATNState{ + stateNumber: ATNStateInvalidStateNumber, + stateType: ATNStateStarLoopEntry, + }, + }, + } } // LoopEndState marks the end of a * or + loop. type LoopEndState struct { - *BaseATNState + BaseATNState loopBackState ATNState } func NewLoopEndState() *LoopEndState { - b := NewBaseATNState() - - b.stateType = ATNStateLoopEnd - - return &LoopEndState{BaseATNState: b} + return &LoopEndState{ + BaseATNState: BaseATNState{ + stateNumber: ATNStateInvalidStateNumber, + stateType: ATNStateLoopEnd, + }, + } } // TokensStartState is the Tokens rule start state linking to each lexer rule start state. 
type TokensStartState struct { - *BaseDecisionState + BaseDecisionState } func NewTokensStartState() *TokensStartState { - b := NewBaseDecisionState() - - b.stateType = ATNStateTokenStart - - return &TokensStartState{BaseDecisionState: b} + return &TokensStartState{ + BaseDecisionState: BaseDecisionState{ + BaseATNState: BaseATNState{ + stateNumber: ATNStateInvalidStateNumber, + stateType: ATNStateTokenStart, + }, + }, + } } diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/atn_type.go b/vendor/github.com/antlr4-go/antlr/v4/atn_type.go similarity index 100% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/atn_type.go rename to vendor/github.com/antlr4-go/antlr/v4/atn_type.go diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/char_stream.go b/vendor/github.com/antlr4-go/antlr/v4/char_stream.go similarity index 89% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/char_stream.go rename to vendor/github.com/antlr4-go/antlr/v4/char_stream.go index c33f0adb5..bd8127b6b 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/char_stream.go +++ b/vendor/github.com/antlr4-go/antlr/v4/char_stream.go @@ -8,5 +8,5 @@ type CharStream interface { IntStream GetText(int, int) string GetTextFromTokens(start, end Token) string - GetTextFromInterval(*Interval) string + GetTextFromInterval(Interval) string } diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/common_token_factory.go b/vendor/github.com/antlr4-go/antlr/v4/common_token_factory.go similarity index 100% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/common_token_factory.go rename to vendor/github.com/antlr4-go/antlr/v4/common_token_factory.go diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/common_token_stream.go b/vendor/github.com/antlr4-go/antlr/v4/common_token_stream.go similarity index 88% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/common_token_stream.go rename to vendor/github.com/antlr4-go/antlr/v4/common_token_stream.go index c6c9485a2..b75da9df0 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/common_token_stream.go +++ b/vendor/github.com/antlr4-go/antlr/v4/common_token_stream.go @@ -28,22 +28,24 @@ type CommonTokenStream struct { // trivial with bt field. fetchedEOF bool - // index indexs into tokens of the current token (next token to consume). + // index into [tokens] of the current token (next token to consume). // tokens[p] should be LT(1). It is set to -1 when the stream is first // constructed or when SetTokenSource is called, indicating that the first token // has not yet been fetched from the token source. For additional information, - // see the documentation of IntStream for a description of initializing methods. + // see the documentation of [IntStream] for a description of initializing methods. index int - // tokenSource is the TokenSource from which tokens for the bt stream are + // tokenSource is the [TokenSource] from which tokens for the bt stream are // fetched. tokenSource TokenSource - // tokens is all tokens fetched from the token source. The list is considered a + // tokens contains all tokens fetched from the token source. The list is considered a // complete view of the input once fetchedEOF is set to true. tokens []Token } +// NewCommonTokenStream creates a new CommonTokenStream instance using the supplied lexer to produce +// tokens and will pull tokens from the given lexer channel. 
func NewCommonTokenStream(lexer Lexer, channel int) *CommonTokenStream { return &CommonTokenStream{ channel: channel, @@ -53,6 +55,7 @@ func NewCommonTokenStream(lexer Lexer, channel int) *CommonTokenStream { } } +// GetAllTokens returns all tokens currently pulled from the token source. func (c *CommonTokenStream) GetAllTokens() []Token { return c.tokens } @@ -61,9 +64,11 @@ func (c *CommonTokenStream) Mark() int { return 0 } -func (c *CommonTokenStream) Release(marker int) {} +func (c *CommonTokenStream) Release(_ int) {} -func (c *CommonTokenStream) reset() { +func (c *CommonTokenStream) Reset() { + c.fetchedEOF = false + c.tokens = make([]Token, 0) c.Seek(0) } @@ -107,7 +112,7 @@ func (c *CommonTokenStream) Consume() { // Sync makes sure index i in tokens has a token and returns true if a token is // located at index i and otherwise false. func (c *CommonTokenStream) Sync(i int) bool { - n := i - len(c.tokens) + 1 // TODO: How many more elements do we need? + n := i - len(c.tokens) + 1 // How many more elements do we need? if n > 0 { fetched := c.fetch(n) @@ -193,12 +198,13 @@ func (c *CommonTokenStream) SetTokenSource(tokenSource TokenSource) { c.tokenSource = tokenSource c.tokens = make([]Token, 0) c.index = -1 + c.fetchedEOF = false } // NextTokenOnChannel returns the index of the next token on channel given a // starting index. Returns i if tokens[i] is on channel. Returns -1 if there are -// no tokens on channel between i and EOF. -func (c *CommonTokenStream) NextTokenOnChannel(i, channel int) int { +// no tokens on channel between 'i' and [TokenEOF]. +func (c *CommonTokenStream) NextTokenOnChannel(i, _ int) int { c.Sync(i) if i >= len(c.tokens) { @@ -244,7 +250,7 @@ func (c *CommonTokenStream) GetHiddenTokensToRight(tokenIndex, channel int) []To nextOnChannel := c.NextTokenOnChannel(tokenIndex+1, LexerDefaultTokenChannel) from := tokenIndex + 1 - // If no onchannel to the right, then nextOnChannel == -1, so set to to last token + // If no onChannel to the right, then nextOnChannel == -1, so set 'to' to the last token var to int if nextOnChannel == -1 { @@ -314,7 +320,8 @@ func (c *CommonTokenStream) Index() int { } func (c *CommonTokenStream) GetAllText() string { - return c.GetTextFromInterval(nil) + c.Fill() + return c.GetTextFromInterval(NewInterval(0, len(c.tokens)-1)) } func (c *CommonTokenStream) GetTextFromTokens(start, end Token) string { @@ -329,15 +336,9 @@ func (c *CommonTokenStream) GetTextFromRuleContext(interval RuleContext) string return c.GetTextFromInterval(interval.GetSourceInterval()) } -func (c *CommonTokenStream) GetTextFromInterval(interval *Interval) string { +func (c *CommonTokenStream) GetTextFromInterval(interval Interval) string { c.lazyInit() - - if interval == nil { - c.Fill() - interval = NewInterval(0, len(c.tokens)-1) - } else { - c.Sync(interval.Stop) - } + c.Sync(interval.Stop) start := interval.Start stop := interval.Stop diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/comparators.go b/vendor/github.com/antlr4-go/antlr/v4/comparators.go similarity index 82% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/comparators.go rename to vendor/github.com/antlr4-go/antlr/v4/comparators.go index 9ea320053..7467e9b43 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/comparators.go +++ b/vendor/github.com/antlr4-go/antlr/v4/comparators.go @@ -18,17 +18,20 @@ package antlr // type safety and avoid having to implement this for every type that we want to perform comparison on. 
// // This comparator works by using the standard Hash() and Equals() methods of the type T that is being compared. Which -// allows us to use it in any collection instance that does nto require a special hash or equals implementation. +// allows us to use it in any collection instance that does not require a special hash or equals implementation. type ObjEqComparator[T Collectable[T]] struct{} var ( - aStateEqInst = &ObjEqComparator[ATNState]{} - aConfEqInst = &ObjEqComparator[ATNConfig]{} - aConfCompInst = &ATNConfigComparator[ATNConfig]{} - atnConfCompInst = &BaseATNConfigComparator[ATNConfig]{} + aStateEqInst = &ObjEqComparator[ATNState]{} + aConfEqInst = &ObjEqComparator[*ATNConfig]{} + + // aConfCompInst is the comparator used for the ATNConfigSet for the configLookup cache + aConfCompInst = &ATNConfigComparator[*ATNConfig]{} + atnConfCompInst = &BaseATNConfigComparator[*ATNConfig]{} dfaStateEqInst = &ObjEqComparator[*DFAState]{} semctxEqInst = &ObjEqComparator[SemanticContext]{} - atnAltCfgEqInst = &ATNAltConfigComparator[ATNConfig]{} + atnAltCfgEqInst = &ATNAltConfigComparator[*ATNConfig]{} + pContextEqInst = &ObjEqComparator[*PredictionContext]{} ) // Equals2 delegates to the Equals() method of type T @@ -44,14 +47,14 @@ func (c *ObjEqComparator[T]) Hash1(o T) int { type SemCComparator[T Collectable[T]] struct{} -// ATNConfigComparator is used as the compartor for the configLookup field of an ATNConfigSet +// ATNConfigComparator is used as the comparator for the configLookup field of an ATNConfigSet // and has a custom Equals() and Hash() implementation, because equality is not based on the // standard Hash() and Equals() methods of the ATNConfig type. type ATNConfigComparator[T Collectable[T]] struct { } // Equals2 is a custom comparator for ATNConfigs specifically for configLookup -func (c *ATNConfigComparator[T]) Equals2(o1, o2 ATNConfig) bool { +func (c *ATNConfigComparator[T]) Equals2(o1, o2 *ATNConfig) bool { // Same pointer, must be equal, even if both nil // @@ -72,7 +75,8 @@ func (c *ATNConfigComparator[T]) Equals2(o1, o2 ATNConfig) bool { } // Hash1 is custom hash implementation for ATNConfigs specifically for configLookup -func (c *ATNConfigComparator[T]) Hash1(o ATNConfig) int { +func (c *ATNConfigComparator[T]) Hash1(o *ATNConfig) int { + hash := 7 hash = 31*hash + o.GetState().GetStateNumber() hash = 31*hash + o.GetAlt() @@ -85,7 +89,7 @@ type ATNAltConfigComparator[T Collectable[T]] struct { } // Equals2 is a custom comparator for ATNConfigs specifically for configLookup -func (c *ATNAltConfigComparator[T]) Equals2(o1, o2 ATNConfig) bool { +func (c *ATNAltConfigComparator[T]) Equals2(o1, o2 *ATNConfig) bool { // Same pointer, must be equal, even if both nil // @@ -105,21 +109,21 @@ func (c *ATNAltConfigComparator[T]) Equals2(o1, o2 ATNConfig) bool { } // Hash1 is custom hash implementation for ATNConfigs specifically for configLookup -func (c *ATNAltConfigComparator[T]) Hash1(o ATNConfig) int { +func (c *ATNAltConfigComparator[T]) Hash1(o *ATNConfig) int { h := murmurInit(7) h = murmurUpdate(h, o.GetState().GetStateNumber()) h = murmurUpdate(h, o.GetContext().Hash()) return murmurFinish(h, 2) } -// BaseATNConfigComparator is used as the comparator for the configLookup field of a BaseATNConfigSet +// BaseATNConfigComparator is used as the comparator for the configLookup field of a ATNConfigSet // and has a custom Equals() and Hash() implementation, because equality is not based on the // standard Hash() and Equals() methods of the ATNConfig type. 
type BaseATNConfigComparator[T Collectable[T]] struct { } // Equals2 is a custom comparator for ATNConfigs specifically for baseATNConfigSet -func (c *BaseATNConfigComparator[T]) Equals2(o1, o2 ATNConfig) bool { +func (c *BaseATNConfigComparator[T]) Equals2(o1, o2 *ATNConfig) bool { // Same pointer, must be equal, even if both nil // @@ -141,7 +145,6 @@ func (c *BaseATNConfigComparator[T]) Equals2(o1, o2 ATNConfig) bool { // Hash1 is custom hash implementation for ATNConfigs specifically for configLookup, but in fact just // delegates to the standard Hash() method of the ATNConfig type. -func (c *BaseATNConfigComparator[T]) Hash1(o ATNConfig) int { - +func (c *BaseATNConfigComparator[T]) Hash1(o *ATNConfig) int { return o.Hash() } diff --git a/vendor/github.com/antlr4-go/antlr/v4/configuration.go b/vendor/github.com/antlr4-go/antlr/v4/configuration.go new file mode 100644 index 000000000..c2b724514 --- /dev/null +++ b/vendor/github.com/antlr4-go/antlr/v4/configuration.go @@ -0,0 +1,214 @@ +package antlr + +type runtimeConfiguration struct { + statsTraceStacks bool + lexerATNSimulatorDebug bool + lexerATNSimulatorDFADebug bool + parserATNSimulatorDebug bool + parserATNSimulatorTraceATNSim bool + parserATNSimulatorDFADebug bool + parserATNSimulatorRetryDebug bool + lRLoopEntryBranchOpt bool + memoryManager bool +} + +// Global runtime configuration +var runtimeConfig = runtimeConfiguration{ + lRLoopEntryBranchOpt: true, +} + +type runtimeOption func(*runtimeConfiguration) error + +// ConfigureRuntime allows the runtime to be configured globally setting things like trace and statistics options. +// It uses the functional options pattern for go. This is a package global function as it operates on the runtime +// configuration regardless of the instantiation of anything higher up such as a parser or lexer. Generally this is +// used for debugging/tracing/statistics options, which are usually used by the runtime maintainers (or rather the +// only maintainer). However, it is possible that you might want to use this to set a global option concerning the +// memory allocation type used by the runtime such as sync.Pool or not. +// +// The options are applied in the order they are passed in, so the last option will override any previous options. +// +// For example, if you want to turn on the collection create point stack flag to true, you can do: +// +// antlr.ConfigureRuntime(antlr.WithStatsTraceStacks(true)) +// +// If you want to turn it off, you can do: +// +// antlr.ConfigureRuntime(antlr.WithStatsTraceStacks(false)) +func ConfigureRuntime(options ...runtimeOption) error { + for _, option := range options { + err := option(&runtimeConfig) + if err != nil { + return err + } + } + return nil +} + +// WithStatsTraceStacks sets the global flag indicating whether to collect stack traces at the create-point of +// certain structs, such as collections, or the use point of certain methods such as Put(). +// Because this can be expensive, it is turned off by default. However, it +// can be useful to track down exactly where memory is being created and used. 
+// +// Use: +// +// antlr.ConfigureRuntime(antlr.WithStatsTraceStacks(true)) +// +// You can turn it off at any time using: +// +// antlr.ConfigureRuntime(antlr.WithStatsTraceStacks(false)) +func WithStatsTraceStacks(trace bool) runtimeOption { + return func(config *runtimeConfiguration) error { + config.statsTraceStacks = trace + return nil + } +} + +// WithLexerATNSimulatorDebug sets the global flag indicating whether to log debug information from the lexer [ATN] +// simulator. This is useful for debugging lexer issues by comparing the output with the Java runtime. Only useful +// to the runtime maintainers. +// +// Use: +// +// antlr.ConfigureRuntime(antlr.WithLexerATNSimulatorDebug(true)) +// +// You can turn it off at any time using: +// +// antlr.ConfigureRuntime(antlr.WithLexerATNSimulatorDebug(false)) +func WithLexerATNSimulatorDebug(debug bool) runtimeOption { + return func(config *runtimeConfiguration) error { + config.lexerATNSimulatorDebug = debug + return nil + } +} + +// WithLexerATNSimulatorDFADebug sets the global flag indicating whether to log debug information from the lexer [ATN] [DFA] +// simulator. This is useful for debugging lexer issues by comparing the output with the Java runtime. Only useful +// to the runtime maintainers. +// +// Use: +// +// antlr.ConfigureRuntime(antlr.WithLexerATNSimulatorDFADebug(true)) +// +// You can turn it off at any time using: +// +// antlr.ConfigureRuntime(antlr.WithLexerATNSimulatorDFADebug(false)) +func WithLexerATNSimulatorDFADebug(debug bool) runtimeOption { + return func(config *runtimeConfiguration) error { + config.lexerATNSimulatorDFADebug = debug + return nil + } +} + +// WithParserATNSimulatorDebug sets the global flag indicating whether to log debug information from the parser [ATN] +// simulator. This is useful for debugging parser issues by comparing the output with the Java runtime. Only useful +// to the runtime maintainers. +// +// Use: +// +// antlr.ConfigureRuntime(antlr.WithParserATNSimulatorDebug(true)) +// +// You can turn it off at any time using: +// +// antlr.ConfigureRuntime(antlr.WithParserATNSimulatorDebug(false)) +func WithParserATNSimulatorDebug(debug bool) runtimeOption { + return func(config *runtimeConfiguration) error { + config.parserATNSimulatorDebug = debug + return nil + } +} + +// WithParserATNSimulatorTraceATNSim sets the global flag indicating whether to log trace information from the parser [ATN] simulator +// [DFA]. This is useful for debugging parser issues by comparing the output with the Java runtime. Only useful +// to the runtime maintainers. +// +// Use: +// +// antlr.ConfigureRuntime(antlr.WithParserATNSimulatorTraceATNSim(true)) +// +// You can turn it off at any time using: +// +// antlr.ConfigureRuntime(antlr.WithParserATNSimulatorTraceATNSim(false)) +func WithParserATNSimulatorTraceATNSim(trace bool) runtimeOption { + return func(config *runtimeConfiguration) error { + config.parserATNSimulatorTraceATNSim = trace + return nil + } +} + +// WithParserATNSimulatorDFADebug sets the global flag indicating whether to log debug information from the parser [ATN] [DFA] +// simulator. This is useful for debugging parser issues by comparing the output with the Java runtime. Only useful +// to the runtime maintainers. 
+// +// Use: +// +// antlr.ConfigureRuntime(antlr.WithParserATNSimulatorDFADebug(true)) +// +// You can turn it off at any time using: +// +// antlr.ConfigureRuntime(antlr.WithParserATNSimulatorDFADebug(false)) +func WithParserATNSimulatorDFADebug(debug bool) runtimeOption { + return func(config *runtimeConfiguration) error { + config.parserATNSimulatorDFADebug = debug + return nil + } +} + +// WithParserATNSimulatorRetryDebug sets the global flag indicating whether to log debug information from the parser [ATN] [DFA] +// simulator when retrying a decision. This is useful for debugging parser issues by comparing the output with the Java runtime. +// Only useful to the runtime maintainers. +// +// Use: +// +// antlr.ConfigureRuntime(antlr.WithParserATNSimulatorRetryDebug(true)) +// +// You can turn it off at any time using: +// +// antlr.ConfigureRuntime(antlr.WithParserATNSimulatorRetryDebug(false)) +func WithParserATNSimulatorRetryDebug(debug bool) runtimeOption { + return func(config *runtimeConfiguration) error { + config.parserATNSimulatorRetryDebug = debug + return nil + } +} + +// WithLRLoopEntryBranchOpt sets the global flag indicating whether let recursive loop operations should be +// optimized or not. This is useful for debugging parser issues by comparing the output with the Java runtime. +// It turns off the functionality of [canDropLoopEntryEdgeInLeftRecursiveRule] in [ParserATNSimulator]. +// +// Note that default is to use this optimization. +// +// Use: +// +// antlr.ConfigureRuntime(antlr.WithLRLoopEntryBranchOpt(true)) +// +// You can turn it off at any time using: +// +// antlr.ConfigureRuntime(antlr.WithLRLoopEntryBranchOpt(false)) +func WithLRLoopEntryBranchOpt(off bool) runtimeOption { + return func(config *runtimeConfiguration) error { + config.lRLoopEntryBranchOpt = off + return nil + } +} + +// WithMemoryManager sets the global flag indicating whether to use the memory manager or not. This is useful +// for poorly constructed grammars that create a lot of garbage. It turns on the functionality of [memoryManager], which +// will intercept garbage collection and cause available memory to be reused. At the end of the day, this is no substitute +// for fixing your grammar by ridding yourself of extreme ambiguity. BUt if you are just trying to reuse an opensource +// grammar, this may help make it more practical. +// +// Note that default is to use normal Go memory allocation and not pool memory. +// +// Use: +// +// antlr.ConfigureRuntime(antlr.WithMemoryManager(true)) +// +// Note that if you turn this on, you should probably leave it on. You should use only one memory strategy or the other +// and should remember to nil out any references to the parser or lexer when you are done with them. +func WithMemoryManager(use bool) runtimeOption { + return func(config *runtimeConfiguration) error { + config.memoryManager = use + return nil + } +} diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/dfa.go b/vendor/github.com/antlr4-go/antlr/v4/dfa.go similarity index 76% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/dfa.go rename to vendor/github.com/antlr4-go/antlr/v4/dfa.go index bfd43e1f7..6b63eb158 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/dfa.go +++ b/vendor/github.com/antlr4-go/antlr/v4/dfa.go @@ -4,6 +4,8 @@ package antlr +// DFA represents the Deterministic Finite Automaton used by the recognizer, including all the states it can +// reach and the transitions between them. 
type DFA struct { // atnStartState is the ATN state in which this was created atnStartState DecisionState @@ -12,10 +14,9 @@ type DFA struct { // states is all the DFA states. Use Map to get the old state back; Set can only // indicate whether it is there. Go maps implement key hash collisions and so on and are very - // good, but the DFAState is an object and can't be used directly as the key as it can in say JAva + // good, but the DFAState is an object and can't be used directly as the key as it can in say Java // amd C#, whereby if the hashcode is the same for two objects, then Equals() is called against them - // to see if they really are the same object. - // + // to see if they really are the same object. Hence, we have our own map storage. // states *JStore[*DFAState, *ObjEqComparator[*DFAState]] @@ -32,11 +33,11 @@ func NewDFA(atnStartState DecisionState, decision int) *DFA { dfa := &DFA{ atnStartState: atnStartState, decision: decision, - states: NewJStore[*DFAState, *ObjEqComparator[*DFAState]](dfaStateEqInst), + states: nil, // Lazy initialize } if s, ok := atnStartState.(*StarLoopEntryState); ok && s.precedenceRuleDecision { dfa.precedenceDfa = true - dfa.s0 = NewDFAState(-1, NewBaseATNConfigSet(false)) + dfa.s0 = NewDFAState(-1, NewATNConfigSet(false)) dfa.s0.isAcceptState = false dfa.s0.requiresFullContext = false } @@ -95,12 +96,11 @@ func (d *DFA) getPrecedenceDfa() bool { // true or nil otherwise, and d.precedenceDfa is updated. func (d *DFA) setPrecedenceDfa(precedenceDfa bool) { if d.getPrecedenceDfa() != precedenceDfa { - d.states = NewJStore[*DFAState, *ObjEqComparator[*DFAState]](dfaStateEqInst) + d.states = nil // Lazy initialize d.numstates = 0 if precedenceDfa { - precedenceState := NewDFAState(-1, NewBaseATNConfigSet(false)) - + precedenceState := NewDFAState(-1, NewATNConfigSet(false)) precedenceState.setEdges(make([]*DFAState, 0)) precedenceState.isAcceptState = false precedenceState.requiresFullContext = false @@ -113,6 +113,31 @@ func (d *DFA) setPrecedenceDfa(precedenceDfa bool) { } } +// Len returns the number of states in d. We use this instead of accessing states directly so that we can implement lazy +// instantiation of the states JMap. +func (d *DFA) Len() int { + if d.states == nil { + return 0 + } + return d.states.Len() +} + +// Get returns a state that matches s if it is present in the DFA state set. We defer to this +// function instead of accessing states directly so that we can implement lazy instantiation of the states JMap. +func (d *DFA) Get(s *DFAState) (*DFAState, bool) { + if d.states == nil { + return nil, false + } + return d.states.Get(s) +} + +func (d *DFA) Put(s *DFAState) (*DFAState, bool) { + if d.states == nil { + d.states = NewJStore[*DFAState, *ObjEqComparator[*DFAState]](dfaStateEqInst, DFAStateCollection, "DFA via DFA.Put") + } + return d.states.Put(s) +} + func (d *DFA) getS0() *DFAState { return d.s0 } @@ -121,9 +146,11 @@ func (d *DFA) setS0(s *DFAState) { d.s0 = s } -// sortedStates returns the states in d sorted by their state number. +// sortedStates returns the states in d sorted by their state number, or an empty set if d.states is nil. 
func (d *DFA) sortedStates() []*DFAState { - + if d.states == nil { + return []*DFAState{} + } vs := d.states.SortedSlice(func(i, j *DFAState) bool { return i.stateNumber < j.stateNumber }) diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/dfa_serializer.go b/vendor/github.com/antlr4-go/antlr/v4/dfa_serializer.go similarity index 97% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/dfa_serializer.go rename to vendor/github.com/antlr4-go/antlr/v4/dfa_serializer.go index 84d0a31e5..0e1100989 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/dfa_serializer.go +++ b/vendor/github.com/antlr4-go/antlr/v4/dfa_serializer.go @@ -10,7 +10,7 @@ import ( "strings" ) -// DFASerializer is a DFA walker that knows how to dump them to serialized +// DFASerializer is a DFA walker that knows how to dump the DFA states to serialized // strings. type DFASerializer struct { dfa *DFA diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/dfa_state.go b/vendor/github.com/antlr4-go/antlr/v4/dfa_state.go similarity index 81% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/dfa_state.go rename to vendor/github.com/antlr4-go/antlr/v4/dfa_state.go index c90dec55c..654143074 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/dfa_state.go +++ b/vendor/github.com/antlr4-go/antlr/v4/dfa_state.go @@ -22,30 +22,31 @@ func (p *PredPrediction) String() string { return "(" + fmt.Sprint(p.pred) + ", " + fmt.Sprint(p.alt) + ")" } -// DFAState represents a set of possible ATN configurations. As Aho, Sethi, +// DFAState represents a set of possible [ATN] configurations. As Aho, Sethi, // Ullman p. 117 says: "The DFA uses its state to keep track of all possible // states the ATN can be in after reading each input symbol. That is to say, -// after reading input a1a2..an, the DFA is in a state that represents the +// after reading input a1, a2,..an, the DFA is in a state that represents the // subset T of the states of the ATN that are reachable from the ATN's start -// state along some path labeled a1a2..an." In conventional NFA-to-DFA -// conversion, therefore, the subset T would be a bitset representing the set of -// states the ATN could be in. We need to track the alt predicted by each state +// state along some path labeled a1a2..an." +// +// In conventional NFA-to-DFA conversion, therefore, the subset T would be a bitset representing the set of +// states the [ATN] could be in. We need to track the alt predicted by each state // as well, however. More importantly, we need to maintain a stack of states, // tracking the closure operations as they jump from rule to rule, emulating // rule invocations (method calls). I have to add a stack to simulate the proper // lookahead sequences for the underlying LL grammar from which the ATN was // derived. // -// I use a set of ATNConfig objects, not simple states. An ATNConfig is both a -// state (ala normal conversion) and a RuleContext describing the chain of rules +// I use a set of [ATNConfig] objects, not simple states. An [ATNConfig] is both a +// state (ala normal conversion) and a [RuleContext] describing the chain of rules // (if any) followed to arrive at that state. 
// -// A DFAState may have multiple references to a particular state, but with -// different ATN contexts (with same or different alts) meaning that state was +// A [DFAState] may have multiple references to a particular state, but with +// different [ATN] contexts (with same or different alts) meaning that state was // reached via a different set of rule invocations. type DFAState struct { stateNumber int - configs ATNConfigSet + configs *ATNConfigSet // edges elements point to the target of the symbol. Shift up by 1 so (-1) // Token.EOF maps to the first element. @@ -53,7 +54,7 @@ type DFAState struct { isAcceptState bool - // prediction is the ttype we match or alt we predict if the state is accept. + // prediction is the 'ttype' we match or alt we predict if the state is 'accept'. // Set to ATN.INVALID_ALT_NUMBER when predicates != nil or // requiresFullContext. prediction int @@ -81,9 +82,9 @@ type DFAState struct { predicates []*PredPrediction } -func NewDFAState(stateNumber int, configs ATNConfigSet) *DFAState { +func NewDFAState(stateNumber int, configs *ATNConfigSet) *DFAState { if configs == nil { - configs = NewBaseATNConfigSet(false) + configs = NewATNConfigSet(false) } return &DFAState{configs: configs, stateNumber: stateNumber} @@ -94,7 +95,7 @@ func (d *DFAState) GetAltSet() []int { var alts []int if d.configs != nil { - for _, c := range d.configs.GetItems() { + for _, c := range d.configs.configs { alts = append(alts, c.GetAlt()) } } diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/diagnostic_error_listener.go b/vendor/github.com/antlr4-go/antlr/v4/diagnostic_error_listener.go similarity index 92% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/diagnostic_error_listener.go rename to vendor/github.com/antlr4-go/antlr/v4/diagnostic_error_listener.go index c55bcc19b..bd2cd8bc3 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/diagnostic_error_listener.go +++ b/vendor/github.com/antlr4-go/antlr/v4/diagnostic_error_listener.go @@ -33,6 +33,7 @@ type DiagnosticErrorListener struct { exactOnly bool } +//goland:noinspection GoUnusedExportedFunction func NewDiagnosticErrorListener(exactOnly bool) *DiagnosticErrorListener { n := new(DiagnosticErrorListener) @@ -42,7 +43,7 @@ func NewDiagnosticErrorListener(exactOnly bool) *DiagnosticErrorListener { return n } -func (d *DiagnosticErrorListener) ReportAmbiguity(recognizer Parser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs ATNConfigSet) { +func (d *DiagnosticErrorListener) ReportAmbiguity(recognizer Parser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs *ATNConfigSet) { if d.exactOnly && !exact { return } @@ -55,7 +56,7 @@ func (d *DiagnosticErrorListener) ReportAmbiguity(recognizer Parser, dfa *DFA, s recognizer.NotifyErrorListeners(msg, nil, nil) } -func (d *DiagnosticErrorListener) ReportAttemptingFullContext(recognizer Parser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs ATNConfigSet) { +func (d *DiagnosticErrorListener) ReportAttemptingFullContext(recognizer Parser, dfa *DFA, startIndex, stopIndex int, _ *BitSet, _ *ATNConfigSet) { msg := "reportAttemptingFullContext d=" + d.getDecisionDescription(recognizer, dfa) + @@ -64,7 +65,7 @@ func (d *DiagnosticErrorListener) ReportAttemptingFullContext(recognizer Parser, recognizer.NotifyErrorListeners(msg, nil, nil) } -func (d *DiagnosticErrorListener) ReportContextSensitivity(recognizer Parser, dfa *DFA, startIndex, stopIndex, prediction int, configs 
ATNConfigSet) { +func (d *DiagnosticErrorListener) ReportContextSensitivity(recognizer Parser, dfa *DFA, startIndex, stopIndex, _ int, _ *ATNConfigSet) { msg := "reportContextSensitivity d=" + d.getDecisionDescription(recognizer, dfa) + ", input='" + @@ -96,12 +97,12 @@ func (d *DiagnosticErrorListener) getDecisionDescription(recognizer Parser, dfa // @param configs The conflicting or ambiguous configuration set. // @return Returns {@code ReportedAlts} if it is not {@code nil}, otherwise // returns the set of alternatives represented in {@code configs}. -func (d *DiagnosticErrorListener) getConflictingAlts(ReportedAlts *BitSet, set ATNConfigSet) *BitSet { +func (d *DiagnosticErrorListener) getConflictingAlts(ReportedAlts *BitSet, set *ATNConfigSet) *BitSet { if ReportedAlts != nil { return ReportedAlts } result := NewBitSet() - for _, c := range set.GetItems() { + for _, c := range set.configs { result.add(c.GetAlt()) } diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/error_listener.go b/vendor/github.com/antlr4-go/antlr/v4/error_listener.go similarity index 62% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/error_listener.go rename to vendor/github.com/antlr4-go/antlr/v4/error_listener.go index f679f0dcd..21a021643 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/error_listener.go +++ b/vendor/github.com/antlr4-go/antlr/v4/error_listener.go @@ -16,28 +16,29 @@ import ( type ErrorListener interface { SyntaxError(recognizer Recognizer, offendingSymbol interface{}, line, column int, msg string, e RecognitionException) - ReportAmbiguity(recognizer Parser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs ATNConfigSet) - ReportAttemptingFullContext(recognizer Parser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs ATNConfigSet) - ReportContextSensitivity(recognizer Parser, dfa *DFA, startIndex, stopIndex, prediction int, configs ATNConfigSet) + ReportAmbiguity(recognizer Parser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs *ATNConfigSet) + ReportAttemptingFullContext(recognizer Parser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs *ATNConfigSet) + ReportContextSensitivity(recognizer Parser, dfa *DFA, startIndex, stopIndex, prediction int, configs *ATNConfigSet) } type DefaultErrorListener struct { } +//goland:noinspection GoUnusedExportedFunction func NewDefaultErrorListener() *DefaultErrorListener { return new(DefaultErrorListener) } -func (d *DefaultErrorListener) SyntaxError(recognizer Recognizer, offendingSymbol interface{}, line, column int, msg string, e RecognitionException) { +func (d *DefaultErrorListener) SyntaxError(_ Recognizer, _ interface{}, _, _ int, _ string, _ RecognitionException) { } -func (d *DefaultErrorListener) ReportAmbiguity(recognizer Parser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs ATNConfigSet) { +func (d *DefaultErrorListener) ReportAmbiguity(_ Parser, _ *DFA, _, _ int, _ bool, _ *BitSet, _ *ATNConfigSet) { } -func (d *DefaultErrorListener) ReportAttemptingFullContext(recognizer Parser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs ATNConfigSet) { +func (d *DefaultErrorListener) ReportAttemptingFullContext(_ Parser, _ *DFA, _, _ int, _ *BitSet, _ *ATNConfigSet) { } -func (d *DefaultErrorListener) ReportContextSensitivity(recognizer Parser, dfa *DFA, startIndex, stopIndex, prediction int, configs ATNConfigSet) { +func (d *DefaultErrorListener) 
ReportContextSensitivity(_ Parser, _ *DFA, _, _, _ int, _ *ATNConfigSet) { } type ConsoleErrorListener struct { @@ -48,21 +49,16 @@ func NewConsoleErrorListener() *ConsoleErrorListener { return new(ConsoleErrorListener) } -// Provides a default instance of {@link ConsoleErrorListener}. +// ConsoleErrorListenerINSTANCE provides a default instance of {@link ConsoleErrorListener}. var ConsoleErrorListenerINSTANCE = NewConsoleErrorListener() -// {@inheritDoc} +// SyntaxError prints messages to System.err containing the +// values of line, charPositionInLine, and msg using +// the following format: // -//

-// This implementation prints messages to {@link System//err} containing the
-// values of {@code line}, {@code charPositionInLine}, and {@code msg} using
-// the following format.
-//
-// line line:charPositionInLine msg
-//
-func (c *ConsoleErrorListener) SyntaxError(recognizer Recognizer, offendingSymbol interface{}, line, column int, msg string, e RecognitionException) { - fmt.Fprintln(os.Stderr, "line "+strconv.Itoa(line)+":"+strconv.Itoa(column)+" "+msg) +// line : +func (c *ConsoleErrorListener) SyntaxError(_ Recognizer, _ interface{}, line, column int, msg string, _ RecognitionException) { + _, _ = fmt.Fprintln(os.Stderr, "line "+strconv.Itoa(line)+":"+strconv.Itoa(column)+" "+msg) } type ProxyErrorListener struct { @@ -85,19 +81,19 @@ func (p *ProxyErrorListener) SyntaxError(recognizer Recognizer, offendingSymbol } } -func (p *ProxyErrorListener) ReportAmbiguity(recognizer Parser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs ATNConfigSet) { +func (p *ProxyErrorListener) ReportAmbiguity(recognizer Parser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs *ATNConfigSet) { for _, d := range p.delegates { d.ReportAmbiguity(recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs) } } -func (p *ProxyErrorListener) ReportAttemptingFullContext(recognizer Parser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs ATNConfigSet) { +func (p *ProxyErrorListener) ReportAttemptingFullContext(recognizer Parser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs *ATNConfigSet) { for _, d := range p.delegates { d.ReportAttemptingFullContext(recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs) } } -func (p *ProxyErrorListener) ReportContextSensitivity(recognizer Parser, dfa *DFA, startIndex, stopIndex, prediction int, configs ATNConfigSet) { +func (p *ProxyErrorListener) ReportContextSensitivity(recognizer Parser, dfa *DFA, startIndex, stopIndex, prediction int, configs *ATNConfigSet) { for _, d := range p.delegates { d.ReportContextSensitivity(recognizer, dfa, startIndex, stopIndex, prediction, configs) } diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/error_strategy.go b/vendor/github.com/antlr4-go/antlr/v4/error_strategy.go similarity index 58% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/error_strategy.go rename to vendor/github.com/antlr4-go/antlr/v4/error_strategy.go index 5c0a637ba..9db2be1c7 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/error_strategy.go +++ b/vendor/github.com/antlr4-go/antlr/v4/error_strategy.go @@ -21,8 +21,8 @@ type ErrorStrategy interface { ReportMatch(Parser) } -// This is the default implementation of {@link ANTLRErrorStrategy} used for -// error Reporting and recovery in ANTLR parsers. +// DefaultErrorStrategy is the default implementation of ANTLRErrorStrategy used for +// error reporting and recovery in ANTLR parsers. type DefaultErrorStrategy struct { errorRecoveryMode bool lastErrorIndex int @@ -46,7 +46,7 @@ func NewDefaultErrorStrategy() *DefaultErrorStrategy { // The index into the input stream where the last error occurred. // This is used to prevent infinite loops where an error is found // but no token is consumed during recovery...another error is found, - // ad nauseum. This is a failsafe mechanism to guarantee that at least + // ad nauseam. This is a failsafe mechanism to guarantee that at least // one token/tree node is consumed for two errors. // d.lastErrorIndex = -1 @@ -62,50 +62,37 @@ func (d *DefaultErrorStrategy) reset(recognizer Parser) { // This method is called to enter error recovery mode when a recognition // exception is Reported. 
-// -// @param recognizer the parser instance -func (d *DefaultErrorStrategy) beginErrorCondition(recognizer Parser) { +func (d *DefaultErrorStrategy) beginErrorCondition(_ Parser) { d.errorRecoveryMode = true } -func (d *DefaultErrorStrategy) InErrorRecoveryMode(recognizer Parser) bool { +func (d *DefaultErrorStrategy) InErrorRecoveryMode(_ Parser) bool { return d.errorRecoveryMode } // This method is called to leave error recovery mode after recovering from // a recognition exception. -// -// @param recognizer -func (d *DefaultErrorStrategy) endErrorCondition(recognizer Parser) { +func (d *DefaultErrorStrategy) endErrorCondition(_ Parser) { d.errorRecoveryMode = false d.lastErrorStates = nil d.lastErrorIndex = -1 } -// {@inheritDoc} -// -//

-// The default implementation simply calls {@link //endErrorCondition}.

+// ReportMatch is the default implementation of error matching and simply calls endErrorCondition. func (d *DefaultErrorStrategy) ReportMatch(recognizer Parser) { d.endErrorCondition(recognizer) } -// {@inheritDoc} -// -//

-// The default implementation returns immediately if the handler is already
-// in error recovery mode. Otherwise, it calls {@link //beginErrorCondition}
-// and dispatches the Reporting task based on the runtime type of {@code e}
-// according to the following table.
-//
-//   • {@link NoViableAltException}: Dispatches the call to
-//     {@link //ReportNoViableAlternative}
-//   • {@link InputMisMatchException}: Dispatches the call to
-//     {@link //ReportInputMisMatch}
-//   • {@link FailedPredicateException}: Dispatches the call to
-//     {@link //ReportFailedPredicate}
-//   • All other types: calls {@link Parser//NotifyErrorListeners} to Report
-//     the exception
+// ReportError is the default implementation of error reporting. +// It returns immediately if the handler is already +// in error recovery mode. Otherwise, it calls [beginErrorCondition] +// and dispatches the Reporting task based on the runtime type of e +// according to the following table. +// +// [NoViableAltException] : Dispatches the call to [ReportNoViableAlternative] +// [InputMisMatchException] : Dispatches the call to [ReportInputMisMatch] +// [FailedPredicateException] : Dispatches the call to [ReportFailedPredicate] +// All other types : Calls [NotifyErrorListeners] to Report the exception func (d *DefaultErrorStrategy) ReportError(recognizer Parser, e RecognitionException) { // if we've already Reported an error and have not Matched a token // yet successfully, don't Report any errors. @@ -128,12 +115,10 @@ func (d *DefaultErrorStrategy) ReportError(recognizer Parser, e RecognitionExcep } } -// {@inheritDoc} -// -//

-// The default implementation reSynchronizes the parser by consuming tokens
-// until we find one in the reSynchronization set--loosely the set of tokens
-// that can follow the current rule.

-func (d *DefaultErrorStrategy) Recover(recognizer Parser, e RecognitionException) { +// Recover is the default recovery implementation. +// It reSynchronizes the parser by consuming tokens until we find one in the reSynchronization set - +// loosely the set of tokens that can follow the current rule. +func (d *DefaultErrorStrategy) Recover(recognizer Parser, _ RecognitionException) { if d.lastErrorIndex == recognizer.GetInputStream().Index() && d.lastErrorStates != nil && d.lastErrorStates.contains(recognizer.GetState()) { @@ -148,54 +133,58 @@ func (d *DefaultErrorStrategy) Recover(recognizer Parser, e RecognitionException d.lastErrorStates = NewIntervalSet() } d.lastErrorStates.addOne(recognizer.GetState()) - followSet := d.getErrorRecoverySet(recognizer) + followSet := d.GetErrorRecoverySet(recognizer) d.consumeUntil(recognizer, followSet) } -// The default implementation of {@link ANTLRErrorStrategy//Sync} makes sure -// that the current lookahead symbol is consistent with what were expecting -// at d point in the ATN. You can call d anytime but ANTLR only -// generates code to check before subrules/loops and each iteration. +// Sync is the default implementation of error strategy synchronization. +// +// This Sync makes sure that the current lookahead symbol is consistent with what were expecting +// at this point in the [ATN]. You can call this anytime but ANTLR only +// generates code to check before sub-rules/loops and each iteration. // -//

-// Implements Jim Idle's magic Sync mechanism in closures and optional
-// subrules. E.g.,

+// Implements [Jim Idle]'s magic Sync mechanism in closures and optional +// sub-rules. E.g.: // -//
-// a : Sync ( stuff Sync )*
-// Sync : {consume to what can follow Sync}
-// 
+// a : Sync ( stuff Sync )* +// Sync : {consume to what can follow Sync} // -// At the start of a sub rule upon error, {@link //Sync} performs single +// At the start of a sub-rule upon error, Sync performs single // token deletion, if possible. If it can't do that, it bails on the current // rule and uses the default error recovery, which consumes until the // reSynchronization set of the current rule. // -//

-// If the sub rule is optional ({@code (...)?}, {@code (...)*}, or block
-// with an empty alternative), then the expected set includes what follows
-// the subrule.

+// If the sub-rule is optional +// +// ({@code (...)?}, {@code (...)*}, // -//

-// During loop iteration, it consumes until it sees a token that can start a
-// sub rule or what follows loop. Yes, that is pretty aggressive. We opt to
-// stay in the loop as long as possible.

+// or a block with an empty alternative), then the expected set includes what follows +// the sub-rule. // -//

-// ORIGINS

+// During loop iteration, it consumes until it sees a token that can start a +// sub-rule or what follows loop. Yes, that is pretty aggressive. We opt to +// stay in the loop as long as possible. // -//

-// Previous versions of ANTLR did a poor job of their recovery within loops.
 // A single mismatch token or missing token would force the parser to bail
-// out of the entire rules surrounding the loop. So, for rule

+// out of the entire rules surrounding the loop. So, for rule: // -//
-// classfunc : 'class' ID '{' member* '}'
-// 
+// classfunc : 'class' ID '{' member* '}' // // input with an extra token between members would force the parser to // consume until it found the next class definition rather than the next // member definition of the current class. // -//

-// This functionality cost a little bit of effort because the parser has to
-// compare token set at the start of the loop and at each iteration. If for
-// some reason speed is suffering for you, you can turn off d
-// functionality by simply overriding d method as a blank { }.

+// This functionality cost a bit of effort because the parser has to +// compare the token set at the start of the loop and at each iteration. If for +// some reason speed is suffering for you, you can turn off this +// functionality by simply overriding this method as empty: +// +// { } +// +// [Jim Idle]: https://github.com/jimidle func (d *DefaultErrorStrategy) Sync(recognizer Parser) { // If already recovering, don't try to Sync if d.InErrorRecoveryMode(recognizer) { @@ -217,25 +206,21 @@ func (d *DefaultErrorStrategy) Sync(recognizer Parser) { if d.SingleTokenDeletion(recognizer) != nil { return } - panic(NewInputMisMatchException(recognizer)) + recognizer.SetError(NewInputMisMatchException(recognizer)) case ATNStatePlusLoopBack, ATNStateStarLoopBack: d.ReportUnwantedToken(recognizer) expecting := NewIntervalSet() expecting.addSet(recognizer.GetExpectedTokens()) - whatFollowsLoopIterationOrRule := expecting.addSet(d.getErrorRecoverySet(recognizer)) + whatFollowsLoopIterationOrRule := expecting.addSet(d.GetErrorRecoverySet(recognizer)) d.consumeUntil(recognizer, whatFollowsLoopIterationOrRule) default: // do nothing if we can't identify the exact kind of ATN state } } -// This is called by {@link //ReportError} when the exception is a -// {@link NoViableAltException}. -// -// @see //ReportError +// ReportNoViableAlternative is called by [ReportError] when the exception is a [NoViableAltException]. // -// @param recognizer the parser instance -// @param e the recognition exception +// See also [ReportError] func (d *DefaultErrorStrategy) ReportNoViableAlternative(recognizer Parser, e *NoViableAltException) { tokens := recognizer.GetTokenStream() var input string @@ -252,48 +237,38 @@ func (d *DefaultErrorStrategy) ReportNoViableAlternative(recognizer Parser, e *N recognizer.NotifyErrorListeners(msg, e.offendingToken, e) } -// This is called by {@link //ReportError} when the exception is an -// {@link InputMisMatchException}. +// ReportInputMisMatch is called by [ReportError] when the exception is an [InputMisMatchException] // -// @see //ReportError -// -// @param recognizer the parser instance -// @param e the recognition exception -func (this *DefaultErrorStrategy) ReportInputMisMatch(recognizer Parser, e *InputMisMatchException) { - msg := "mismatched input " + this.GetTokenErrorDisplay(e.offendingToken) + +// See also: [ReportError] +func (d *DefaultErrorStrategy) ReportInputMisMatch(recognizer Parser, e *InputMisMatchException) { + msg := "mismatched input " + d.GetTokenErrorDisplay(e.offendingToken) + " expecting " + e.getExpectedTokens().StringVerbose(recognizer.GetLiteralNames(), recognizer.GetSymbolicNames(), false) recognizer.NotifyErrorListeners(msg, e.offendingToken, e) } -// This is called by {@link //ReportError} when the exception is a -// {@link FailedPredicateException}. -// -// @see //ReportError +// ReportFailedPredicate is called by [ReportError] when the exception is a [FailedPredicateException]. 
// -// @param recognizer the parser instance -// @param e the recognition exception +// See also: [ReportError] func (d *DefaultErrorStrategy) ReportFailedPredicate(recognizer Parser, e *FailedPredicateException) { ruleName := recognizer.GetRuleNames()[recognizer.GetParserRuleContext().GetRuleIndex()] msg := "rule " + ruleName + " " + e.message recognizer.NotifyErrorListeners(msg, e.offendingToken, e) } -// This method is called to Report a syntax error which requires the removal +// ReportUnwantedToken is called to report a syntax error that requires the removal // of a token from the input stream. At the time d method is called, the -// erroneous symbol is current {@code LT(1)} symbol and has not yet been -// removed from the input stream. When d method returns, -// {@code recognizer} is in error recovery mode. +// erroneous symbol is the current LT(1) symbol and has not yet been +// removed from the input stream. When this method returns, +// recognizer is in error recovery mode. // -//

This method is called when {@link //singleTokenDeletion} identifies +// This method is called when singleTokenDeletion identifies // single-token deletion as a viable recovery strategy for a mismatched -// input error.
+// input error. // -//
The default implementation simply returns if the handler is already in -// error recovery mode. Otherwise, it calls {@link //beginErrorCondition} to +// The default implementation simply returns if the handler is already in +// error recovery mode. Otherwise, it calls beginErrorCondition to // enter error recovery mode, followed by calling -// {@link Parser//NotifyErrorListeners}.
-// -// @param recognizer the parser instance +// [NotifyErrorListeners] func (d *DefaultErrorStrategy) ReportUnwantedToken(recognizer Parser) { if d.InErrorRecoveryMode(recognizer) { return @@ -307,21 +282,18 @@ func (d *DefaultErrorStrategy) ReportUnwantedToken(recognizer Parser) { recognizer.NotifyErrorListeners(msg, t, nil) } -// This method is called to Report a syntax error which requires the -// insertion of a missing token into the input stream. At the time d -// method is called, the missing token has not yet been inserted. When d -// method returns, {@code recognizer} is in error recovery mode. +// ReportMissingToken is called to report a syntax error which requires the +// insertion of a missing token into the input stream. At the time this +// method is called, the missing token has not yet been inserted. When this +// method returns, recognizer is in error recovery mode. // -//

This method is called when {@link //singleTokenInsertion} identifies +// This method is called when singleTokenInsertion identifies // single-token insertion as a viable recovery strategy for a mismatched -// input error.
+// input error. // -//
The default implementation simply returns if the handler is already in -// error recovery mode. Otherwise, it calls {@link //beginErrorCondition} to -// enter error recovery mode, followed by calling -// {@link Parser//NotifyErrorListeners}.
-// -// @param recognizer the parser instance +// The default implementation simply returns if the handler is already in +// error recovery mode. Otherwise, it calls beginErrorCondition to +// enter error recovery mode, followed by calling [NotifyErrorListeners] func (d *DefaultErrorStrategy) ReportMissingToken(recognizer Parser) { if d.InErrorRecoveryMode(recognizer) { return @@ -334,54 +306,48 @@ func (d *DefaultErrorStrategy) ReportMissingToken(recognizer Parser) { recognizer.NotifyErrorListeners(msg, t, nil) } -//
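ReportUnwantedToken and ReportMissingToken both deliver their messages through NotifyErrorListeners, so they are only observable if a listener is attached. A minimal collecting listener as a sketch, assuming the runtime is imported as antlr, fmt is imported, and p is a generated parser (illustrative names):

    // collectingListener gathers syntax-error messages instead of printing them.
    type collectingListener struct {
        *antlr.DefaultErrorListener
        msgs []string
    }

    func (l *collectingListener) SyntaxError(_ antlr.Recognizer, _ interface{}, line, column int, msg string, _ antlr.RecognitionException) {
        l.msgs = append(l.msgs, fmt.Sprintf("%d:%d %s", line, column, msg))
    }

    // p.RemoveErrorListeners()
    // p.AddErrorListener(&collectingListener{DefaultErrorListener: antlr.NewDefaultErrorListener()})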

The default implementation attempts to recover from the mismatched input +// The RecoverInline default implementation attempts to recover from the mismatched input // by using single token insertion and deletion as described below. If the -// recovery attempt fails, d method panics an -// {@link InputMisMatchException}.
+// recovery attempt fails, this method panics with [InputMisMatchException}. +// TODO: Not sure that panic() is the right thing to do here - JI // -//
EXTRA TOKEN (single token deletion)
+// # EXTRA TOKEN (single token deletion) // -//
{@code LA(1)} is not what we are looking for. If {@code LA(2)} has the -// right token, however, then assume {@code LA(1)} is some extra spurious +// LA(1) is not what we are looking for. If LA(2) has the +// right token, however, then assume LA(1) is some extra spurious // token and delete it. Then consume and return the next token (which was -// the {@code LA(2)} token) as the successful result of the Match operation.
+// the LA(2) token) as the successful result of the Match operation. // -//
This recovery strategy is implemented by {@link -// //singleTokenDeletion}.
+// # This recovery strategy is implemented by singleTokenDeletion // -//
MISSING TOKEN (single token insertion)
+// # MISSING TOKEN (single token insertion) // -//
If current token (at {@code LA(1)}) is consistent with what could come -// after the expected {@code LA(1)} token, then assume the token is missing -// and use the parser's {@link TokenFactory} to create it on the fly. The -// "insertion" is performed by returning the created token as the successful -// result of the Match operation.
+// If current token -at LA(1) - is consistent with what could come +// after the expected LA(1) token, then assume the token is missing +// and use the parser's [TokenFactory] to create it on the fly. The +// “insertion” is performed by returning the created token as the successful +// result of the Match operation. // -//
This recovery strategy is implemented by {@link -// //singleTokenInsertion}.
+// This recovery strategy is implemented by [SingleTokenInsertion]. // -//
EXAMPLE
+// # Example // -//
For example, Input {@code i=(3} is clearly missing the {@code ')'}. When -// the parser returns from the nested call to {@code expr}, it will have -// call chain:
+// For example, Input i=(3 is clearly missing the ')'. When +// the parser returns from the nested call to expr, it will have +// call the chain: // -//
-// stat &rarr expr &rarr atom
-// 
+// stat → expr → atom // -// and it will be trying to Match the {@code ')'} at d point in the +// and it will be trying to Match the ')' at this point in the // derivation: // -//
-// => ID '=' '(' INT ')' ('+' atom)* ”
-// ^
-// 
+// : ID '=' '(' INT ')' ('+' atom)* ';' +// ^ // -// The attempt to Match {@code ')'} will fail when it sees {@code ”} and -// call {@link //recoverInline}. To recover, it sees that {@code LA(1)==”} -// is in the set of tokens that can follow the {@code ')'} token reference -// in rule {@code atom}. It can assume that you forgot the {@code ')'}. +// The attempt to [Match] ')' will fail when it sees ';' and +// call [RecoverInline]. To recover, it sees that LA(1)==';' +// is in the set of tokens that can follow the ')' token reference +// in rule atom. It can assume that you forgot the ')'. func (d *DefaultErrorStrategy) RecoverInline(recognizer Parser) Token { // SINGLE TOKEN DELETION MatchedSymbol := d.SingleTokenDeletion(recognizer) @@ -396,24 +362,24 @@ func (d *DefaultErrorStrategy) RecoverInline(recognizer Parser) Token { return d.GetMissingSymbol(recognizer) } // even that didn't work must panic the exception - panic(NewInputMisMatchException(recognizer)) + recognizer.SetError(NewInputMisMatchException(recognizer)) + return nil } -// This method implements the single-token insertion inline error recovery -// strategy. It is called by {@link //recoverInline} if the single-token +// SingleTokenInsertion implements the single-token insertion inline error recovery +// strategy. It is called by [RecoverInline] if the single-token // deletion strategy fails to recover from the mismatched input. If this // method returns {@code true}, {@code recognizer} will be in error recovery // mode. // -//
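The deletion-then-insertion cascade that RecoverInline performs can be observed by wrapping the default strategy; the parser invokes the strategy through the ErrorStrategy interface, so the wrapper's method is the one called. A sketch, assuming the runtime is imported as antlr (names illustrative):

    type tracingStrategy struct {
        *antlr.DefaultErrorStrategy
    }

    func (s *tracingStrategy) RecoverInline(recognizer antlr.Parser) antlr.Token {
        // tok is either the LA(2) token (single-token deletion) or a
        // conjured missing token (single-token insertion); inspect or log it here.
        tok := s.DefaultErrorStrategy.RecoverInline(recognizer)
        return tok
    }

    // p.SetErrorHandler(&tracingStrategy{DefaultErrorStrategy: antlr.NewDefaultErrorStrategy()})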

This method determines whether or not single-token insertion is viable by -// checking if the {@code LA(1)} input symbol could be successfully Matched -// if it were instead the {@code LA(2)} symbol. If d method returns +// This method determines whether single-token insertion is viable by +// checking if the LA(1) input symbol could be successfully Matched +// if it were instead the LA(2) symbol. If this method returns // {@code true}, the caller is responsible for creating and inserting a -// token with the correct type to produce d behavior.
+// token with the correct type to produce this behavior.
// -// @param recognizer the parser instance -// @return {@code true} if single-token insertion is a viable recovery -// strategy for the current mismatched input, otherwise {@code false} +// This func returns true if single-token insertion is a viable recovery +// strategy for the current mismatched input. func (d *DefaultErrorStrategy) SingleTokenInsertion(recognizer Parser) bool { currentSymbolType := recognizer.GetTokenStream().LA(1) // if current token is consistent with what could come after current @@ -431,23 +397,21 @@ func (d *DefaultErrorStrategy) SingleTokenInsertion(recognizer Parser) bool { return false } -// This method implements the single-token deletion inline error recovery -// strategy. It is called by {@link //recoverInline} to attempt to recover +// SingleTokenDeletion implements the single-token deletion inline error recovery +// strategy. It is called by [RecoverInline] to attempt to recover // from mismatched input. If this method returns nil, the parser and error // handler state will not have changed. If this method returns non-nil, -// {@code recognizer} will not be in error recovery mode since the +// recognizer will not be in error recovery mode since the // returned token was a successful Match. // -//
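The viability checks above lean on token-stream lookahead: LA(1) is the type of the current token and LA(2) the type of the one after it. A small sketch of that lookahead on a CommonTokenStream, assuming the runtime is imported as antlr and lexer is a generated lexer (illustrative):

    tokens := antlr.NewCommonTokenStream(lexer, antlr.TokenDefaultChannel)
    tokens.Fill()
    current := tokens.LA(1) // token type at the current position
    next := tokens.LA(2)    // token type one position ahead
    _, _ = current, next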

If the single-token deletion is successful, d method calls -// {@link //ReportUnwantedToken} to Report the error, followed by -// {@link Parser//consume} to actually "delete" the extraneous token. Then, -// before returning {@link //ReportMatch} is called to signal a successful -// Match.

+// If the single-token deletion is successful, this method calls +// [ReportUnwantedToken] to Report the error, followed by +// [Consume] to actually “delete” the extraneous token. Then, +// before returning, [ReportMatch] is called to signal a successful +// Match. // -// @param recognizer the parser instance -// @return the successfully Matched {@link Token} instance if single-token -// deletion successfully recovers from the mismatched input, otherwise -// {@code nil} +// The func returns the successfully Matched [Token] instance if single-token +// deletion successfully recovers from the mismatched input, otherwise nil. func (d *DefaultErrorStrategy) SingleTokenDeletion(recognizer Parser) Token { NextTokenType := recognizer.GetTokenStream().LA(2) expecting := d.GetExpectedTokens(recognizer) @@ -467,24 +431,28 @@ func (d *DefaultErrorStrategy) SingleTokenDeletion(recognizer Parser) Token { return nil } -// Conjure up a missing token during error recovery. +// GetMissingSymbol conjures up a missing token during error recovery. // // The recognizer attempts to recover from single missing // symbols. But, actions might refer to that missing symbol. -// For example, x=ID {f($x)}. The action clearly assumes +// For example: +// +// x=ID {f($x)}. +// +// The action clearly assumes // that there has been an identifier Matched previously and that // $x points at that token. If that token is missing, but // the next token in the stream is what we want we assume that -// d token is missing and we keep going. Because we +// this token is missing, and we keep going. Because we // have to return some token to replace the missing token, // we have to conjure one up. This method gives the user control // over the tokens returned for missing tokens. Mostly, // you will want to create something special for identifier // tokens. For literals such as '{' and ',', the default // action in the parser or tree parser works. It simply creates -// a CommonToken of the appropriate type. The text will be the token. -// If you change what tokens must be created by the lexer, -// override d method to create the appropriate tokens. +// a [CommonToken] of the appropriate type. The text will be the token name. +// If you need to change which tokens must be created by the lexer, +// override this method to create the appropriate tokens. func (d *DefaultErrorStrategy) GetMissingSymbol(recognizer Parser) Token { currentSymbol := recognizer.GetCurrentToken() expecting := d.GetExpectedTokens(recognizer) @@ -498,7 +466,7 @@ func (d *DefaultErrorStrategy) GetMissingSymbol(recognizer Parser) Token { if expectedTokenType > 0 && expectedTokenType < len(ln) { tokenText = "" } else { - tokenText = "" // TODO matches the JS impl + tokenText = "" // TODO: matches the JS impl } } current := currentSymbol @@ -516,13 +484,13 @@ func (d *DefaultErrorStrategy) GetExpectedTokens(recognizer Parser) *IntervalSet return recognizer.GetExpectedTokens() } -// How should a token be displayed in an error message? The default -// is to display just the text, but during development you might -// want to have a lot of information spit out. Override in that case -// to use t.String() (which, for CommonToken, dumps everything about +// GetTokenErrorDisplay determines how a token should be displayed in an error message. +// The default is to display just the text, but during development you might +// want to have a lot of information spit out. 
Override this func in that case +// to use t.String() (which, for [CommonToken], dumps everything about // the token). This is better than forcing you to override a method in // your token objects because you don't have to go modify your lexer -// so that it creates a NewJava type. +// so that it creates a new type. func (d *DefaultErrorStrategy) GetTokenErrorDisplay(t Token) string { if t == nil { return "" @@ -545,52 +513,57 @@ func (d *DefaultErrorStrategy) escapeWSAndQuote(s string) string { return "'" + s + "'" } -// Compute the error recovery set for the current rule. During +// GetErrorRecoverySet computes the error recovery set for the current rule. During // rule invocation, the parser pushes the set of tokens that can -// follow that rule reference on the stack d amounts to +// follow that rule reference on the stack. This amounts to // computing FIRST of what follows the rule reference in the // enclosing rule. See LinearApproximator.FIRST(). +// // This local follow set only includes tokens // from within the rule i.e., the FIRST computation done by // ANTLR stops at the end of a rule. // -// # EXAMPLE +// # Example // // When you find a "no viable alt exception", the input is not // consistent with any of the alternatives for rule r. The best // thing to do is to consume tokens until you see something that -// can legally follow a call to r//or* any rule that called r. +// can legally follow a call to r or any rule that called r. // You don't want the exact set of viable next tokens because the // input might just be missing a token--you might consume the // rest of the input looking for one of the missing tokens. // -// Consider grammar: +// Consider the grammar: +// +// a : '[' b ']' +// | '(' b ')' +// ; // -// a : '[' b ']' -// | '(' b ')' +// b : c '^' INT +// ; // -// b : c '^' INT -// c : ID -// | INT +// c : ID +// | INT +// ; // // At each rule invocation, the set of tokens that could follow // that rule is pushed on a stack. Here are the various // context-sensitive follow sets: // -// FOLLOW(b1_in_a) = FIRST(']') = ']' -// FOLLOW(b2_in_a) = FIRST(')') = ')' -// FOLLOW(c_in_b) = FIRST('^') = '^' +// FOLLOW(b1_in_a) = FIRST(']') = ']' +// FOLLOW(b2_in_a) = FIRST(')') = ')' +// FOLLOW(c_in_b) = FIRST('^') = '^' // -// Upon erroneous input "[]", the call chain is +// Upon erroneous input “[]”, the call chain is // -// a -> b -> c +// a → b → c // // and, hence, the follow context stack is: // -// depth follow set start of rule execution -// 0 a (from main()) -// 1 ']' b -// 2 '^' c +// Depth Follow set Start of rule execution +// 0 a (from main()) +// 1 ']' b +// 2 '^' c // // Notice that ')' is not included, because b would have to have // been called from a different context in rule a for ')' to be @@ -598,11 +571,14 @@ func (d *DefaultErrorStrategy) escapeWSAndQuote(s string) string { // // For error recovery, we cannot consider FOLLOW(c) // (context-sensitive or otherwise). We need the combined set of -// all context-sensitive FOLLOW sets--the set of all tokens that +// all context-sensitive FOLLOW sets - the set of all tokens that // could follow any reference in the call chain. We need to // reSync to one of those tokens. Note that FOLLOW(c)='^' and if // we reSync'd to that token, we'd consume until EOF. We need to -// Sync to context-sensitive FOLLOWs for a, b, and c: {']','^'}. +// Sync to context-sensitive FOLLOWs for a, b, and c: +// +// {']','^'} +// // In this case, for input "[]", LA(1) is ']' and in the set, so we would // not consume anything. 
After printing an error, rule c would // return normally. Rule b would not find the required '^' though. @@ -620,22 +596,19 @@ func (d *DefaultErrorStrategy) escapeWSAndQuote(s string) string { // // ANTLR's error recovery mechanism is based upon original ideas: // -// "Algorithms + Data Structures = Programs" by Niklaus Wirth -// -// and -// -// "A note on error recovery in recursive descent parsers": -// http://portal.acm.org/citation.cfm?id=947902.947905 +// [Algorithms + Data Structures = Programs] by Niklaus Wirth and +// [A note on error recovery in recursive descent parsers]. // -// Later, Josef Grosch had some good ideas: +// Later, Josef Grosch had some good ideas in [Efficient and Comfortable Error Recovery in Recursive Descent +// Parsers] // -// "Efficient and Comfortable Error Recovery in Recursive Descent -// Parsers": -// ftp://www.cocolab.com/products/cocktail/doca4.ps/ell.ps.zip +// Like Grosch I implement context-sensitive FOLLOW sets that are combined at run-time upon error to avoid overhead +// during parsing. Later, the runtime Sync was improved for loops/sub-rules see [Sync] docs // -// Like Grosch I implement context-sensitive FOLLOW sets that are combined -// at run-time upon error to avoid overhead during parsing. -func (d *DefaultErrorStrategy) getErrorRecoverySet(recognizer Parser) *IntervalSet { +// [A note on error recovery in recursive descent parsers]: http://portal.acm.org/citation.cfm?id=947902.947905 +// [Algorithms + Data Structures = Programs]: https://t.ly/5QzgE +// [Efficient and Comfortable Error Recovery in Recursive Descent Parsers]: ftp://www.cocolab.com/products/cocktail/doca4.ps/ell.ps.zip +func (d *DefaultErrorStrategy) GetErrorRecoverySet(recognizer Parser) *IntervalSet { atn := recognizer.GetInterpreter().atn ctx := recognizer.GetParserRuleContext() recoverSet := NewIntervalSet() @@ -660,40 +633,36 @@ func (d *DefaultErrorStrategy) consumeUntil(recognizer Parser, set *IntervalSet) } } -// -// This implementation of {@link ANTLRErrorStrategy} responds to syntax errors +// The BailErrorStrategy implementation of ANTLRErrorStrategy responds to syntax errors // by immediately canceling the parse operation with a -// {@link ParseCancellationException}. The implementation ensures that the -// {@link ParserRuleContext//exception} field is set for all parse tree nodes +// [ParseCancellationException]. The implementation ensures that the +// [ParserRuleContext//exception] field is set for all parse tree nodes // that were not completed prior to encountering the error. // -//

-// This error strategy is useful in the following scenarios.

+// This error strategy is useful in the following scenarios. // -//
    -//
• Two-stage parsing: This error strategy allows the first -// stage of two-stage parsing to immediately terminate if an error is -// encountered, and immediately fall back to the second stage. In addition to -// avoiding wasted work by attempting to recover from errors here, the empty -// implementation of {@link BailErrorStrategy//Sync} improves the performance of -// the first stage. -//
• Silent validation: When syntax errors are not being -// Reported or logged, and the parse result is simply ignored if errors occur, -// the {@link BailErrorStrategy} avoids wasting work on recovering from errors -// when the result will be ignored either way. -//
+// - Two-stage parsing: This error strategy allows the first +// stage of two-stage parsing to immediately terminate if an error is +// encountered, and immediately fall back to the second stage. In addition to +// avoiding wasted work by attempting to recover from errors here, the empty +// implementation of [BailErrorStrategy.Sync] improves the performance of +// the first stage. // -//

-// {@code myparser.setErrorHandler(NewBailErrorStrategy())}
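The two-stage pattern sketched in the comment above is typically wired up along these lines; this is an assumption-laden sketch, with antlr as the import name and p a parser generated from some grammar:

    // Stage 1: fast SLL parse that gives up on the first syntax error.
    p.SetErrorHandler(antlr.NewBailErrorStrategy())
    p.GetInterpreter().SetPredictionMode(antlr.PredictionModeSLL)
    // ...invoke the entry rule here; if this stage fails, rebuild the parser
    // with the default error strategy and antlr.PredictionModeLL and parse again.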

+// - Silent validation: When syntax errors are not being +// Reported or logged, and the parse result is simply ignored if errors occur, +// the [BailErrorStrategy] avoids wasting work on recovering from errors +// when the result will be ignored either way. // -// @see Parser//setErrorHandler(ANTLRErrorStrategy) - +// myparser.SetErrorHandler(NewBailErrorStrategy()) +// +// See also: [Parser.SetErrorHandler(ANTLRErrorStrategy)] type BailErrorStrategy struct { *DefaultErrorStrategy } var _ ErrorStrategy = &BailErrorStrategy{} +//goland:noinspection GoUnusedExportedFunction func NewBailErrorStrategy() *BailErrorStrategy { b := new(BailErrorStrategy) @@ -703,10 +672,10 @@ func NewBailErrorStrategy() *BailErrorStrategy { return b } -// Instead of recovering from exception {@code e}, re-panic it wrapped -// in a {@link ParseCancellationException} so it is not caught by the -// rule func catches. Use {@link Exception//getCause()} to get the -// original {@link RecognitionException}. +// Recover Instead of recovering from exception e, re-panic it wrapped +// in a [ParseCancellationException] so it is not caught by the +// rule func catches. Use Exception.GetCause() to get the +// original [RecognitionException]. func (b *BailErrorStrategy) Recover(recognizer Parser, e RecognitionException) { context := recognizer.GetParserRuleContext() for context != nil { @@ -717,10 +686,10 @@ func (b *BailErrorStrategy) Recover(recognizer Parser, e RecognitionException) { context = nil } } - panic(NewParseCancellationException()) // TODO we don't emit e properly + recognizer.SetError(NewParseCancellationException()) // TODO: we don't emit e properly } -// Make sure we don't attempt to recover inline if the parser +// RecoverInline makes sure we don't attempt to recover inline if the parser // successfully recovers, it won't panic an exception. func (b *BailErrorStrategy) RecoverInline(recognizer Parser) Token { b.Recover(recognizer, NewInputMisMatchException(recognizer)) @@ -728,7 +697,6 @@ func (b *BailErrorStrategy) RecoverInline(recognizer Parser) Token { return nil } -// Make sure we don't attempt to recover from problems in subrules.// -func (b *BailErrorStrategy) Sync(recognizer Parser) { - // pass +// Sync makes sure we don't attempt to recover from problems in sub-rules. +func (b *BailErrorStrategy) Sync(_ Parser) { } diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/errors.go b/vendor/github.com/antlr4-go/antlr/v4/errors.go similarity index 73% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/errors.go rename to vendor/github.com/antlr4-go/antlr/v4/errors.go index 3954c1378..8f0f2f601 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/errors.go +++ b/vendor/github.com/antlr4-go/antlr/v4/errors.go @@ -35,7 +35,7 @@ func NewBaseRecognitionException(message string, recognizer Recognizer, input In // } else { // stack := NewError().stack // } - // TODO may be able to use - "runtime" func Stack(buf []byte, all bool) int + // TODO: may be able to use - "runtime" func Stack(buf []byte, all bool) int t := new(BaseRecognitionException) @@ -43,15 +43,17 @@ func NewBaseRecognitionException(message string, recognizer Recognizer, input In t.recognizer = recognizer t.input = input t.ctx = ctx - // The current {@link Token} when an error occurred. Since not all streams + + // The current Token when an error occurred. Since not all streams // support accessing symbols by index, we have to track the {@link Token} // instance itself. 
+ // t.offendingToken = nil + // Get the ATN state number the parser was in at the time the error - // occurred. For {@link NoViableAltException} and - // {@link LexerNoViableAltException} exceptions, this is the - // {@link DecisionState} number. For others, it is the state whose outgoing - // edge we couldn't Match. + // occurred. For NoViableAltException and LexerNoViableAltException exceptions, this is the + // DecisionState number. For others, it is the state whose outgoing edge we couldn't Match. + // t.offendingState = -1 if t.recognizer != nil { t.offendingState = t.recognizer.GetState() @@ -74,15 +76,15 @@ func (b *BaseRecognitionException) GetInputStream() IntStream { //

If the state number is not known, b method returns -1.
-// Gets the set of input symbols which could potentially follow the -// previously Matched symbol at the time b exception was panicn. +// getExpectedTokens gets the set of input symbols which could potentially follow the +// previously Matched symbol at the time this exception was raised. // -//
If the set of expected tokens is not known and could not be computed, -// b method returns {@code nil}.
+// If the set of expected tokens is not known and could not be computed, +// this method returns nil. // -// @return The set of token types that could potentially follow the current -// state in the ATN, or {@code nil} if the information is not available. -// / +// The func returns the set of token types that could potentially follow the current +// state in the {ATN}, or nil if the information is not available. + func (b *BaseRecognitionException) getExpectedTokens() *IntervalSet { if b.recognizer != nil { return b.recognizer.GetATN().getExpectedTokens(b.offendingState, b.ctx) @@ -99,10 +101,10 @@ type LexerNoViableAltException struct { *BaseRecognitionException startIndex int - deadEndConfigs ATNConfigSet + deadEndConfigs *ATNConfigSet } -func NewLexerNoViableAltException(lexer Lexer, input CharStream, startIndex int, deadEndConfigs ATNConfigSet) *LexerNoViableAltException { +func NewLexerNoViableAltException(lexer Lexer, input CharStream, startIndex int, deadEndConfigs *ATNConfigSet) *LexerNoViableAltException { l := new(LexerNoViableAltException) @@ -128,14 +130,16 @@ type NoViableAltException struct { startToken Token offendingToken Token ctx ParserRuleContext - deadEndConfigs ATNConfigSet + deadEndConfigs *ATNConfigSet } -// Indicates that the parser could not decide which of two or more paths +// NewNoViableAltException creates an exception indicating that the parser could not decide which of two or more paths // to take based upon the remaining input. It tracks the starting token // of the offending input and also knows where the parser was -// in the various paths when the error. Reported by ReportNoViableAlternative() -func NewNoViableAltException(recognizer Parser, input TokenStream, startToken Token, offendingToken Token, deadEndConfigs ATNConfigSet, ctx ParserRuleContext) *NoViableAltException { +// in the various paths when the error. +// +// Reported by [ReportNoViableAlternative] +func NewNoViableAltException(recognizer Parser, input TokenStream, startToken Token, offendingToken Token, deadEndConfigs *ATNConfigSet, ctx ParserRuleContext) *NoViableAltException { if ctx == nil { ctx = recognizer.GetParserRuleContext() @@ -157,12 +161,14 @@ func NewNoViableAltException(recognizer Parser, input TokenStream, startToken To n.BaseRecognitionException = NewBaseRecognitionException("", recognizer, input, ctx) // Which configurations did we try at input.Index() that couldn't Match - // input.LT(1)?// + // input.LT(1) n.deadEndConfigs = deadEndConfigs + // The token object at the start index the input stream might - // not be buffering tokens so get a reference to it. (At the - // time the error occurred, of course the stream needs to keep a - // buffer all of the tokens but later we might not have access to those.) + // not be buffering tokens so get a reference to it. + // + // At the time the error occurred, of course the stream needs to keep a + // buffer of all the tokens, but later we might not have access to those. n.startToken = startToken n.offendingToken = offendingToken @@ -173,7 +179,7 @@ type InputMisMatchException struct { *BaseRecognitionException } -// This signifies any kind of mismatched input exceptions such as +// NewInputMisMatchException creates an exception that signifies any kind of mismatched input exceptions such as // when the current input does not Match the expected token. 
func NewInputMisMatchException(recognizer Parser) *InputMisMatchException { @@ -186,11 +192,10 @@ func NewInputMisMatchException(recognizer Parser) *InputMisMatchException { } -// A semantic predicate failed during validation. Validation of predicates +// FailedPredicateException indicates that a semantic predicate failed during validation. Validation of predicates // occurs when normally parsing the alternative just like Matching a token. // Disambiguating predicate evaluation occurs when we test a predicate during // prediction. - type FailedPredicateException struct { *BaseRecognitionException @@ -199,6 +204,7 @@ type FailedPredicateException struct { predicate string } +//goland:noinspection GoUnusedExportedFunction func NewFailedPredicateException(recognizer Parser, predicate string, message string) *FailedPredicateException { f := new(FailedPredicateException) @@ -231,6 +237,21 @@ func (f *FailedPredicateException) formatMessage(predicate, message string) stri type ParseCancellationException struct { } +func (p ParseCancellationException) GetOffendingToken() Token { + //TODO implement me + panic("implement me") +} + +func (p ParseCancellationException) GetMessage() string { + //TODO implement me + panic("implement me") +} + +func (p ParseCancellationException) GetInputStream() IntStream { + //TODO implement me + panic("implement me") +} + func NewParseCancellationException() *ParseCancellationException { // Error.call(this) // Error.captureStackTrace(this, ParseCancellationException) diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/file_stream.go b/vendor/github.com/antlr4-go/antlr/v4/file_stream.go similarity index 52% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/file_stream.go rename to vendor/github.com/antlr4-go/antlr/v4/file_stream.go index bd6ad5efe..5f65f809b 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/file_stream.go +++ b/vendor/github.com/antlr4-go/antlr/v4/file_stream.go @@ -5,8 +5,7 @@ package antlr import ( - "bytes" - "io" + "bufio" "os" ) @@ -14,34 +13,53 @@ import ( // when you construct the object. type FileStream struct { - *InputStream - + InputStream filename string } +//goland:noinspection GoUnusedExportedFunction func NewFileStream(fileName string) (*FileStream, error) { - buf := bytes.NewBuffer(nil) - f, err := os.Open(fileName) if err != nil { return nil, err } - defer f.Close() - _, err = io.Copy(buf, f) + + defer func(f *os.File) { + errF := f.Close() + if errF != nil { + } + }(f) + + reader := bufio.NewReader(f) + fInfo, err := f.Stat() if err != nil { return nil, err } - fs := new(FileStream) - - fs.filename = fileName - s := string(buf.Bytes()) + fs := &FileStream{ + InputStream: InputStream{ + index: 0, + name: fileName, + }, + filename: fileName, + } - fs.InputStream = NewInputStream(s) + // Pre-build the buffer and read runes efficiently + // + fs.data = make([]rune, 0, fInfo.Size()) + for { + r, _, err := reader.ReadRune() + if err != nil { + break + } + fs.data = append(fs.data, r) + } + fs.size = len(fs.data) // Size in runes + // All done. + // return fs, nil - } func (f *FileStream) GetSourceName() string { diff --git a/vendor/github.com/antlr4-go/antlr/v4/input_stream.go b/vendor/github.com/antlr4-go/antlr/v4/input_stream.go new file mode 100644 index 000000000..b737fe85f --- /dev/null +++ b/vendor/github.com/antlr4-go/antlr/v4/input_stream.go @@ -0,0 +1,157 @@ +// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved. 
+// Use of this file is governed by the BSD 3-clause license that +// can be found in the LICENSE.txt file in the project root. + +package antlr + +import ( + "bufio" + "io" +) + +type InputStream struct { + name string + index int + data []rune + size int +} + +// NewIoStream creates a new input stream from the given io.Reader reader. +// Note that the reader is read completely into memory and so it must actually +// have a stopping point - you cannot pass in a reader on an open-ended source such +// as a socket for instance. +func NewIoStream(reader io.Reader) *InputStream { + + rReader := bufio.NewReader(reader) + + is := &InputStream{ + name: "", + index: 0, + } + + // Pre-build the buffer and read runes reasonably efficiently given that + // we don't exactly know how big the input is. + // + is.data = make([]rune, 0, 512) + for { + r, _, err := rReader.ReadRune() + if err != nil { + break + } + is.data = append(is.data, r) + } + is.size = len(is.data) // number of runes + return is +} + +// NewInputStream creates a new input stream from the given string +func NewInputStream(data string) *InputStream { + + is := &InputStream{ + name: "", + index: 0, + data: []rune(data), // This is actually the most efficient way + } + is.size = len(is.data) // number of runes, but we could also use len(data), which is efficient too + return is +} + +func (is *InputStream) reset() { + is.index = 0 +} + +// Consume moves the input pointer to the next character in the input stream +func (is *InputStream) Consume() { + if is.index >= is.size { + // assert is.LA(1) == TokenEOF + panic("cannot consume EOF") + } + is.index++ +} + +// LA returns the character at the given offset from the start of the input stream +func (is *InputStream) LA(offset int) int { + + if offset == 0 { + return 0 // nil + } + if offset < 0 { + offset++ // e.g., translate LA(-1) to use offset=0 + } + pos := is.index + offset - 1 + + if pos < 0 || pos >= is.size { // invalid + return TokenEOF + } + + return int(is.data[pos]) +} + +// LT returns the character at the given offset from the start of the input stream +func (is *InputStream) LT(offset int) int { + return is.LA(offset) +} + +// Index returns the current offset in to the input stream +func (is *InputStream) Index() int { + return is.index +} + +// Size returns the total number of characters in the input stream +func (is *InputStream) Size() int { + return is.size +} + +// Mark does nothing here as we have entire buffer +func (is *InputStream) Mark() int { + return -1 +} + +// Release does nothing here as we have entire buffer +func (is *InputStream) Release(_ int) { +} + +// Seek the input point to the provided index offset +func (is *InputStream) Seek(index int) { + if index <= is.index { + is.index = index // just jump don't update stream state (line,...) 
+ return + } + // seek forward + is.index = intMin(index, is.size) +} + +// GetText returns the text from the input stream from the start to the stop index +func (is *InputStream) GetText(start int, stop int) string { + if stop >= is.size { + stop = is.size - 1 + } + if start >= is.size { + return "" + } + + return string(is.data[start : stop+1]) +} + +// GetTextFromTokens returns the text from the input stream from the first character of the start token to the last +// character of the stop token +func (is *InputStream) GetTextFromTokens(start, stop Token) string { + if start != nil && stop != nil { + return is.GetTextFromInterval(NewInterval(start.GetTokenIndex(), stop.GetTokenIndex())) + } + + return "" +} + +func (is *InputStream) GetTextFromInterval(i Interval) string { + return is.GetText(i.Start, i.Stop) +} + +func (*InputStream) GetSourceName() string { + return "" +} + +// String returns the entire input stream as a string +func (is *InputStream) String() string { + return string(is.data) +} diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/int_stream.go b/vendor/github.com/antlr4-go/antlr/v4/int_stream.go similarity index 100% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/int_stream.go rename to vendor/github.com/antlr4-go/antlr/v4/int_stream.go diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/interval_set.go b/vendor/github.com/antlr4-go/antlr/v4/interval_set.go similarity index 82% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/interval_set.go rename to vendor/github.com/antlr4-go/antlr/v4/interval_set.go index c1e155e81..cc5066067 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/interval_set.go +++ b/vendor/github.com/antlr4-go/antlr/v4/interval_set.go @@ -14,20 +14,21 @@ type Interval struct { Stop int } -/* stop is not included! */ -func NewInterval(start, stop int) *Interval { - i := new(Interval) - - i.Start = start - i.Stop = stop - return i +// NewInterval creates a new interval with the given start and stop values. +func NewInterval(start, stop int) Interval { + return Interval{ + Start: start, + Stop: stop, + } } -func (i *Interval) Contains(item int) bool { +// Contains returns true if the given item is contained within the interval. +func (i Interval) Contains(item int) bool { return item >= i.Start && item < i.Stop } -func (i *Interval) String() string { +// String generates a string representation of the interval. +func (i Interval) String() string { if i.Start == i.Stop-1 { return strconv.Itoa(i.Start) } @@ -35,15 +36,18 @@ func (i *Interval) String() string { return strconv.Itoa(i.Start) + ".." + strconv.Itoa(i.Stop-1) } -func (i *Interval) length() int { +// Length returns the length of the interval. +func (i Interval) Length() int { return i.Stop - i.Start } +// IntervalSet represents a collection of [Intervals], which may be read-only. type IntervalSet struct { - intervals []*Interval + intervals []Interval readOnly bool } +// NewIntervalSet creates a new empty, writable, interval set. 
func NewIntervalSet() *IntervalSet { i := new(IntervalSet) @@ -54,6 +58,20 @@ func NewIntervalSet() *IntervalSet { return i } +func (i *IntervalSet) Equals(other *IntervalSet) bool { + if len(i.intervals) != len(other.intervals) { + return false + } + + for k, v := range i.intervals { + if v.Start != other.intervals[k].Start || v.Stop != other.intervals[k].Stop { + return false + } + } + + return true +} + func (i *IntervalSet) first() int { if len(i.intervals) == 0 { return TokenInvalidType @@ -70,16 +88,16 @@ func (i *IntervalSet) addRange(l, h int) { i.addInterval(NewInterval(l, h+1)) } -func (i *IntervalSet) addInterval(v *Interval) { +func (i *IntervalSet) addInterval(v Interval) { if i.intervals == nil { - i.intervals = make([]*Interval, 0) + i.intervals = make([]Interval, 0) i.intervals = append(i.intervals, v) } else { // find insert pos for k, interval := range i.intervals { // distinct range -> insert if v.Stop < interval.Start { - i.intervals = append(i.intervals[0:k], append([]*Interval{v}, i.intervals[k:]...)...) + i.intervals = append(i.intervals[0:k], append([]Interval{v}, i.intervals[k:]...)...) return } else if v.Stop == interval.Start { i.intervals[k].Start = v.Start @@ -139,16 +157,16 @@ func (i *IntervalSet) contains(item int) bool { } func (i *IntervalSet) length() int { - len := 0 + iLen := 0 for _, v := range i.intervals { - len += v.length() + iLen += v.Length() } - return len + return iLen } -func (i *IntervalSet) removeRange(v *Interval) { +func (i *IntervalSet) removeRange(v Interval) { if v.Start == v.Stop-1 { i.removeOne(v.Start) } else if i.intervals != nil { @@ -162,7 +180,7 @@ func (i *IntervalSet) removeRange(v *Interval) { i.intervals[k] = NewInterval(ni.Start, v.Start) x := NewInterval(v.Stop, ni.Stop) // i.intervals.splice(k, 0, x) - i.intervals = append(i.intervals[0:k], append([]*Interval{x}, i.intervals[k:]...)...) + i.intervals = append(i.intervals[0:k], append([]Interval{x}, i.intervals[k:]...)...) return } else if v.Start <= ni.Start && v.Stop >= ni.Stop { // i.intervals.splice(k, 1) @@ -199,7 +217,7 @@ func (i *IntervalSet) removeOne(v int) { x := NewInterval(ki.Start, v) ki.Start = v + 1 // i.intervals.splice(k, 0, x) - i.intervals = append(i.intervals[0:k], append([]*Interval{x}, i.intervals[k:]...)...) + i.intervals = append(i.intervals[0:k], append([]Interval{x}, i.intervals[k:]...)...) return } } @@ -223,7 +241,7 @@ func (i *IntervalSet) StringVerbose(literalNames []string, symbolicNames []strin return i.toIndexString() } -func (i *IntervalSet) GetIntervals() []*Interval { +func (i *IntervalSet) GetIntervals() []Interval { return i.intervals } diff --git a/vendor/github.com/antlr4-go/antlr/v4/jcollect.go b/vendor/github.com/antlr4-go/antlr/v4/jcollect.go new file mode 100644 index 000000000..ceccd96d2 --- /dev/null +++ b/vendor/github.com/antlr4-go/antlr/v4/jcollect.go @@ -0,0 +1,685 @@ +package antlr + +// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved. +// Use of this file is governed by the BSD 3-clause license that +// can be found in the LICENSE.txt file in the project root. + +import ( + "container/list" + "runtime/debug" + "sort" + "sync" +) + +// Collectable is an interface that a struct should implement if it is to be +// usable as a key in these collections. 
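As the Contains implementation above makes explicit, the rewritten Interval is a half-open value type: Stop is excluded. A small illustration, assuming the package is imported as antlr:

    iv := antlr.NewInterval(3, 7) // covers 3, 4, 5, 6
    _ = iv.Contains(6)            // true
    _ = iv.Contains(7)            // false: Stop is not included
    _ = iv.Length()               // 4
    _ = iv.String()               // "3..6"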
+type Collectable[T any] interface { + Hash() int + Equals(other Collectable[T]) bool +} + +type Comparator[T any] interface { + Hash1(o T) int + Equals2(T, T) bool +} + +type CollectionSource int +type CollectionDescriptor struct { + SybolicName string + Description string +} + +const ( + UnknownCollection CollectionSource = iota + ATNConfigLookupCollection + ATNStateCollection + DFAStateCollection + ATNConfigCollection + PredictionContextCollection + SemanticContextCollection + ClosureBusyCollection + PredictionVisitedCollection + MergeCacheCollection + PredictionContextCacheCollection + AltSetCollection + ReachSetCollection +) + +var CollectionDescriptors = map[CollectionSource]CollectionDescriptor{ + UnknownCollection: { + SybolicName: "UnknownCollection", + Description: "Unknown collection type. Only used if the target author thought it was an unimportant collection.", + }, + ATNConfigCollection: { + SybolicName: "ATNConfigCollection", + Description: "ATNConfig collection. Used to store the ATNConfigs for a particular state in the ATN." + + "For instance, it is used to store the results of the closure() operation in the ATN.", + }, + ATNConfigLookupCollection: { + SybolicName: "ATNConfigLookupCollection", + Description: "ATNConfigLookup collection. Used to store the ATNConfigs for a particular state in the ATN." + + "This is used to prevent duplicating equivalent states in an ATNConfigurationSet.", + }, + ATNStateCollection: { + SybolicName: "ATNStateCollection", + Description: "ATNState collection. This is used to store the states of the ATN.", + }, + DFAStateCollection: { + SybolicName: "DFAStateCollection", + Description: "DFAState collection. This is used to store the states of the DFA.", + }, + PredictionContextCollection: { + SybolicName: "PredictionContextCollection", + Description: "PredictionContext collection. This is used to store the prediction contexts of the ATN and cache computes.", + }, + SemanticContextCollection: { + SybolicName: "SemanticContextCollection", + Description: "SemanticContext collection. This is used to store the semantic contexts of the ATN.", + }, + ClosureBusyCollection: { + SybolicName: "ClosureBusyCollection", + Description: "ClosureBusy collection. This is used to check and prevent infinite recursion right recursive rules." + + "It stores ATNConfigs that are currently being processed in the closure() operation.", + }, + PredictionVisitedCollection: { + SybolicName: "PredictionVisitedCollection", + Description: "A map that records whether we have visited a particular context when searching through cached entries.", + }, + MergeCacheCollection: { + SybolicName: "MergeCacheCollection", + Description: "A map that records whether we have already merged two particular contexts and can save effort by not repeating it.", + }, + PredictionContextCacheCollection: { + SybolicName: "PredictionContextCacheCollection", + Description: "A map that records whether we have already created a particular context and can save effort by not computing it again.", + }, + AltSetCollection: { + SybolicName: "AltSetCollection", + Description: "Used to eliminate duplicate alternatives in an ATN config set.", + }, + ReachSetCollection: { + SybolicName: "ReachSetCollection", + Description: "Used as merge cache to prevent us needing to compute the merge of two states if we have already done it.", + }, +} + +// JStore implements a container that allows the use of a struct to calculate the key +// for a collection of values akin to map. 
This is not meant to be a full-blown HashMap but just +// serve the needs of the ANTLR Go runtime. +// +// For ease of porting the logic of the runtime from the master target (Java), this collection +// operates in a similar way to Java, in that it can use any struct that supplies a Hash() and Equals() +// function as the key. The values are stored in a standard go map which internally is a form of hashmap +// itself, the key for the go map is the hash supplied by the key object. The collection is able to deal with +// hash conflicts by using a simple slice of values associated with the hash code indexed bucket. That isn't +// particularly efficient, but it is simple, and it works. As this is specifically for the ANTLR runtime, and +// we understand the requirements, then this is fine - this is not a general purpose collection. +type JStore[T any, C Comparator[T]] struct { + store map[int][]T + len int + comparator Comparator[T] + stats *JStatRec +} + +func NewJStore[T any, C Comparator[T]](comparator Comparator[T], cType CollectionSource, desc string) *JStore[T, C] { + + if comparator == nil { + panic("comparator cannot be nil") + } + + s := &JStore[T, C]{ + store: make(map[int][]T, 1), + comparator: comparator, + } + if collectStats { + s.stats = &JStatRec{ + Source: cType, + Description: desc, + } + + // Track where we created it from if we are being asked to do so + if runtimeConfig.statsTraceStacks { + s.stats.CreateStack = debug.Stack() + } + Statistics.AddJStatRec(s.stats) + } + return s +} + +// Put will store given value in the collection. Note that the key for storage is generated from +// the value itself - this is specifically because that is what ANTLR needs - this would not be useful +// as any kind of general collection. +// +// If the key has a hash conflict, then the value will be added to the slice of values associated with the +// hash, unless the value is already in the slice, in which case the existing value is returned. Value equivalence is +// tested by calling the equals() method on the key. +// +// # If the given value is already present in the store, then the existing value is returned as v and exists is set to true +// +// If the given value is not present in the store, then the value is added to the store and returned as v and exists is set to false. +func (s *JStore[T, C]) Put(value T) (v T, exists bool) { + + if collectStats { + s.stats.Puts++ + } + kh := s.comparator.Hash1(value) + + var hClash bool + for _, v1 := range s.store[kh] { + hClash = true + if s.comparator.Equals2(value, v1) { + if collectStats { + s.stats.PutHits++ + s.stats.PutHashConflicts++ + } + return v1, true + } + if collectStats { + s.stats.PutMisses++ + } + } + if collectStats && hClash { + s.stats.PutHashConflicts++ + } + s.store[kh] = append(s.store[kh], value) + + if collectStats { + if len(s.store[kh]) > s.stats.MaxSlotSize { + s.stats.MaxSlotSize = len(s.store[kh]) + } + } + s.len++ + if collectStats { + s.stats.CurSize = s.len + if s.len > s.stats.MaxSize { + s.stats.MaxSize = s.len + } + } + return value, false +} + +// Get will return the value associated with the key - the type of the key is the same type as the value +// which would not generally be useful, but this is a specific thing for ANTLR where the key is +// generated using the object we are going to store. 
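The Put/Get contract described above only needs a Comparator supplying Hash1 and Equals2. A toy comparator over ints, purely to show the shape of the API (the runtime wires up its own comparators; names here are illustrative and the package is assumed to be imported as antlr):

    type intComparator struct{}

    func (intComparator) Hash1(v int) int       { return v }
    func (intComparator) Equals2(a, b int) bool { return a == b }

    store := antlr.NewJStore[int, intComparator](intComparator{}, antlr.UnknownCollection, "example")
    v, existed := store.Put(42) // first Put of 42: v == 42, existed == false
    _, _ = v, existed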
+func (s *JStore[T, C]) Get(key T) (T, bool) { + if collectStats { + s.stats.Gets++ + } + kh := s.comparator.Hash1(key) + var hClash bool + for _, v := range s.store[kh] { + hClash = true + if s.comparator.Equals2(key, v) { + if collectStats { + s.stats.GetHits++ + s.stats.GetHashConflicts++ + } + return v, true + } + if collectStats { + s.stats.GetMisses++ + } + } + if collectStats { + if hClash { + s.stats.GetHashConflicts++ + } + s.stats.GetNoEnt++ + } + return key, false +} + +// Contains returns true if the given key is present in the store +func (s *JStore[T, C]) Contains(key T) bool { + _, present := s.Get(key) + return present +} + +func (s *JStore[T, C]) SortedSlice(less func(i, j T) bool) []T { + vs := make([]T, 0, len(s.store)) + for _, v := range s.store { + vs = append(vs, v...) + } + sort.Slice(vs, func(i, j int) bool { + return less(vs[i], vs[j]) + }) + + return vs +} + +func (s *JStore[T, C]) Each(f func(T) bool) { + for _, e := range s.store { + for _, v := range e { + f(v) + } + } +} + +func (s *JStore[T, C]) Len() int { + return s.len +} + +func (s *JStore[T, C]) Values() []T { + vs := make([]T, 0, len(s.store)) + for _, e := range s.store { + vs = append(vs, e...) + } + return vs +} + +type entry[K, V any] struct { + key K + val V +} + +type JMap[K, V any, C Comparator[K]] struct { + store map[int][]*entry[K, V] + len int + comparator Comparator[K] + stats *JStatRec +} + +func NewJMap[K, V any, C Comparator[K]](comparator Comparator[K], cType CollectionSource, desc string) *JMap[K, V, C] { + m := &JMap[K, V, C]{ + store: make(map[int][]*entry[K, V], 1), + comparator: comparator, + } + if collectStats { + m.stats = &JStatRec{ + Source: cType, + Description: desc, + } + // Track where we created it from if we are being asked to do so + if runtimeConfig.statsTraceStacks { + m.stats.CreateStack = debug.Stack() + } + Statistics.AddJStatRec(m.stats) + } + return m +} + +func (m *JMap[K, V, C]) Put(key K, val V) (V, bool) { + if collectStats { + m.stats.Puts++ + } + kh := m.comparator.Hash1(key) + + var hClash bool + for _, e := range m.store[kh] { + hClash = true + if m.comparator.Equals2(e.key, key) { + if collectStats { + m.stats.PutHits++ + m.stats.PutHashConflicts++ + } + return e.val, true + } + if collectStats { + m.stats.PutMisses++ + } + } + if collectStats { + if hClash { + m.stats.PutHashConflicts++ + } + } + m.store[kh] = append(m.store[kh], &entry[K, V]{key, val}) + if collectStats { + if len(m.store[kh]) > m.stats.MaxSlotSize { + m.stats.MaxSlotSize = len(m.store[kh]) + } + } + m.len++ + if collectStats { + m.stats.CurSize = m.len + if m.len > m.stats.MaxSize { + m.stats.MaxSize = m.len + } + } + return val, false +} + +func (m *JMap[K, V, C]) Values() []V { + vs := make([]V, 0, len(m.store)) + for _, e := range m.store { + for _, v := range e { + vs = append(vs, v.val) + } + } + return vs +} + +func (m *JMap[K, V, C]) Get(key K) (V, bool) { + if collectStats { + m.stats.Gets++ + } + var none V + kh := m.comparator.Hash1(key) + var hClash bool + for _, e := range m.store[kh] { + hClash = true + if m.comparator.Equals2(e.key, key) { + if collectStats { + m.stats.GetHits++ + m.stats.GetHashConflicts++ + } + return e.val, true + } + if collectStats { + m.stats.GetMisses++ + } + } + if collectStats { + if hClash { + m.stats.GetHashConflicts++ + } + m.stats.GetNoEnt++ + } + return none, false +} + +func (m *JMap[K, V, C]) Len() int { + return m.len +} + +func (m *JMap[K, V, C]) Delete(key K) { + kh := m.comparator.Hash1(key) + for i, e := range m.store[kh] { + if 
m.comparator.Equals2(e.key, key) { + m.store[kh] = append(m.store[kh][:i], m.store[kh][i+1:]...) + m.len-- + return + } + } +} + +func (m *JMap[K, V, C]) Clear() { + m.store = make(map[int][]*entry[K, V]) +} + +type JPCMap struct { + store *JMap[*PredictionContext, *JMap[*PredictionContext, *PredictionContext, *ObjEqComparator[*PredictionContext]], *ObjEqComparator[*PredictionContext]] + size int + stats *JStatRec +} + +func NewJPCMap(cType CollectionSource, desc string) *JPCMap { + m := &JPCMap{ + store: NewJMap[*PredictionContext, *JMap[*PredictionContext, *PredictionContext, *ObjEqComparator[*PredictionContext]], *ObjEqComparator[*PredictionContext]](pContextEqInst, cType, desc), + } + if collectStats { + m.stats = &JStatRec{ + Source: cType, + Description: desc, + } + // Track where we created it from if we are being asked to do so + if runtimeConfig.statsTraceStacks { + m.stats.CreateStack = debug.Stack() + } + Statistics.AddJStatRec(m.stats) + } + return m +} + +func (pcm *JPCMap) Get(k1, k2 *PredictionContext) (*PredictionContext, bool) { + if collectStats { + pcm.stats.Gets++ + } + // Do we have a map stored by k1? + // + m2, present := pcm.store.Get(k1) + if present { + if collectStats { + pcm.stats.GetHits++ + } + // We found a map of values corresponding to k1, so now we need to look up k2 in that map + // + return m2.Get(k2) + } + if collectStats { + pcm.stats.GetMisses++ + } + return nil, false +} + +func (pcm *JPCMap) Put(k1, k2, v *PredictionContext) { + + if collectStats { + pcm.stats.Puts++ + } + // First does a map already exist for k1? + // + if m2, present := pcm.store.Get(k1); present { + if collectStats { + pcm.stats.PutHits++ + } + _, present = m2.Put(k2, v) + if !present { + pcm.size++ + if collectStats { + pcm.stats.CurSize = pcm.size + if pcm.size > pcm.stats.MaxSize { + pcm.stats.MaxSize = pcm.size + } + } + } + } else { + // No map found for k1, so we create it, add in our value, then store is + // + if collectStats { + pcm.stats.PutMisses++ + m2 = NewJMap[*PredictionContext, *PredictionContext, *ObjEqComparator[*PredictionContext]](pContextEqInst, pcm.stats.Source, pcm.stats.Description+" map entry") + } else { + m2 = NewJMap[*PredictionContext, *PredictionContext, *ObjEqComparator[*PredictionContext]](pContextEqInst, PredictionContextCacheCollection, "map entry") + } + + m2.Put(k2, v) + pcm.store.Put(k1, m2) + pcm.size++ + } +} + +type JPCMap2 struct { + store map[int][]JPCEntry + size int + stats *JStatRec +} + +type JPCEntry struct { + k1, k2, v *PredictionContext +} + +func NewJPCMap2(cType CollectionSource, desc string) *JPCMap2 { + m := &JPCMap2{ + store: make(map[int][]JPCEntry, 1000), + } + if collectStats { + m.stats = &JStatRec{ + Source: cType, + Description: desc, + } + // Track where we created it from if we are being asked to do so + if runtimeConfig.statsTraceStacks { + m.stats.CreateStack = debug.Stack() + } + Statistics.AddJStatRec(m.stats) + } + return m +} + +func dHash(k1, k2 *PredictionContext) int { + return k1.cachedHash*31 + k2.cachedHash +} + +func (pcm *JPCMap2) Get(k1, k2 *PredictionContext) (*PredictionContext, bool) { + if collectStats { + pcm.stats.Gets++ + } + + h := dHash(k1, k2) + var hClash bool + for _, e := range pcm.store[h] { + hClash = true + if e.k1.Equals(k1) && e.k2.Equals(k2) { + if collectStats { + pcm.stats.GetHits++ + pcm.stats.GetHashConflicts++ + } + return e.v, true + } + if collectStats { + pcm.stats.GetMisses++ + } + } + if collectStats { + if hClash { + pcm.stats.GetHashConflicts++ + } + pcm.stats.GetNoEnt++ + 
} + return nil, false +} + +func (pcm *JPCMap2) Put(k1, k2, v *PredictionContext) (*PredictionContext, bool) { + if collectStats { + pcm.stats.Puts++ + } + h := dHash(k1, k2) + var hClash bool + for _, e := range pcm.store[h] { + hClash = true + if e.k1.Equals(k1) && e.k2.Equals(k2) { + if collectStats { + pcm.stats.PutHits++ + pcm.stats.PutHashConflicts++ + } + return e.v, true + } + if collectStats { + pcm.stats.PutMisses++ + } + } + if collectStats { + if hClash { + pcm.stats.PutHashConflicts++ + } + } + pcm.store[h] = append(pcm.store[h], JPCEntry{k1, k2, v}) + pcm.size++ + if collectStats { + pcm.stats.CurSize = pcm.size + if pcm.size > pcm.stats.MaxSize { + pcm.stats.MaxSize = pcm.size + } + } + return nil, false +} + +type VisitEntry struct { + k *PredictionContext + v *PredictionContext +} +type VisitRecord struct { + store map[*PredictionContext]*PredictionContext + len int + stats *JStatRec +} + +type VisitList struct { + cache *list.List + lock sync.RWMutex +} + +var visitListPool = VisitList{ + cache: list.New(), + lock: sync.RWMutex{}, +} + +// NewVisitRecord returns a new VisitRecord instance from the pool if available. +// Note that this "map" uses a pointer as a key because we are emulating the behavior of +// IdentityHashMap in Java, which uses the `==` operator to compare whether the keys are equal, +// which means is the key the same reference to an object rather than is it .equals() to another +// object. +func NewVisitRecord() *VisitRecord { + visitListPool.lock.Lock() + el := visitListPool.cache.Front() + defer visitListPool.lock.Unlock() + var vr *VisitRecord + if el == nil { + vr = &VisitRecord{ + store: make(map[*PredictionContext]*PredictionContext), + } + if collectStats { + vr.stats = &JStatRec{ + Source: PredictionContextCacheCollection, + Description: "VisitRecord", + } + // Track where we created it from if we are being asked to do so + if runtimeConfig.statsTraceStacks { + vr.stats.CreateStack = debug.Stack() + } + } + } else { + vr = el.Value.(*VisitRecord) + visitListPool.cache.Remove(el) + vr.store = make(map[*PredictionContext]*PredictionContext) + } + if collectStats { + Statistics.AddJStatRec(vr.stats) + } + return vr +} + +func (vr *VisitRecord) Release() { + vr.len = 0 + vr.store = nil + if collectStats { + vr.stats.MaxSize = 0 + vr.stats.CurSize = 0 + vr.stats.Gets = 0 + vr.stats.GetHits = 0 + vr.stats.GetMisses = 0 + vr.stats.GetHashConflicts = 0 + vr.stats.GetNoEnt = 0 + vr.stats.Puts = 0 + vr.stats.PutHits = 0 + vr.stats.PutMisses = 0 + vr.stats.PutHashConflicts = 0 + vr.stats.MaxSlotSize = 0 + } + visitListPool.lock.Lock() + visitListPool.cache.PushBack(vr) + visitListPool.lock.Unlock() +} + +func (vr *VisitRecord) Get(k *PredictionContext) (*PredictionContext, bool) { + if collectStats { + vr.stats.Gets++ + } + v := vr.store[k] + if v != nil { + if collectStats { + vr.stats.GetHits++ + } + return v, true + } + if collectStats { + vr.stats.GetNoEnt++ + } + return nil, false +} + +func (vr *VisitRecord) Put(k, v *PredictionContext) (*PredictionContext, bool) { + if collectStats { + vr.stats.Puts++ + } + vr.store[k] = v + vr.len++ + if collectStats { + vr.stats.CurSize = vr.len + if vr.len > vr.stats.MaxSize { + vr.stats.MaxSize = vr.len + } + } + return v, false +} diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/lexer.go b/vendor/github.com/antlr4-go/antlr/v4/lexer.go similarity index 78% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/lexer.go rename to vendor/github.com/antlr4-go/antlr/v4/lexer.go index 
6533f0516..3c7896a91 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/lexer.go +++ b/vendor/github.com/antlr4-go/antlr/v4/lexer.go @@ -69,7 +69,7 @@ func NewBaseLexer(input CharStream) *BaseLexer { // create a single token. NextToken will return l object after // Matching lexer rule(s). If you subclass to allow multiple token // emissions, then set l to the last token to be Matched or - // something nonnil so that the auto token emit mechanism will not + // something non nil so that the auto token emit mechanism will not // emit another token. lexer.token = nil @@ -111,6 +111,7 @@ const ( LexerSkip = -3 ) +//goland:noinspection GoUnusedConst const ( LexerDefaultTokenChannel = TokenDefaultChannel LexerHidden = TokenHiddenChannel @@ -118,7 +119,7 @@ const ( LexerMaxCharValue = 0x10FFFF ) -func (b *BaseLexer) reset() { +func (b *BaseLexer) Reset() { // wack Lexer state variables if b.input != nil { b.input.Seek(0) // rewind the input @@ -176,7 +177,7 @@ func (b *BaseLexer) safeMatch() (ret int) { return b.Interpreter.Match(b.input, b.mode) } -// Return a token from l source i.e., Match a token on the char stream. +// NextToken returns a token from the lexer input source i.e., Match a token on the source char stream. func (b *BaseLexer) NextToken() Token { if b.input == nil { panic("NextToken requires a non-nil input stream.") @@ -205,9 +206,8 @@ func (b *BaseLexer) NextToken() Token { continueOuter := false for { b.thetype = TokenInvalidType - ttype := LexerSkip - ttype = b.safeMatch() + ttype := b.safeMatch() if b.input.LA(1) == TokenEOF { b.hitEOF = true @@ -234,12 +234,11 @@ func (b *BaseLexer) NextToken() Token { } } -// Instruct the lexer to Skip creating a token for current lexer rule -// and look for another token. NextToken() knows to keep looking when -// a lexer rule finishes with token set to SKIPTOKEN. Recall that +// Skip instructs the lexer to Skip creating a token for current lexer rule +// and look for another token. [NextToken] knows to keep looking when +// a lexer rule finishes with token set to [SKIPTOKEN]. Recall that // if token==nil at end of any token rule, it creates one for you // and emits it. -// / func (b *BaseLexer) Skip() { b.thetype = LexerSkip } @@ -248,23 +247,29 @@ func (b *BaseLexer) More() { b.thetype = LexerMore } +// SetMode changes the lexer to a new mode. The lexer will use this mode from hereon in and the rules for that mode +// will be in force. func (b *BaseLexer) SetMode(m int) { b.mode = m } +// PushMode saves the current lexer mode so that it can be restored later. See [PopMode], then sets the +// current lexer mode to the supplied mode m. func (b *BaseLexer) PushMode(m int) { - if LexerATNSimulatorDebug { + if runtimeConfig.lexerATNSimulatorDebug { fmt.Println("pushMode " + strconv.Itoa(m)) } b.modeStack.Push(b.mode) b.mode = m } +// PopMode restores the lexer mode saved by a call to [PushMode]. It is a panic error if there is no saved mode to +// return to. 
func (b *BaseLexer) PopMode() int { if len(b.modeStack) == 0 { panic("Empty Stack") } - if LexerATNSimulatorDebug { + if runtimeConfig.lexerATNSimulatorDebug { fmt.Println("popMode back to " + fmt.Sprint(b.modeStack[0:len(b.modeStack)-1])) } i, _ := b.modeStack.Pop() @@ -280,7 +285,7 @@ func (b *BaseLexer) inputStream() CharStream { func (b *BaseLexer) SetInputStream(input CharStream) { b.input = nil b.tokenFactorySourcePair = &TokenSourceCharStreamPair{b, b.input} - b.reset() + b.Reset() b.input = input b.tokenFactorySourcePair = &TokenSourceCharStreamPair{b, b.input} } @@ -289,20 +294,19 @@ func (b *BaseLexer) GetTokenSourceCharStreamPair() *TokenSourceCharStreamPair { return b.tokenFactorySourcePair } -// By default does not support multiple emits per NextToken invocation -// for efficiency reasons. Subclass and override l method, NextToken, -// and GetToken (to push tokens into a list and pull from that list -// rather than a single variable as l implementation does). -// / +// EmitToken by default does not support multiple emits per [NextToken] invocation +// for efficiency reasons. Subclass and override this func, [NextToken], +// and [GetToken] (to push tokens into a list and pull from that list +// rather than a single variable as this implementation does). func (b *BaseLexer) EmitToken(token Token) { b.token = token } -// The standard method called to automatically emit a token at the +// Emit is the standard method called to automatically emit a token at the // outermost lexical rule. The token object should point into the // char buffer start..stop. If there is a text override in 'text', -// use that to set the token's text. Override l method to emit -// custom Token objects or provide a Newfactory. +// use that to set the token's text. Override this method to emit +// custom [Token] objects or provide a new factory. // / func (b *BaseLexer) Emit() Token { t := b.factory.Create(b.tokenFactorySourcePair, b.thetype, b.text, b.channel, b.TokenStartCharIndex, b.GetCharIndex()-1, b.TokenStartLine, b.TokenStartColumn) @@ -310,6 +314,7 @@ func (b *BaseLexer) Emit() Token { return t } +// EmitEOF emits an EOF token. By default, this is the last token emitted func (b *BaseLexer) EmitEOF() Token { cpos := b.GetCharPositionInLine() lpos := b.GetLine() @@ -318,6 +323,7 @@ func (b *BaseLexer) EmitEOF() Token { return eof } +// GetCharPositionInLine returns the current position in the current line as far as the lexer is concerned. func (b *BaseLexer) GetCharPositionInLine() int { return b.Interpreter.GetCharPositionInLine() } @@ -334,13 +340,12 @@ func (b *BaseLexer) SetType(t int) { b.thetype = t } -// What is the index of the current character of lookahead?/// +// GetCharIndex returns the index of the current character of lookahead func (b *BaseLexer) GetCharIndex() int { return b.input.Index() } -// Return the text Matched so far for the current token or any text override. -// Set the complete text of l token it wipes any previous changes to the text. +// GetText returns the text Matched so far for the current token or any text override. func (b *BaseLexer) GetText() string { if b.text != "" { return b.text @@ -349,17 +354,20 @@ func (b *BaseLexer) GetText() string { return b.Interpreter.GetText(b.input) } +// SetText sets the complete text of this token; it wipes any previous changes to the text. func (b *BaseLexer) SetText(text string) { b.text = text } +// GetATN returns the ATN used by the lexer. 
func (b *BaseLexer) GetATN() *ATN { return b.Interpreter.ATN() } -// Return a list of all Token objects in input char stream. -// Forces load of all tokens. Does not include EOF token. -// / +// GetAllTokens returns a list of all [Token] objects in input char stream. +// Forces a load of all tokens that can be made from the input char stream. +// +// Does not include EOF token. func (b *BaseLexer) GetAllTokens() []Token { vl := b.Virt tokens := make([]Token, 0) @@ -398,11 +406,13 @@ func (b *BaseLexer) getCharErrorDisplay(c rune) string { return "'" + b.getErrorDisplayForChar(c) + "'" } -// Lexers can normally Match any char in it's vocabulary after Matching -// a token, so do the easy thing and just kill a character and hope +// Recover can normally Match any char in its vocabulary after Matching +// a token, so here we do the easy thing and just kill a character and hope // it all works out. You can instead use the rule invocation stack // to do sophisticated error recovery if you are in a fragment rule. -// / +// +// In general, lexers should not need to recover and should have rules that cover any eventuality, such as +// a character that makes no sense to the recognizer. func (b *BaseLexer) Recover(re RecognitionException) { if b.input.LA(1) != TokenEOF { if _, ok := re.(*LexerNoViableAltException); ok { diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/lexer_action.go b/vendor/github.com/antlr4-go/antlr/v4/lexer_action.go similarity index 78% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/lexer_action.go rename to vendor/github.com/antlr4-go/antlr/v4/lexer_action.go index 111656c29..eaa7393e0 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/lexer_action.go +++ b/vendor/github.com/antlr4-go/antlr/v4/lexer_action.go @@ -7,14 +7,29 @@ package antlr import "strconv" const ( - LexerActionTypeChannel = 0 //The type of a {@link LexerChannelAction} action. - LexerActionTypeCustom = 1 //The type of a {@link LexerCustomAction} action. - LexerActionTypeMode = 2 //The type of a {@link LexerModeAction} action. - LexerActionTypeMore = 3 //The type of a {@link LexerMoreAction} action. - LexerActionTypePopMode = 4 //The type of a {@link LexerPopModeAction} action. - LexerActionTypePushMode = 5 //The type of a {@link LexerPushModeAction} action. - LexerActionTypeSkip = 6 //The type of a {@link LexerSkipAction} action. - LexerActionTypeType = 7 //The type of a {@link LexerTypeAction} action. + // LexerActionTypeChannel represents a [LexerChannelAction] action. + LexerActionTypeChannel = 0 + + // LexerActionTypeCustom represents a [LexerCustomAction] action. + LexerActionTypeCustom = 1 + + // LexerActionTypeMode represents a [LexerModeAction] action. + LexerActionTypeMode = 2 + + // LexerActionTypeMore represents a [LexerMoreAction] action. + LexerActionTypeMore = 3 + + // LexerActionTypePopMode represents a [LexerPopModeAction] action. + LexerActionTypePopMode = 4 + + // LexerActionTypePushMode represents a [LexerPushModeAction] action. + LexerActionTypePushMode = 5 + + // LexerActionTypeSkip represents a [LexerSkipAction] action. + LexerActionTypeSkip = 6 + + // LexerActionTypeType represents a [LexerTypeAction] action. 
+ LexerActionTypeType = 7 ) type LexerAction interface { @@ -39,7 +54,7 @@ func NewBaseLexerAction(action int) *BaseLexerAction { return la } -func (b *BaseLexerAction) execute(lexer Lexer) { +func (b *BaseLexerAction) execute(_ Lexer) { panic("Not implemented") } @@ -52,17 +67,19 @@ func (b *BaseLexerAction) getIsPositionDependent() bool { } func (b *BaseLexerAction) Hash() int { - return b.actionType + h := murmurInit(0) + h = murmurUpdate(h, b.actionType) + return murmurFinish(h, 1) } func (b *BaseLexerAction) Equals(other LexerAction) bool { - return b == other + return b.actionType == other.getActionType() } -// Implements the {@code Skip} lexer action by calling {@link Lexer//Skip}. +// LexerSkipAction implements the [BaseLexerAction.Skip] lexer action by calling [Lexer.Skip]. // -//
The {@code Skip} command does not have any parameters, so l action is -// implemented as a singleton instance exposed by {@link //INSTANCE}.
+// The Skip command does not have any parameters, so this action is +// implemented as a singleton instance exposed by the [LexerSkipActionINSTANCE]. type LexerSkipAction struct { *BaseLexerAction } @@ -73,17 +90,22 @@ func NewLexerSkipAction() *LexerSkipAction { return la } -// Provides a singleton instance of l parameterless lexer action. +// LexerSkipActionINSTANCE provides a singleton instance of this parameterless lexer action. var LexerSkipActionINSTANCE = NewLexerSkipAction() func (l *LexerSkipAction) execute(lexer Lexer) { lexer.Skip() } +// String returns a string representation of the current [LexerSkipAction]. func (l *LexerSkipAction) String() string { return "skip" } +func (b *LexerSkipAction) Equals(other LexerAction) bool { + return other.getActionType() == LexerActionTypeSkip +} + // Implements the {@code type} lexer action by calling {@link Lexer//setType} // // with the assigned type. @@ -125,11 +147,10 @@ func (l *LexerTypeAction) String() string { return "actionType(" + strconv.Itoa(l.thetype) + ")" } -// Implements the {@code pushMode} lexer action by calling -// {@link Lexer//pushMode} with the assigned mode. +// LexerPushModeAction implements the pushMode lexer action by calling +// [Lexer.pushMode] with the assigned mode. type LexerPushModeAction struct { *BaseLexerAction - mode int } @@ -169,10 +190,10 @@ func (l *LexerPushModeAction) String() string { return "pushMode(" + strconv.Itoa(l.mode) + ")" } -// Implements the {@code popMode} lexer action by calling {@link Lexer//popMode}. +// LexerPopModeAction implements the popMode lexer action by calling [Lexer.popMode]. // -//
The {@code popMode} command does not have any parameters, so l action is -// implemented as a singleton instance exposed by {@link //INSTANCE}.
+// The popMode command does not have any parameters, so this action is +// implemented as a singleton instance exposed by [LexerPopModeActionINSTANCE] type LexerPopModeAction struct { *BaseLexerAction } @@ -224,11 +245,10 @@ func (l *LexerMoreAction) String() string { return "more" } -// Implements the {@code mode} lexer action by calling {@link Lexer//mode} with +// LexerModeAction implements the mode lexer action by calling [Lexer.mode] with // the assigned mode. type LexerModeAction struct { *BaseLexerAction - mode int } @@ -322,16 +342,19 @@ func (l *LexerCustomAction) Equals(other LexerAction) bool { } } -// Implements the {@code channel} lexer action by calling -// {@link Lexer//setChannel} with the assigned channel. -// Constructs a New{@code channel} action with the specified channel value. -// @param channel The channel value to pass to {@link Lexer//setChannel}. +// LexerChannelAction implements the channel lexer action by calling +// [Lexer.setChannel] with the assigned channel. +// +// Constructs a new channel action with the specified channel value. type LexerChannelAction struct { *BaseLexerAction - channel int } +// NewLexerChannelAction creates a channel lexer action by calling +// [Lexer.setChannel] with the assigned channel. +// +// Constructs a new channel action with the specified channel value. func NewLexerChannelAction(channel int) *LexerChannelAction { l := new(LexerChannelAction) l.BaseLexerAction = NewBaseLexerAction(LexerActionTypeChannel) @@ -375,25 +398,22 @@ func (l *LexerChannelAction) String() string { // lexer actions, see {@link LexerActionExecutor//append} and // {@link LexerActionExecutor//fixOffsetBeforeMatch}.
-// Constructs a Newindexed custom action by associating a character offset -// with a {@link LexerAction}. -// -// Note: This class is only required for lexer actions for which -// {@link LexerAction//isPositionDependent} returns {@code true}.
-// -// @param offset The offset into the input {@link CharStream}, relative to -// the token start index, at which the specified lexer action should be -// executed. -// @param action The lexer action to execute at a particular offset in the -// input {@link CharStream}. type LexerIndexedCustomAction struct { *BaseLexerAction - offset int lexerAction LexerAction isPositionDependent bool } +// NewLexerIndexedCustomAction constructs a new indexed custom action by associating a character offset +// with a [LexerAction]. +// +// Note: This class is only required for lexer actions for which +// [LexerAction.isPositionDependent] returns true. +// +// The offset points into the input [CharStream], relative to +// the token start index, at which the specified lexerAction should be +// executed. func NewLexerIndexedCustomAction(offset int, lexerAction LexerAction) *LexerIndexedCustomAction { l := new(LexerIndexedCustomAction) diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/lexer_action_executor.go b/vendor/github.com/antlr4-go/antlr/v4/lexer_action_executor.go similarity index 70% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/lexer_action_executor.go rename to vendor/github.com/antlr4-go/antlr/v4/lexer_action_executor.go index be1ba7a7e..dfc28c32b 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/lexer_action_executor.go +++ b/vendor/github.com/antlr4-go/antlr/v4/lexer_action_executor.go @@ -29,28 +29,20 @@ func NewLexerActionExecutor(lexerActions []LexerAction) *LexerActionExecutor { l.lexerActions = lexerActions // Caches the result of {@link //hashCode} since the hash code is an element - // of the performance-critical {@link LexerATNConfig//hashCode} operation. - l.cachedHash = murmurInit(57) + // of the performance-critical {@link ATNConfig//hashCode} operation. + l.cachedHash = murmurInit(0) for _, a := range lexerActions { l.cachedHash = murmurUpdate(l.cachedHash, a.Hash()) } + l.cachedHash = murmurFinish(l.cachedHash, len(lexerActions)) return l } -// Creates a {@link LexerActionExecutor} which executes the actions for -// the input {@code lexerActionExecutor} followed by a specified -// {@code lexerAction}. -// -// @param lexerActionExecutor The executor for actions already traversed by -// the lexer while Matching a token within a particular -// {@link LexerATNConfig}. If this is {@code nil}, the method behaves as -// though it were an empty executor. -// @param lexerAction The lexer action to execute after the actions -// specified in {@code lexerActionExecutor}. -// -// @return A {@link LexerActionExecutor} for executing the combine actions -// of {@code lexerActionExecutor} and {@code lexerAction}. +// LexerActionExecutorappend creates a [LexerActionExecutor] which executes the actions for +// the input [LexerActionExecutor] followed by a specified +// [LexerAction]. +// TODO: This does not match the Java code func LexerActionExecutorappend(lexerActionExecutor *LexerActionExecutor, lexerAction LexerAction) *LexerActionExecutor { if lexerActionExecutor == nil { return NewLexerActionExecutor([]LexerAction{lexerAction}) @@ -59,47 +51,42 @@ func LexerActionExecutorappend(lexerActionExecutor *LexerActionExecutor, lexerAc return NewLexerActionExecutor(append(lexerActionExecutor.lexerActions, lexerAction)) } -// Creates a {@link LexerActionExecutor} which encodes the current offset +// fixOffsetBeforeMatch creates a [LexerActionExecutor] which encodes the current offset // for position-dependent lexer actions. // -//
Normally, when the executor encounters lexer actions where -// {@link LexerAction//isPositionDependent} returns {@code true}, it calls -// {@link IntStream//seek} on the input {@link CharStream} to set the input -// position to the end of the current token. This behavior provides -// for efficient DFA representation of lexer actions which appear at the end +// Normally, when the executor encounters lexer actions where +// [LexerAction.isPositionDependent] returns true, it calls +// [IntStream.Seek] on the input [CharStream] to set the input +// position to the end of the current token. This behavior provides +// for efficient [DFA] representation of lexer actions which appear at the end // of a lexer rule, even when the lexer rule Matches a variable number of -// characters.
+// characters. // -//
Prior to traversing a Match transition in the ATN, the current offset +// Prior to traversing a Match transition in the [ATN], the current offset // from the token start index is assigned to all position-dependent lexer // actions which have not already been assigned a fixed offset. By storing -// the offsets relative to the token start index, the DFA representation of +// the offsets relative to the token start index, the [DFA] representation of // lexer actions which appear in the middle of tokens remains efficient due -// to sharing among tokens of the same length, regardless of their absolute -// position in the input stream.
+// to sharing among tokens of the same Length, regardless of their absolute +// position in the input stream. // -//
If the current executor already has offsets assigned to all -// position-dependent lexer actions, the method returns {@code this}.
+// If the current executor already has offsets assigned to all +// position-dependent lexer actions, the method returns this instance. // -// @param offset The current offset to assign to all position-dependent +// The offset is assigned to all position-dependent // lexer actions which do not already have offsets assigned. // -// @return A {@link LexerActionExecutor} which stores input stream offsets +// The func returns a [LexerActionExecutor] that stores input stream offsets // for all position-dependent lexer actions. -// / func (l *LexerActionExecutor) fixOffsetBeforeMatch(offset int) *LexerActionExecutor { var updatedLexerActions []LexerAction for i := 0; i < len(l.lexerActions); i++ { _, ok := l.lexerActions[i].(*LexerIndexedCustomAction) if l.lexerActions[i].getIsPositionDependent() && !ok { if updatedLexerActions == nil { - updatedLexerActions = make([]LexerAction, 0) - - for _, a := range l.lexerActions { - updatedLexerActions = append(updatedLexerActions, a) - } + updatedLexerActions = make([]LexerAction, 0, len(l.lexerActions)) + updatedLexerActions = append(updatedLexerActions, l.lexerActions...) } - updatedLexerActions[i] = NewLexerIndexedCustomAction(offset, l.lexerActions[i]) } } diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/lexer_atn_simulator.go b/vendor/github.com/antlr4-go/antlr/v4/lexer_atn_simulator.go similarity index 80% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/lexer_atn_simulator.go rename to vendor/github.com/antlr4-go/antlr/v4/lexer_atn_simulator.go index c573b7521..fe938b025 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/lexer_atn_simulator.go +++ b/vendor/github.com/antlr4-go/antlr/v4/lexer_atn_simulator.go @@ -10,10 +10,8 @@ import ( "strings" ) +//goland:noinspection GoUnusedGlobalVariable var ( - LexerATNSimulatorDebug = false - LexerATNSimulatorDFADebug = false - LexerATNSimulatorMinDFAEdge = 0 LexerATNSimulatorMaxDFAEdge = 127 // forces unicode to stay in ATN @@ -32,11 +30,11 @@ type ILexerATNSimulator interface { } type LexerATNSimulator struct { - *BaseATNSimulator + BaseATNSimulator recog Lexer predictionMode int - mergeCache DoubleDict + mergeCache *JPCMap2 startIndex int Line int CharPositionInLine int @@ -46,27 +44,35 @@ type LexerATNSimulator struct { } func NewLexerATNSimulator(recog Lexer, atn *ATN, decisionToDFA []*DFA, sharedContextCache *PredictionContextCache) *LexerATNSimulator { - l := new(LexerATNSimulator) - - l.BaseATNSimulator = NewBaseATNSimulator(atn, sharedContextCache) + l := &LexerATNSimulator{ + BaseATNSimulator: BaseATNSimulator{ + atn: atn, + sharedContextCache: sharedContextCache, + }, + } l.decisionToDFA = decisionToDFA l.recog = recog + // The current token's starting index into the character stream. // Shared across DFA to ATN simulation in case the ATN fails and the // DFA did not have a previous accept state. In l case, we use the // ATN-generated exception object. 
l.startIndex = -1 - // line number 1..n within the input/// + + // line number 1..n within the input l.Line = 1 + // The index of the character relative to the beginning of the line - // 0..n-1/// + // 0..n-1 l.CharPositionInLine = 0 + l.mode = LexerDefaultMode + // Used during DFA/ATN exec to record the most recent accept configuration // info l.prevAccept = NewSimState() - // done + return l } @@ -114,7 +120,7 @@ func (l *LexerATNSimulator) reset() { func (l *LexerATNSimulator) MatchATN(input CharStream) int { startState := l.atn.modeToStartState[l.mode] - if LexerATNSimulatorDebug { + if runtimeConfig.lexerATNSimulatorDebug { fmt.Println("MatchATN mode " + strconv.Itoa(l.mode) + " start: " + startState.String()) } oldMode := l.mode @@ -126,7 +132,7 @@ func (l *LexerATNSimulator) MatchATN(input CharStream) int { predict := l.execATN(input, next) - if LexerATNSimulatorDebug { + if runtimeConfig.lexerATNSimulatorDebug { fmt.Println("DFA after MatchATN: " + l.decisionToDFA[oldMode].ToLexerString()) } return predict @@ -134,18 +140,18 @@ func (l *LexerATNSimulator) MatchATN(input CharStream) int { func (l *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int { - if LexerATNSimulatorDebug { + if runtimeConfig.lexerATNSimulatorDebug { fmt.Println("start state closure=" + ds0.configs.String()) } if ds0.isAcceptState { - // allow zero-length tokens + // allow zero-Length tokens l.captureSimState(l.prevAccept, input, ds0) } t := input.LA(1) s := ds0 // s is current/from DFA state for { // while more work - if LexerATNSimulatorDebug { + if runtimeConfig.lexerATNSimulatorDebug { fmt.Println("execATN loop starting closure: " + s.configs.String()) } @@ -188,7 +194,7 @@ func (l *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int { } } t = input.LA(1) - s = target // flip current DFA target becomes Newsrc/from state + s = target // flip current DFA target becomes new src/from state } return l.failOrAccept(l.prevAccept, input, s.configs, t) @@ -214,43 +220,39 @@ func (l *LexerATNSimulator) getExistingTargetState(s *DFAState, t int) *DFAState return nil } target := s.getIthEdge(t - LexerATNSimulatorMinDFAEdge) - if LexerATNSimulatorDebug && target != nil { + if runtimeConfig.lexerATNSimulatorDebug && target != nil { fmt.Println("reuse state " + strconv.Itoa(s.stateNumber) + " edge to " + strconv.Itoa(target.stateNumber)) } return target } -// Compute a target state for an edge in the DFA, and attempt to add the -// computed state and corresponding edge to the DFA. +// computeTargetState computes a target state for an edge in the [DFA], and attempt to add the +// computed state and corresponding edge to the [DFA]. // -// @param input The input stream -// @param s The current DFA state -// @param t The next input symbol -// -// @return The computed target DFA state for the given input symbol -// {@code t}. If {@code t} does not lead to a valid DFA state, l method -// returns {@link //ERROR}. +// The func returns the computed target [DFA] state for the given input symbol t. +// If this does not lead to a valid [DFA] state, this method +// returns ATNSimulatorError. 
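For illustration only, not part of the vendored change: a minimal, self-contained Go sketch of the edge-caching idea the comment above describes. An existing DFA edge for the input symbol is reused when present; otherwise the target is computed and cached, with a sentinel state remembering failed lookups in the same spirit as ATNSimulatorError. All names below are hypothetical.

package main

import "fmt"

type state struct{ id int }

// errState plays the role of the error sentinel: a cached "this symbol leads nowhere" answer.
var errState = &state{id: -1}

type dfa struct {
	// edges maps a from-state id and an input symbol to the previously computed target state.
	edges map[int]map[rune]*state
}

// target reuses an existing edge when one is present, otherwise computes the target and caches it.
func (d *dfa) target(from *state, sym rune, compute func() *state) *state {
	if t, ok := d.edges[from.id][sym]; ok {
		return t // reuse, including cached failures
	}
	t := compute()
	if t == nil {
		t = errState // remember the failure so it is not recomputed on the next lookup
	}
	if d.edges[from.id] == nil {
		d.edges[from.id] = make(map[rune]*state)
	}
	d.edges[from.id][sym] = t
	return t
}

func main() {
	d := &dfa{edges: make(map[int]map[rune]*state)}
	s0 := &state{id: 0}
	fmt.Println(d.target(s0, 'a', func() *state { return &state{id: 1} }).id) // 1
	fmt.Println(d.target(s0, 'b', func() *state { return nil }).id)           // -1
}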
func (l *LexerATNSimulator) computeTargetState(input CharStream, s *DFAState, t int) *DFAState { reach := NewOrderedATNConfigSet() // if we don't find an existing DFA state // Fill reach starting from closure, following t transitions - l.getReachableConfigSet(input, s.configs, reach.BaseATNConfigSet, t) + l.getReachableConfigSet(input, s.configs, reach, t) if len(reach.configs) == 0 { // we got nowhere on t from s if !reach.hasSemanticContext { // we got nowhere on t, don't panic out l knowledge it'd - // cause a failover from DFA later. + // cause a fail-over from DFA later. l.addDFAEdge(s, t, ATNSimulatorError, nil) } // stop when we can't Match any more char return ATNSimulatorError } // Add an edge from s to target DFA found/created for reach - return l.addDFAEdge(s, t, nil, reach.BaseATNConfigSet) + return l.addDFAEdge(s, t, nil, reach) } -func (l *LexerATNSimulator) failOrAccept(prevAccept *SimState, input CharStream, reach ATNConfigSet, t int) int { +func (l *LexerATNSimulator) failOrAccept(prevAccept *SimState, input CharStream, reach *ATNConfigSet, t int) int { if l.prevAccept.dfaState != nil { lexerActionExecutor := prevAccept.dfaState.lexerActionExecutor l.accept(input, lexerActionExecutor, l.startIndex, prevAccept.index, prevAccept.line, prevAccept.column) @@ -265,34 +267,35 @@ func (l *LexerATNSimulator) failOrAccept(prevAccept *SimState, input CharStream, panic(NewLexerNoViableAltException(l.recog, input, l.startIndex, reach)) } -// Given a starting configuration set, figure out all ATN configurations -// we can reach upon input {@code t}. Parameter {@code reach} is a return -// parameter. -func (l *LexerATNSimulator) getReachableConfigSet(input CharStream, closure ATNConfigSet, reach ATNConfigSet, t int) { +// getReachableConfigSet when given a starting configuration set, figures out all [ATN] configurations +// we can reach upon input t. +// +// Parameter reach is a return parameter. 
+func (l *LexerATNSimulator) getReachableConfigSet(input CharStream, closure *ATNConfigSet, reach *ATNConfigSet, t int) { // l is used to Skip processing for configs which have a lower priority - // than a config that already reached an accept state for the same rule + // than a runtimeConfig that already reached an accept state for the same rule SkipAlt := ATNInvalidAltNumber - for _, cfg := range closure.GetItems() { - currentAltReachedAcceptState := (cfg.GetAlt() == SkipAlt) - if currentAltReachedAcceptState && cfg.(*LexerATNConfig).passedThroughNonGreedyDecision { + for _, cfg := range closure.configs { + currentAltReachedAcceptState := cfg.GetAlt() == SkipAlt + if currentAltReachedAcceptState && cfg.passedThroughNonGreedyDecision { continue } - if LexerATNSimulatorDebug { + if runtimeConfig.lexerATNSimulatorDebug { - fmt.Printf("testing %s at %s\n", l.GetTokenName(t), cfg.String()) // l.recog, true)) + fmt.Printf("testing %s at %s\n", l.GetTokenName(t), cfg.String()) } for _, trans := range cfg.GetState().GetTransitions() { target := l.getReachableTarget(trans, t) if target != nil { - lexerActionExecutor := cfg.(*LexerATNConfig).lexerActionExecutor + lexerActionExecutor := cfg.lexerActionExecutor if lexerActionExecutor != nil { lexerActionExecutor = lexerActionExecutor.fixOffsetBeforeMatch(input.Index() - l.startIndex) } - treatEOFAsEpsilon := (t == TokenEOF) - config := NewLexerATNConfig3(cfg.(*LexerATNConfig), target, lexerActionExecutor) + treatEOFAsEpsilon := t == TokenEOF + config := NewLexerATNConfig3(cfg, target, lexerActionExecutor) if l.closure(input, config, reach, currentAltReachedAcceptState, true, treatEOFAsEpsilon) { // any remaining configs for l alt have a lower priority @@ -305,7 +308,7 @@ func (l *LexerATNSimulator) getReachableConfigSet(input CharStream, closure ATNC } func (l *LexerATNSimulator) accept(input CharStream, lexerActionExecutor *LexerActionExecutor, startIndex, index, line, charPos int) { - if LexerATNSimulatorDebug { + if runtimeConfig.lexerATNSimulatorDebug { fmt.Printf("ACTION %v\n", lexerActionExecutor) } // seek to after last char in token @@ -325,7 +328,7 @@ func (l *LexerATNSimulator) getReachableTarget(trans Transition, t int) ATNState return nil } -func (l *LexerATNSimulator) computeStartState(input CharStream, p ATNState) *OrderedATNConfigSet { +func (l *LexerATNSimulator) computeStartState(input CharStream, p ATNState) *ATNConfigSet { configs := NewOrderedATNConfigSet() for i := 0; i < len(p.GetTransitions()); i++ { target := p.GetTransitions()[i].getTarget() @@ -336,25 +339,24 @@ func (l *LexerATNSimulator) computeStartState(input CharStream, p ATNState) *Ord return configs } -// Since the alternatives within any lexer decision are ordered by -// preference, l method stops pursuing the closure as soon as an accept +// closure since the alternatives within any lexer decision are ordered by +// preference, this method stops pursuing the closure as soon as an accept // state is reached. After the first accept state is reached by depth-first -// search from {@code config}, all other (potentially reachable) states for -// l rule would have a lower priority. +// search from runtimeConfig, all other (potentially reachable) states for +// this rule would have a lower priority. // -// @return {@code true} if an accept state is reached, otherwise -// {@code false}. -func (l *LexerATNSimulator) closure(input CharStream, config *LexerATNConfig, configs ATNConfigSet, +// The func returns true if an accept state is reached. 
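For illustration only, not part of the vendored change: a minimal Go sketch, with hypothetical names, of the early-exit behaviour described above. The depth-first closure reports success as soon as any accepting node is reachable, so lower-priority alternatives are not explored further.

package main

import "fmt"

type node struct {
	accept bool
	next   []*node
}

// closure returns true as soon as any node reachable from n (including n itself) is accepting.
func closure(n *node, seen map[*node]bool) bool {
	if seen[n] {
		return false // already explored on this search
	}
	seen[n] = true
	if n.accept {
		return true // stop here; remaining work would have lower priority
	}
	for _, m := range n.next {
		if closure(m, seen) {
			return true
		}
	}
	return false
}

func main() {
	stop := &node{accept: true}
	start := &node{next: []*node{{}, stop}}
	fmt.Println(closure(start, map[*node]bool{})) // true
}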
+func (l *LexerATNSimulator) closure(input CharStream, config *ATNConfig, configs *ATNConfigSet, currentAltReachedAcceptState, speculative, treatEOFAsEpsilon bool) bool { - if LexerATNSimulatorDebug { - fmt.Println("closure(" + config.String() + ")") // config.String(l.recog, true) + ")") + if runtimeConfig.lexerATNSimulatorDebug { + fmt.Println("closure(" + config.String() + ")") } _, ok := config.state.(*RuleStopState) if ok { - if LexerATNSimulatorDebug { + if runtimeConfig.lexerATNSimulatorDebug { if l.recog != nil { fmt.Printf("closure at %s rule stop %s\n", l.recog.GetRuleNames()[config.state.GetRuleIndex()], config) } else { @@ -401,10 +403,10 @@ func (l *LexerATNSimulator) closure(input CharStream, config *LexerATNConfig, co } // side-effect: can alter configs.hasSemanticContext -func (l *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerATNConfig, trans Transition, - configs ATNConfigSet, speculative, treatEOFAsEpsilon bool) *LexerATNConfig { +func (l *LexerATNSimulator) getEpsilonTarget(input CharStream, config *ATNConfig, trans Transition, + configs *ATNConfigSet, speculative, treatEOFAsEpsilon bool) *ATNConfig { - var cfg *LexerATNConfig + var cfg *ATNConfig if trans.getSerializationType() == TransitionRULE { @@ -435,10 +437,10 @@ func (l *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerATNC pt := trans.(*PredicateTransition) - if LexerATNSimulatorDebug { + if runtimeConfig.lexerATNSimulatorDebug { fmt.Println("EVAL rule " + strconv.Itoa(trans.(*PredicateTransition).ruleIndex) + ":" + strconv.Itoa(pt.predIndex)) } - configs.SetHasSemanticContext(true) + configs.hasSemanticContext = true if l.evaluatePredicate(input, pt.ruleIndex, pt.predIndex, speculative) { cfg = NewLexerATNConfig4(config, trans.getTarget()) } @@ -449,7 +451,7 @@ func (l *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerATNC // TODO: if the entry rule is invoked recursively, some // actions may be executed during the recursive call. The // problem can appear when hasEmptyPath() is true but - // isEmpty() is false. In l case, the config needs to be + // isEmpty() is false. In this case, the config needs to be // split into two contexts - one with just the empty path // and another with everything but the empty path. // Unfortunately, the current algorithm does not allow @@ -476,26 +478,18 @@ func (l *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerATNC return cfg } -// Evaluate a predicate specified in the lexer. +// evaluatePredicate eEvaluates a predicate specified in the lexer. // -//
If {@code speculative} is {@code true}, l method was called before -// {@link //consume} for the Matched character. This method should call -// {@link //consume} before evaluating the predicate to ensure position -// sensitive values, including {@link Lexer//GetText}, {@link Lexer//GetLine}, -// and {@link Lexer//getcolumn}, properly reflect the current -// lexer state. This method should restore {@code input} and the simulator -// to the original state before returning (i.e. undo the actions made by the -// call to {@link //consume}.
+// If speculative is true, this method was called before +// [consume] for the Matched character. This method should call +// [consume] before evaluating the predicate to ensure position +// sensitive values, including [GetText], [GetLine], +// and [GetColumn], properly reflect the current +// lexer state. This method should restore input and the simulator +// to the original state before returning, i.e. undo the actions made by the +// call to [Consume]. // -// @param input The input stream. -// @param ruleIndex The rule containing the predicate. -// @param predIndex The index of the predicate within the rule. -// @param speculative {@code true} if the current index in {@code input} is -// one character before the predicate's location. -// -// @return {@code true} if the specified predicate evaluates to -// {@code true}. -// / +// The func returns true if the specified predicate evaluates to true. func (l *LexerATNSimulator) evaluatePredicate(input CharStream, ruleIndex, predIndex int, speculative bool) bool { // assume true if no recognizer was provided if l.recog == nil { @@ -527,7 +521,7 @@ func (l *LexerATNSimulator) captureSimState(settings *SimState, input CharStream settings.dfaState = dfaState } -func (l *LexerATNSimulator) addDFAEdge(from *DFAState, tk int, to *DFAState, cfgs ATNConfigSet) *DFAState { +func (l *LexerATNSimulator) addDFAEdge(from *DFAState, tk int, to *DFAState, cfgs *ATNConfigSet) *DFAState { if to == nil && cfgs != nil { // leading to l call, ATNConfigSet.hasSemanticContext is used as a // marker indicating dynamic predicate evaluation makes l edge @@ -539,10 +533,9 @@ func (l *LexerATNSimulator) addDFAEdge(from *DFAState, tk int, to *DFAState, cfg // TJP notes: next time through the DFA, we see a pred again and eval. // If that gets us to a previously created (but dangling) DFA // state, we can continue in pure DFA mode from there. - // / - suppressEdge := cfgs.HasSemanticContext() - cfgs.SetHasSemanticContext(false) - + // + suppressEdge := cfgs.hasSemanticContext + cfgs.hasSemanticContext = false to = l.addDFAState(cfgs, true) if suppressEdge { @@ -554,7 +547,7 @@ func (l *LexerATNSimulator) addDFAEdge(from *DFAState, tk int, to *DFAState, cfg // Only track edges within the DFA bounds return to } - if LexerATNSimulatorDebug { + if runtimeConfig.lexerATNSimulatorDebug { fmt.Println("EDGE " + from.String() + " -> " + to.String() + " upon " + strconv.Itoa(tk)) } l.atn.edgeMu.Lock() @@ -572,13 +565,12 @@ func (l *LexerATNSimulator) addDFAEdge(from *DFAState, tk int, to *DFAState, cfg // configurations already. This method also detects the first // configuration containing an ATN rule stop state. Later, when // traversing the DFA, we will know which rule to accept. 
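For illustration only, not part of the vendored change: a small Go sketch, with hypothetical names, of the interning step described above. A proposed state is looked up by its configuration set; if an equal state already exists it is reused, otherwise the proposal is numbered and stored. The real runtime keys on ATNConfigSet equality rather than a string key.

package main

import (
	"fmt"
	"sort"
	"strconv"
	"strings"
)

type dfaState struct {
	number  int
	configs []int // stand-in for the ATN configuration set
}

// key builds an order-independent lookup key for a configuration set.
func key(configs []int) string {
	cp := append([]int(nil), configs...)
	sort.Ints(cp)
	parts := make([]string, len(cp))
	for i, c := range cp {
		parts[i] = strconv.Itoa(c)
	}
	return strings.Join(parts, ",")
}

// addDFAState returns the canonical state for configs, creating and numbering it on first sight.
func addDFAState(states map[string]*dfaState, configs []int) *dfaState {
	if existing, ok := states[key(configs)]; ok {
		return existing
	}
	s := &dfaState{number: len(states), configs: configs}
	states[key(configs)] = s
	return s
}

func main() {
	states := map[string]*dfaState{}
	a := addDFAState(states, []int{3, 1, 2})
	b := addDFAState(states, []int{1, 2, 3}) // same set, different order
	fmt.Println(a == b, a.number)            // true 0
}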
-func (l *LexerATNSimulator) addDFAState(configs ATNConfigSet, suppressEdge bool) *DFAState { +func (l *LexerATNSimulator) addDFAState(configs *ATNConfigSet, suppressEdge bool) *DFAState { proposed := NewDFAState(-1, configs) - var firstConfigWithRuleStopState ATNConfig - - for _, cfg := range configs.GetItems() { + var firstConfigWithRuleStopState *ATNConfig + for _, cfg := range configs.configs { _, ok := cfg.GetState().(*RuleStopState) if ok { @@ -588,14 +580,14 @@ func (l *LexerATNSimulator) addDFAState(configs ATNConfigSet, suppressEdge bool) } if firstConfigWithRuleStopState != nil { proposed.isAcceptState = true - proposed.lexerActionExecutor = firstConfigWithRuleStopState.(*LexerATNConfig).lexerActionExecutor + proposed.lexerActionExecutor = firstConfigWithRuleStopState.lexerActionExecutor proposed.setPrediction(l.atn.ruleToTokenType[firstConfigWithRuleStopState.GetState().GetRuleIndex()]) } dfa := l.decisionToDFA[l.mode] l.atn.stateMu.Lock() defer l.atn.stateMu.Unlock() - existing, present := dfa.states.Get(proposed) + existing, present := dfa.Get(proposed) if present { // This state was already present, so just return it. @@ -605,10 +597,11 @@ func (l *LexerATNSimulator) addDFAState(configs ATNConfigSet, suppressEdge bool) // We need to add the new state // - proposed.stateNumber = dfa.states.Len() - configs.SetReadOnly(true) + proposed.stateNumber = dfa.Len() + configs.readOnly = true + configs.configLookup = nil // Not needed now proposed.configs = configs - dfa.states.Put(proposed) + dfa.Put(proposed) } if !suppressEdge { dfa.setS0(proposed) @@ -620,7 +613,7 @@ func (l *LexerATNSimulator) getDFA(mode int) *DFA { return l.decisionToDFA[mode] } -// Get the text Matched so far for the current token. +// GetText returns the text [Match]ed so far for the current token. func (l *LexerATNSimulator) GetText(input CharStream) string { // index is first lookahead char, don't include. return input.GetTextFromInterval(NewInterval(l.startIndex, input.Index()-1)) diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/ll1_analyzer.go b/vendor/github.com/antlr4-go/antlr/v4/ll1_analyzer.go similarity index 73% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/ll1_analyzer.go rename to vendor/github.com/antlr4-go/antlr/v4/ll1_analyzer.go index 76689615a..4955ac876 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/ll1_analyzer.go +++ b/vendor/github.com/antlr4-go/antlr/v4/ll1_analyzer.go @@ -14,11 +14,11 @@ func NewLL1Analyzer(atn *ATN) *LL1Analyzer { return la } -// - Special value added to the lookahead sets to indicate that we hit -// a predicate during analysis if {@code seeThruPreds==false}. 
-// -// / const ( + // LL1AnalyzerHitPred is a special value added to the lookahead sets to indicate that we hit + // a predicate during analysis if + // + // seeThruPreds==false LL1AnalyzerHitPred = TokenInvalidType ) @@ -38,11 +38,12 @@ func (la *LL1Analyzer) getDecisionLookahead(s ATNState) []*IntervalSet { count := len(s.GetTransitions()) look := make([]*IntervalSet, count) for alt := 0; alt < count; alt++ { + look[alt] = NewIntervalSet() - lookBusy := NewJStore[ATNConfig, Comparator[ATNConfig]](aConfEqInst) - seeThruPreds := false // fail to get lookahead upon pred - la.look1(s.GetTransitions()[alt].getTarget(), nil, BasePredictionContextEMPTY, look[alt], lookBusy, NewBitSet(), seeThruPreds, false) - // Wipe out lookahead for la alternative if we found nothing + lookBusy := NewJStore[*ATNConfig, Comparator[*ATNConfig]](aConfEqInst, ClosureBusyCollection, "LL1Analyzer.getDecisionLookahead for lookBusy") + la.look1(s.GetTransitions()[alt].getTarget(), nil, BasePredictionContextEMPTY, look[alt], lookBusy, NewBitSet(), false, false) + + // Wipe out lookahead for la alternative if we found nothing, // or we had a predicate when we !seeThruPreds if look[alt].length() == 0 || look[alt].contains(LL1AnalyzerHitPred) { look[alt] = nil @@ -51,32 +52,31 @@ func (la *LL1Analyzer) getDecisionLookahead(s ATNState) []*IntervalSet { return look } -// * -// Compute set of tokens that can follow {@code s} in the ATN in the -// specified {@code ctx}. +// Look computes the set of tokens that can follow s in the [ATN] in the +// specified ctx. // -//
If {@code ctx} is {@code nil} and the end of the rule containing -// {@code s} is reached, {@link Token//EPSILON} is added to the result set. -// If {@code ctx} is not {@code nil} and the end of the outermost rule is -// reached, {@link Token//EOF} is added to the result set.
+// If ctx is nil and the end of the rule containing +// s is reached, [EPSILON] is added to the result set. // -// @param s the ATN state -// @param stopState the ATN state to stop at. This can be a -// {@link BlockEndState} to detect epsilon paths through a closure. -// @param ctx the complete parser context, or {@code nil} if the context +// If ctx is not nil and the end of the outermost rule is +// reached, [EOF] is added to the result set. +// +// Parameter s the ATN state, and stopState is the ATN state to stop at. This can be a +// [BlockEndState] to detect epsilon paths through a closure. +// +// Parameter ctx is the complete parser context, or nil if the context // should be ignored // -// @return The set of tokens that can follow {@code s} in the ATN in the -// specified {@code ctx}. -// / +// The func returns the set of tokens that can follow s in the [ATN] in the +// specified ctx. func (la *LL1Analyzer) Look(s, stopState ATNState, ctx RuleContext) *IntervalSet { r := NewIntervalSet() - seeThruPreds := true // ignore preds get all lookahead - var lookContext PredictionContext + var lookContext *PredictionContext if ctx != nil { lookContext = predictionContextFromRuleContext(s.GetATN(), ctx) } - la.look1(s, stopState, lookContext, r, NewJStore[ATNConfig, Comparator[ATNConfig]](aConfEqInst), NewBitSet(), seeThruPreds, true) + la.look1(s, stopState, lookContext, r, NewJStore[*ATNConfig, Comparator[*ATNConfig]](aConfEqInst, ClosureBusyCollection, "LL1Analyzer.Look for la.look1()"), + NewBitSet(), true, true) return r } @@ -110,16 +110,17 @@ func (la *LL1Analyzer) Look(s, stopState ATNState, ctx RuleContext) *IntervalSet // outermost context is reached. This parameter has no effect if {@code ctx} // is {@code nil}. -func (la *LL1Analyzer) look2(s, stopState ATNState, ctx PredictionContext, look *IntervalSet, lookBusy *JStore[ATNConfig, Comparator[ATNConfig]], calledRuleStack *BitSet, seeThruPreds, addEOF bool, i int) { +func (la *LL1Analyzer) look2(_, stopState ATNState, ctx *PredictionContext, look *IntervalSet, lookBusy *JStore[*ATNConfig, Comparator[*ATNConfig]], + calledRuleStack *BitSet, seeThruPreds, addEOF bool, i int) { returnState := la.atn.states[ctx.getReturnState(i)] la.look1(returnState, stopState, ctx.GetParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF) } -func (la *LL1Analyzer) look1(s, stopState ATNState, ctx PredictionContext, look *IntervalSet, lookBusy *JStore[ATNConfig, Comparator[ATNConfig]], calledRuleStack *BitSet, seeThruPreds, addEOF bool) { +func (la *LL1Analyzer) look1(s, stopState ATNState, ctx *PredictionContext, look *IntervalSet, lookBusy *JStore[*ATNConfig, Comparator[*ATNConfig]], calledRuleStack *BitSet, seeThruPreds, addEOF bool) { - c := NewBaseATNConfig6(s, 0, ctx) + c := NewATNConfig6(s, 0, ctx) if lookBusy.Contains(c) { return @@ -151,7 +152,7 @@ func (la *LL1Analyzer) look1(s, stopState ATNState, ctx PredictionContext, look return } - if ctx != BasePredictionContextEMPTY { + if ctx.pcType != PredictionContextEmpty { removed := calledRuleStack.contains(s.GetRuleIndex()) defer func() { if removed { @@ -202,7 +203,8 @@ func (la *LL1Analyzer) look1(s, stopState ATNState, ctx PredictionContext, look } } -func (la *LL1Analyzer) look3(stopState ATNState, ctx PredictionContext, look *IntervalSet, lookBusy *JStore[ATNConfig, Comparator[ATNConfig]], calledRuleStack *BitSet, seeThruPreds, addEOF bool, t1 *RuleTransition) { +func (la *LL1Analyzer) look3(stopState ATNState, ctx *PredictionContext, look *IntervalSet, lookBusy 
*JStore[*ATNConfig, Comparator[*ATNConfig]], + calledRuleStack *BitSet, seeThruPreds, addEOF bool, t1 *RuleTransition) { newContext := SingletonBasePredictionContextCreate(ctx, t1.followState.GetStateNumber()) diff --git a/vendor/github.com/antlr4-go/antlr/v4/nostatistics.go b/vendor/github.com/antlr4-go/antlr/v4/nostatistics.go new file mode 100644 index 000000000..923c7b52c --- /dev/null +++ b/vendor/github.com/antlr4-go/antlr/v4/nostatistics.go @@ -0,0 +1,47 @@ +//go:build !antlr.stats + +package antlr + +// This file is compiled when the build configuration antlr.stats is not enabled. +// which then allows the compiler to optimize out all the code that is not used. +const collectStats = false + +// goRunStats is a dummy struct used when build configuration antlr.stats is not enabled. +type goRunStats struct { +} + +var Statistics = &goRunStats{} + +func (s *goRunStats) AddJStatRec(_ *JStatRec) { + // Do nothing - compiler will optimize this out (hopefully) +} + +func (s *goRunStats) CollectionAnomalies() { + // Do nothing - compiler will optimize this out (hopefully) +} + +func (s *goRunStats) Reset() { + // Do nothing - compiler will optimize this out (hopefully) +} + +func (s *goRunStats) Report(dir string, prefix string) error { + // Do nothing - compiler will optimize this out (hopefully) + return nil +} + +func (s *goRunStats) Analyze() { + // Do nothing - compiler will optimize this out (hopefully) +} + +type statsOption func(*goRunStats) error + +func (s *goRunStats) Configure(options ...statsOption) error { + // Do nothing - compiler will optimize this out (hopefully) + return nil +} + +func WithTopN(topN int) statsOption { + return func(s *goRunStats) error { + return nil + } +} diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/parser.go b/vendor/github.com/antlr4-go/antlr/v4/parser.go similarity index 80% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/parser.go rename to vendor/github.com/antlr4-go/antlr/v4/parser.go index d26bf0639..fb57ac15d 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/parser.go +++ b/vendor/github.com/antlr4-go/antlr/v4/parser.go @@ -48,8 +48,10 @@ type BaseParser struct { _SyntaxErrors int } -// p.is all the parsing support code essentially most of it is error -// recovery stuff.// +// NewBaseParser contains all the parsing support code to embed in parsers. Essentially most of it is error +// recovery stuff. +// +//goland:noinspection GoUnusedExportedFunction func NewBaseParser(input TokenStream) *BaseParser { p := new(BaseParser) @@ -58,39 +60,46 @@ func NewBaseParser(input TokenStream) *BaseParser { // The input stream. p.input = nil + // The error handling strategy for the parser. The default value is a new // instance of {@link DefaultErrorStrategy}. p.errHandler = NewDefaultErrorStrategy() p.precedenceStack = make([]int, 0) p.precedenceStack.Push(0) - // The {@link ParserRuleContext} object for the currently executing rule. + + // The ParserRuleContext object for the currently executing rule. // p.is always non-nil during the parsing process. p.ctx = nil - // Specifies whether or not the parser should construct a parse tree during + + // Specifies whether the parser should construct a parse tree during // the parsing process. The default value is {@code true}. p.BuildParseTrees = true - // When {@link //setTrace}{@code (true)} is called, a reference to the - // {@link TraceListener} is stored here so it can be easily removed in a - // later call to {@link //setTrace}{@code (false)}. 
The listener itself is + + // When setTrace(true) is called, a reference to the + // TraceListener is stored here, so it can be easily removed in a + // later call to setTrace(false). The listener itself is // implemented as a parser listener so p.field is not directly used by // other parser methods. p.tracer = nil - // The list of {@link ParseTreeListener} listeners registered to receive + + // The list of ParseTreeListener listeners registered to receive // events during the parse. p.parseListeners = nil + // The number of syntax errors Reported during parsing. p.value is - // incremented each time {@link //NotifyErrorListeners} is called. + // incremented each time NotifyErrorListeners is called. p._SyntaxErrors = 0 p.SetInputStream(input) return p } -// p.field maps from the serialized ATN string to the deserialized {@link -// ATN} with +// This field maps from the serialized ATN string to the deserialized [ATN] with // bypass alternatives. // -// @see ATNDeserializationOptions//isGenerateRuleBypassTransitions() +// [ATNDeserializationOptions.isGenerateRuleBypassTransitions] +// +//goland:noinspection GoUnusedGlobalVariable var bypassAltsAtnCache = make(map[string]int) // reset the parser's state// @@ -143,10 +152,13 @@ func (p *BaseParser) Match(ttype int) Token { p.Consume() } else { t = p.errHandler.RecoverInline(p) + if p.HasError() { + return nil + } if p.BuildParseTrees && t.GetTokenIndex() == -1 { - // we must have conjured up a Newtoken during single token - // insertion - // if it's not the current symbol + + // we must have conjured up a new token during single token + // insertion if it's not the current symbol p.ctx.AddErrorNode(t) } } @@ -178,9 +190,8 @@ func (p *BaseParser) MatchWildcard() Token { } else { t = p.errHandler.RecoverInline(p) if p.BuildParseTrees && t.GetTokenIndex() == -1 { - // we must have conjured up a Newtoken during single token - // insertion - // if it's not the current symbol + // we must have conjured up a new token during single token + // insertion if it's not the current symbol p.ctx.AddErrorNode(t) } } @@ -202,33 +213,27 @@ func (p *BaseParser) GetParseListeners() []ParseTreeListener { return p.parseListeners } -// Registers {@code listener} to receive events during the parsing process. +// AddParseListener registers listener to receive events during the parsing process. // -//
To support output-preserving grammar transformations (including but not +// To support output-preserving grammar transformations (including but not // limited to left-recursion removal, automated left-factoring, and // optimized code generation), calls to listener methods during the parse // may differ substantially from calls made by -// {@link ParseTreeWalker//DEFAULT} used after the parse is complete. In +// [ParseTreeWalker.DEFAULT] used after the parse is complete. In // particular, rule entry and exit events may occur in a different order // during the parse than after the parser. In addition, calls to certain -// rule entry methods may be omitted.
-// -//
With the following specific exceptions, calls to listener events are -// deterministic, i.e. for identical input the calls to listener -// methods will be the same.
-// -//
-// Alterations to the grammar used to generate code may change the -// behavior of the listener calls.
-// Alterations to the command line options passed to ANTLR 4 when -// generating the parser may change the behavior of the listener calls.
-// Changing the version of the ANTLR Tool used to generate the parser -// may change the behavior of the listener calls.
+// rule entry methods may be omitted. // -// @param listener the listener to add +// With the following specific exceptions, calls to listener events are +// deterministic, i.e. for identical input the calls to listener +// methods will be the same. // -// @panics nilPointerException if {@code} listener is {@code nil} +// - Alterations to the grammar used to generate code may change the +// behavior of the listener calls. +// - Alterations to the command line options passed to ANTLR 4 when +// generating the parser may change the behavior of the listener calls. +// - Changing the version of the ANTLR Tool used to generate the parser +// may change the behavior of the listener calls. func (p *BaseParser) AddParseListener(listener ParseTreeListener) { if listener == nil { panic("listener") @@ -239,11 +244,10 @@ func (p *BaseParser) AddParseListener(listener ParseTreeListener) { p.parseListeners = append(p.parseListeners, listener) } -// Remove {@code listener} from the list of parse listeners. +// RemoveParseListener removes listener from the list of parse listeners. // -//
If {@code listener} is {@code nil} or has not been added as a parse -// listener, p.method does nothing.
-// @param listener the listener to remove +// If listener is nil or has not been added as a parse +// listener, this func does nothing. func (p *BaseParser) RemoveParseListener(listener ParseTreeListener) { if p.parseListeners != nil { @@ -274,7 +278,7 @@ func (p *BaseParser) removeParseListeners() { p.parseListeners = nil } -// Notify any parse listeners of an enter rule event. +// TriggerEnterRuleEvent notifies all parse listeners of an enter rule event. func (p *BaseParser) TriggerEnterRuleEvent() { if p.parseListeners != nil { ctx := p.ctx @@ -285,9 +289,7 @@ func (p *BaseParser) TriggerEnterRuleEvent() { } } -// Notify any parse listeners of an exit rule event. -// -// @see //addParseListener +// TriggerExitRuleEvent notifies any parse listeners of an exit rule event. func (p *BaseParser) TriggerExitRuleEvent() { if p.parseListeners != nil { // reverse order walk of listeners @@ -314,19 +316,16 @@ func (p *BaseParser) GetTokenFactory() TokenFactory { return p.input.GetTokenSource().GetTokenFactory() } -// Tell our token source and error strategy about a Newway to create tokens.// +// setTokenFactory is used to tell our token source and error strategy about a new way to create tokens. func (p *BaseParser) setTokenFactory(factory TokenFactory) { p.input.GetTokenSource().setTokenFactory(factory) } -// The ATN with bypass alternatives is expensive to create so we create it +// GetATNWithBypassAlts - the ATN with bypass alternatives is expensive to create, so we create it // lazily. -// -// @panics UnsupportedOperationException if the current parser does not -// implement the {@link //getSerializedATN()} method. func (p *BaseParser) GetATNWithBypassAlts() { - // TODO + // TODO - Implement this? panic("Not implemented!") // serializedAtn := p.getSerializedATN() @@ -354,6 +353,7 @@ func (p *BaseParser) GetATNWithBypassAlts() { // String id = m.Get("ID") // +//goland:noinspection GoUnusedParameter func (p *BaseParser) compileParseTreePattern(pattern, patternRuleIndex, lexer Lexer) { panic("NewParseTreePatternMatcher not implemented!") @@ -386,14 +386,16 @@ func (p *BaseParser) GetTokenStream() TokenStream { return p.input } -// Set the token stream and reset the parser.// +// SetTokenStream installs input as the token stream and resets the parser. func (p *BaseParser) SetTokenStream(input TokenStream) { p.input = nil p.reset() p.input = input } -// Match needs to return the current input symbol, which gets put +// GetCurrentToken returns the current token at LT(1). +// +// [Match] needs to return the current input symbol, which gets put // into the label for the associated token ref e.g., x=ID. 
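For illustration only, not part of the vendored change: a tiny Go sketch, with hypothetical names, of the LT(1)/consume discipline the comment above refers to. The current lookahead token is what a rule binds to a label such as x=ID before the parser advances.

package main

import "fmt"

type tokenBuf struct {
	toks []string
	pos  int
}

// LT1 returns the current lookahead token without consuming it.
func (b *tokenBuf) LT1() string { return b.toks[b.pos] }

// consume returns the current token and advances, so a rule can bind it to a label (the x in x=ID).
func (b *tokenBuf) consume() string {
	t := b.toks[b.pos]
	b.pos++
	return t
}

func main() {
	b := &tokenBuf{toks: []string{"ID", "=", "INT", "<EOF>"}}
	x := b.consume()        // the matched token for label x
	fmt.Println(x, b.LT1()) // ID =
}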
func (p *BaseParser) GetCurrentToken() Token { return p.input.LT(1) @@ -446,7 +448,7 @@ func (p *BaseParser) addContextToParseTree() { } } -func (p *BaseParser) EnterRule(localctx ParserRuleContext, state, ruleIndex int) { +func (p *BaseParser) EnterRule(localctx ParserRuleContext, state, _ int) { p.SetState(state) p.ctx = localctx p.ctx.SetStart(p.input.LT(1)) @@ -474,7 +476,7 @@ func (p *BaseParser) ExitRule() { func (p *BaseParser) EnterOuterAlt(localctx ParserRuleContext, altNum int) { localctx.SetAltNumber(altNum) - // if we have Newlocalctx, make sure we replace existing ctx + // if we have a new localctx, make sure we replace existing ctx // that is previous child of parse tree if p.BuildParseTrees && p.ctx != localctx { if p.ctx.GetParent() != nil { @@ -498,7 +500,7 @@ func (p *BaseParser) GetPrecedence() int { return p.precedenceStack[len(p.precedenceStack)-1] } -func (p *BaseParser) EnterRecursionRule(localctx ParserRuleContext, state, ruleIndex, precedence int) { +func (p *BaseParser) EnterRecursionRule(localctx ParserRuleContext, state, _, precedence int) { p.SetState(state) p.precedenceStack.Push(precedence) p.ctx = localctx @@ -512,7 +514,7 @@ func (p *BaseParser) EnterRecursionRule(localctx ParserRuleContext, state, ruleI // // Like {@link //EnterRule} but for recursive rules. -func (p *BaseParser) PushNewRecursionContext(localctx ParserRuleContext, state, ruleIndex int) { +func (p *BaseParser) PushNewRecursionContext(localctx ParserRuleContext, state, _ int) { previous := p.ctx previous.SetParent(localctx) previous.SetInvokingState(state) @@ -530,7 +532,7 @@ func (p *BaseParser) PushNewRecursionContext(localctx ParserRuleContext, state, } func (p *BaseParser) UnrollRecursionContexts(parentCtx ParserRuleContext) { - p.precedenceStack.Pop() + _, _ = p.precedenceStack.Pop() p.ctx.SetStop(p.input.LT(-1)) retCtx := p.ctx // save current ctx (return value) // unroll so ctx is as it was before call to recursive method @@ -561,29 +563,22 @@ func (p *BaseParser) GetInvokingContext(ruleIndex int) ParserRuleContext { return nil } -func (p *BaseParser) Precpred(localctx RuleContext, precedence int) bool { +func (p *BaseParser) Precpred(_ RuleContext, precedence int) bool { return precedence >= p.precedenceStack[len(p.precedenceStack)-1] } +//goland:noinspection GoUnusedParameter func (p *BaseParser) inContext(context ParserRuleContext) bool { // TODO: useful in parser? return false } -// -// Checks whether or not {@code symbol} can follow the current state in the -// ATN. The behavior of p.method is equivalent to the following, but is +// IsExpectedToken checks whether symbol can follow the current state in the +// {ATN}. The behavior of p.method is equivalent to the following, but is // implemented such that the complete context-sensitive follow set does not // need to be explicitly constructed. // -//
-// return getExpectedTokens().contains(symbol)
-// 
-// -// @param symbol the symbol type to check -// @return {@code true} if {@code symbol} can follow the current state in -// the ATN, otherwise {@code false}. - +// return getExpectedTokens().contains(symbol) func (p *BaseParser) IsExpectedToken(symbol int) bool { atn := p.Interpreter.atn ctx := p.ctx @@ -611,11 +606,9 @@ func (p *BaseParser) IsExpectedToken(symbol int) bool { return false } -// Computes the set of input symbols which could follow the current parser -// state and context, as given by {@link //GetState} and {@link //GetContext}, +// GetExpectedTokens and returns the set of input symbols which could follow the current parser +// state and context, as given by [GetState] and [GetContext], // respectively. -// -// @see ATN//getExpectedTokens(int, RuleContext) func (p *BaseParser) GetExpectedTokens() *IntervalSet { return p.Interpreter.atn.getExpectedTokens(p.state, p.ctx) } @@ -626,7 +619,7 @@ func (p *BaseParser) GetExpectedTokensWithinCurrentRule() *IntervalSet { return atn.NextTokens(s, nil) } -// Get a rule's index (i.e., {@code RULE_ruleName} field) or -1 if not found.// +// GetRuleIndex get a rule's index (i.e., RULE_ruleName field) or -1 if not found. func (p *BaseParser) GetRuleIndex(ruleName string) int { var ruleIndex, ok = p.GetRuleIndexMap()[ruleName] if ok { @@ -636,13 +629,10 @@ func (p *BaseParser) GetRuleIndex(ruleName string) int { return -1 } -// Return List<String> of the rule names in your parser instance +// GetRuleInvocationStack returns a list of the rule names in your parser instance // leading up to a call to the current rule. You could override if // you want more details such as the file/line info of where // in the ATN a rule is invoked. -// -// this very useful for error messages. - func (p *BaseParser) GetRuleInvocationStack(c ParserRuleContext) []string { if c == nil { c = p.ctx @@ -668,16 +658,16 @@ func (p *BaseParser) GetRuleInvocationStack(c ParserRuleContext) []string { return stack } -// For debugging and other purposes.// +// GetDFAStrings returns a list of all DFA states used for debugging purposes func (p *BaseParser) GetDFAStrings() string { return fmt.Sprint(p.Interpreter.decisionToDFA) } -// For debugging and other purposes.// +// DumpDFA prints the whole of the DFA for debugging func (p *BaseParser) DumpDFA() { seenOne := false for _, dfa := range p.Interpreter.decisionToDFA { - if dfa.states.Len() > 0 { + if dfa.Len() > 0 { if seenOne { fmt.Println() } @@ -692,8 +682,10 @@ func (p *BaseParser) GetSourceName() string { return p.GrammarFileName } -// During a parse is sometimes useful to listen in on the rule entry and exit -// events as well as token Matches. p.is for quick and dirty debugging. +// SetTrace installs a trace listener for the parse. +// +// During a parse it is sometimes useful to listen in on the rule entry and exit +// events as well as token Matches. This is for quick and dirty debugging. 
func (p *BaseParser) SetTrace(trace *TraceListener) { if trace == nil { p.RemoveParseListener(p.tracer) diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/parser_atn_simulator.go b/vendor/github.com/antlr4-go/antlr/v4/parser_atn_simulator.go similarity index 64% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/parser_atn_simulator.go rename to vendor/github.com/antlr4-go/antlr/v4/parser_atn_simulator.go index 8bcc46a0d..ae2869692 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/parser_atn_simulator.go +++ b/vendor/github.com/antlr4-go/antlr/v4/parser_atn_simulator.go @@ -10,31 +10,51 @@ import ( "strings" ) -var ( - ParserATNSimulatorDebug = false - ParserATNSimulatorTraceATNSim = false - ParserATNSimulatorDFADebug = false - ParserATNSimulatorRetryDebug = false - TurnOffLRLoopEntryBranchOpt = false -) +var () + +// ClosureBusy is a store of ATNConfigs and is a tiny abstraction layer over +// a standard JStore so that we can use Lazy instantiation of the JStore, mostly +// to avoid polluting the stats module with a ton of JStore instances with nothing in them. +type ClosureBusy struct { + bMap *JStore[*ATNConfig, Comparator[*ATNConfig]] + desc string +} + +// NewClosureBusy creates a new ClosureBusy instance used to avoid infinite recursion for right-recursive rules +func NewClosureBusy(desc string) *ClosureBusy { + return &ClosureBusy{ + desc: desc, + } +} + +func (c *ClosureBusy) Put(config *ATNConfig) (*ATNConfig, bool) { + if c.bMap == nil { + c.bMap = NewJStore[*ATNConfig, Comparator[*ATNConfig]](aConfEqInst, ClosureBusyCollection, c.desc) + } + return c.bMap.Put(config) +} type ParserATNSimulator struct { - *BaseATNSimulator + BaseATNSimulator parser Parser predictionMode int input TokenStream startIndex int dfa *DFA - mergeCache *DoubleDict + mergeCache *JPCMap outerContext ParserRuleContext } +//goland:noinspection GoUnusedExportedFunction func NewParserATNSimulator(parser Parser, atn *ATN, decisionToDFA []*DFA, sharedContextCache *PredictionContextCache) *ParserATNSimulator { - p := new(ParserATNSimulator) - - p.BaseATNSimulator = NewBaseATNSimulator(atn, sharedContextCache) + p := &ParserATNSimulator{ + BaseATNSimulator: BaseATNSimulator{ + atn: atn, + sharedContextCache: sharedContextCache, + }, + } p.parser = parser p.decisionToDFA = decisionToDFA @@ -46,12 +66,12 @@ func NewParserATNSimulator(parser Parser, atn *ATN, decisionToDFA []*DFA, shared p.outerContext = nil p.dfa = nil // Each prediction operation uses a cache for merge of prediction contexts. - // Don't keep around as it wastes huge amounts of memory. DoubleKeyMap - // isn't Synchronized but we're ok since two threads shouldn't reuse same - // parser/atnsim object because it can only handle one input at a time. - // This maps graphs a and b to merged result c. (a,b)&rarrc. We can avoid - // the merge if we ever see a and b again. Note that (b,a)&rarrc should - // also be examined during cache lookup. + // Don't keep around as it wastes huge amounts of memory. [JPCMap] + // isn't Synchronized, but we're ok since two threads shouldn't reuse same + // parser/atn-simulator object because it can only handle one input at a time. + // This maps graphs a and b to merged result c. (a,b) -> c. We can avoid + // the merge if we ever see a and b again. Note that (b,a) -> c should + // also be examined during cache lookup. 
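Editorial note on the new ClosureBusy type introduced in this hunk: it only allocates its backing JStore on the first Put, so instances that never receive a config cost nothing and do not clutter the statistics module. Below is a minimal, self-contained sketch of that lazy-initialization pattern; the `lazySet` name and its semantics are hypothetical and it deliberately does not use the runtime's JStore.

```go
// Self-contained sketch of lazy allocation on first insert, analogous to
// ClosureBusy deferring creation of its JStore.
package main

import "fmt"

type lazySet[T comparable] struct {
	m    map[T]struct{} // allocated only when the first value arrives
	desc string
}

func newLazySet[T comparable](desc string) *lazySet[T] {
	return &lazySet[T]{desc: desc} // note: no map allocated yet
}

// Put inserts v and reports whether it was already present.
func (s *lazySet[T]) Put(v T) (T, bool) {
	if s.m == nil {
		s.m = make(map[T]struct{})
	}
	if _, ok := s.m[v]; ok {
		return v, true
	}
	s.m[v] = struct{}{}
	return v, false
}

func main() {
	busy := newLazySet[string]("closure busy sketch")
	_, seen := busy.Put("config-1")
	fmt.Println("already present?", seen) // false
	_, seen = busy.Put("config-1")
	fmt.Println("already present?", seen) // true
}
```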
// p.mergeCache = nil @@ -69,14 +89,14 @@ func (p *ParserATNSimulator) SetPredictionMode(v int) { func (p *ParserATNSimulator) reset() { } -func (p *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, outerContext ParserRuleContext) int { - if ParserATNSimulatorDebug || ParserATNSimulatorTraceATNSim { +//goland:noinspection GoBoolExpressions +func (p *ParserATNSimulator) AdaptivePredict(parser *BaseParser, input TokenStream, decision int, outerContext ParserRuleContext) int { + if runtimeConfig.parserATNSimulatorDebug || runtimeConfig.parserATNSimulatorTraceATNSim { fmt.Println("adaptivePredict decision " + strconv.Itoa(decision) + " exec LA(1)==" + p.getLookaheadName(input) + " line " + strconv.Itoa(input.LT(1).GetLine()) + ":" + strconv.Itoa(input.LT(1).GetColumn())) } - p.input = input p.startIndex = input.Index() p.outerContext = outerContext @@ -88,7 +108,15 @@ func (p *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, ou defer func() { p.dfa = nil - p.mergeCache = nil // wack cache after each prediction + p.mergeCache = nil // whack cache after each prediction + // Do not attempt to run a GC now that we're done with the cache as makes the + // GC overhead terrible for badly formed grammars and has little effect on well formed + // grammars. + // I have made some extra effort to try and reduce memory pressure by reusing allocations when + // possible. However, it can only have a limited effect. The real solution is to encourage grammar + // authors to think more carefully about their grammar and to use the new antlr.stats tag to inspect + // what is happening at runtime, along with using the error listener to report ambiguities. + input.Seek(index) input.Release(m) }() @@ -113,7 +141,7 @@ func (p *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, ou if outerContext == nil { outerContext = ParserRuleContextEmpty } - if ParserATNSimulatorDebug { + if runtimeConfig.parserATNSimulatorDebug { fmt.Println("predictATN decision " + strconv.Itoa(dfa.decision) + " exec LA(1)==" + p.getLookaheadName(input) + ", outerContext=" + outerContext.String(p.parser.GetRuleNames(), nil)) @@ -142,47 +170,52 @@ func (p *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, ou p.atn.stateMu.Unlock() } - alt := p.execATN(dfa, s0, input, index, outerContext) - if ParserATNSimulatorDebug { + alt, re := p.execATN(dfa, s0, input, index, outerContext) + parser.SetError(re) + if runtimeConfig.parserATNSimulatorDebug { fmt.Println("DFA after predictATN: " + dfa.String(p.parser.GetLiteralNames(), nil)) } return alt } -// Performs ATN simulation to compute a predicted alternative based -// upon the remaining input, but also updates the DFA cache to avoid -// having to traverse the ATN again for the same input sequence. - +// execATN performs ATN simulation to compute a predicted alternative based +// upon the remaining input, but also updates the DFA cache to avoid +// having to traverse the ATN again for the same input sequence. +// // There are some key conditions we're looking for after computing a new // set of ATN configs (proposed DFA state): -// if the set is empty, there is no viable alternative for current symbol -// does the state uniquely predict an alternative? -// does the state have a conflict that would prevent us from -// putting it on the work list? - +// +// - If the set is empty, there is no viable alternative for current symbol +// - Does the state uniquely predict an alternative? 
+// - Does the state have a conflict that would prevent us from +// putting it on the work list? +// // We also have some key operations to do: -// add an edge from previous DFA state to potentially NewDFA state, D, -// upon current symbol but only if adding to work list, which means in all -// cases except no viable alternative (and possibly non-greedy decisions?) -// collecting predicates and adding semantic context to DFA accept states -// adding rule context to context-sensitive DFA accept states -// consuming an input symbol -// Reporting a conflict -// Reporting an ambiguity -// Reporting a context sensitivity -// Reporting insufficient predicates - -// cover these cases: // -// dead end -// single alt -// single alt + preds -// conflict -// conflict + preds -func (p *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStream, startIndex int, outerContext ParserRuleContext) int { - - if ParserATNSimulatorDebug || ParserATNSimulatorTraceATNSim { +// - Add an edge from previous DFA state to potentially NewDFA state, D, +// - Upon current symbol but only if adding to work list, which means in all +// cases except no viable alternative (and possibly non-greedy decisions?) +// - Collecting predicates and adding semantic context to DFA accept states +// - adding rule context to context-sensitive DFA accept states +// - Consuming an input symbol +// - Reporting a conflict +// - Reporting an ambiguity +// - Reporting a context sensitivity +// - Reporting insufficient predicates +// +// Cover these cases: +// +// - dead end +// - single alt +// - single alt + predicates +// - conflict +// - conflict + predicates +// +//goland:noinspection GoBoolExpressions +func (p *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStream, startIndex int, outerContext ParserRuleContext) (int, RecognitionException) { + + if runtimeConfig.parserATNSimulatorDebug || runtimeConfig.parserATNSimulatorTraceATNSim { fmt.Println("execATN decision " + strconv.Itoa(dfa.decision) + ", DFA state " + s0.String() + ", LA(1)==" + p.getLookaheadName(input) + @@ -191,7 +224,7 @@ func (p *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStream, previousD := s0 - if ParserATNSimulatorDebug { + if runtimeConfig.parserATNSimulatorDebug { fmt.Println("s0 = " + s0.String()) } t := input.LA(1) @@ -214,17 +247,17 @@ func (p *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStream, input.Seek(startIndex) alt := p.getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(previousD.configs, outerContext) if alt != ATNInvalidAltNumber { - return alt + return alt, nil } - - panic(e) + p.parser.SetError(e) + return ATNInvalidAltNumber, e } if D.requiresFullContext && p.predictionMode != PredictionModeSLL { // IF PREDS, MIGHT RESOLVE TO SINGLE ALT => SLL (or syntax error) - conflictingAlts := D.configs.GetConflictingAlts() + conflictingAlts := D.configs.conflictingAlts if D.predicates != nil { - if ParserATNSimulatorDebug { - fmt.Println("DFA state has preds in DFA sim LL failover") + if runtimeConfig.parserATNSimulatorDebug { + fmt.Println("DFA state has preds in DFA sim LL fail-over") } conflictIndex := input.Index() if conflictIndex != startIndex { @@ -232,10 +265,10 @@ func (p *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStream, } conflictingAlts = p.evalSemanticContext(D.predicates, outerContext, true) if conflictingAlts.length() == 1 { - if ParserATNSimulatorDebug { + if runtimeConfig.parserATNSimulatorDebug { fmt.Println("Full LL avoided") } - return 
conflictingAlts.minValue() + return conflictingAlts.minValue(), nil } if conflictIndex != startIndex { // restore the index so Reporting the fallback to full @@ -243,18 +276,18 @@ func (p *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStream, input.Seek(conflictIndex) } } - if ParserATNSimulatorDFADebug { + if runtimeConfig.parserATNSimulatorDFADebug { fmt.Println("ctx sensitive state " + outerContext.String(nil, nil) + " in " + D.String()) } fullCtx := true s0Closure := p.computeStartState(dfa.atnStartState, outerContext, fullCtx) p.ReportAttemptingFullContext(dfa, conflictingAlts, D.configs, startIndex, input.Index()) - alt := p.execATNWithFullContext(dfa, D, s0Closure, input, startIndex, outerContext) - return alt + alt, re := p.execATNWithFullContext(dfa, D, s0Closure, input, startIndex, outerContext) + return alt, re } if D.isAcceptState { if D.predicates == nil { - return D.prediction + return D.prediction, nil } stopIndex := input.Index() input.Seek(startIndex) @@ -262,13 +295,13 @@ func (p *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStream, switch alts.length() { case 0: - panic(p.noViableAlt(input, outerContext, D.configs, startIndex)) + return ATNInvalidAltNumber, p.noViableAlt(input, outerContext, D.configs, startIndex) case 1: - return alts.minValue() + return alts.minValue(), nil default: // Report ambiguity after predicate evaluation to make sure the correct set of ambig alts is Reported. p.ReportAmbiguity(dfa, D, startIndex, stopIndex, false, alts, D.configs) - return alts.minValue() + return alts.minValue(), nil } } previousD = D @@ -314,7 +347,8 @@ func (p *ParserATNSimulator) getExistingTargetState(previousD *DFAState, t int) // @return The computed target DFA state for the given input symbol // {@code t}. If {@code t} does not lead to a valid DFA state, p method // returns {@link //ERROR}. 
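Editorial illustration of the control-flow change running through these hunks: execATN and its callers now return `(alt, RecognitionException)` and record the failure with the parser instead of panicking. The self-contained sketch below shows that refactor pattern with made-up names (`predict`, `recognizer`); it is not the runtime's code.

```go
// Sketch of converting a panicking prediction routine into one that returns
// an error which the caller records, mirroring the execATN change above.
package main

import (
	"errors"
	"fmt"
)

const invalidAlt = 0 // stand-in for ATNInvalidAltNumber

type recognizer struct{ err error }

// SetError records the first recognition error, as the parser now does.
func (r *recognizer) SetError(e error) {
	if r.err == nil {
		r.err = e
	}
}

// predict returns the chosen alternative, or invalidAlt plus an error,
// instead of panicking when no viable alternative exists.
func predict(viable []int) (int, error) {
	if len(viable) == 0 {
		return invalidAlt, errors.New("no viable alternative")
	}
	best := viable[0]
	for _, a := range viable[1:] {
		if a < best {
			best = a
		}
	}
	return best, nil
}

func main() {
	r := &recognizer{}
	alt, err := predict(nil)
	r.SetError(err) // caller records the error rather than recovering a panic
	fmt.Println(alt, r.err)
}
```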
- +// +//goland:noinspection GoBoolExpressions func (p *ParserATNSimulator) computeTargetState(dfa *DFA, previousD *DFAState, t int) *DFAState { reach := p.computeReachSet(previousD.configs, t, false) @@ -322,12 +356,12 @@ func (p *ParserATNSimulator) computeTargetState(dfa *DFA, previousD *DFAState, t p.addDFAEdge(dfa, previousD, t, ATNSimulatorError) return ATNSimulatorError } - // create Newtarget state we'll add to DFA after it's complete + // create new target state we'll add to DFA after it's complete D := NewDFAState(-1, reach) predictedAlt := p.getUniqueAlt(reach) - if ParserATNSimulatorDebug { + if runtimeConfig.parserATNSimulatorDebug { altSubSets := PredictionModegetConflictingAltSubsets(reach) fmt.Println("SLL altSubSets=" + fmt.Sprint(altSubSets) + ", previous=" + previousD.configs.String() + @@ -340,17 +374,17 @@ func (p *ParserATNSimulator) computeTargetState(dfa *DFA, previousD *DFAState, t if predictedAlt != ATNInvalidAltNumber { // NO CONFLICT, UNIQUELY PREDICTED ALT D.isAcceptState = true - D.configs.SetUniqueAlt(predictedAlt) + D.configs.uniqueAlt = predictedAlt D.setPrediction(predictedAlt) } else if PredictionModehasSLLConflictTerminatingPrediction(p.predictionMode, reach) { // MORE THAN ONE VIABLE ALTERNATIVE - D.configs.SetConflictingAlts(p.getConflictingAlts(reach)) + D.configs.conflictingAlts = p.getConflictingAlts(reach) D.requiresFullContext = true // in SLL-only mode, we will stop at p state and return the minimum alt D.isAcceptState = true - D.setPrediction(D.configs.GetConflictingAlts().minValue()) + D.setPrediction(D.configs.conflictingAlts.minValue()) } - if D.isAcceptState && D.configs.HasSemanticContext() { + if D.isAcceptState && D.configs.hasSemanticContext { p.predicateDFAState(D, p.atn.getDecisionState(dfa.decision)) if D.predicates != nil { D.setPrediction(ATNInvalidAltNumber) @@ -381,15 +415,17 @@ func (p *ParserATNSimulator) predicateDFAState(dfaState *DFAState, decisionState } // comes back with reach.uniqueAlt set to a valid alt -func (p *ParserATNSimulator) execATNWithFullContext(dfa *DFA, D *DFAState, s0 ATNConfigSet, input TokenStream, startIndex int, outerContext ParserRuleContext) int { +// +//goland:noinspection GoBoolExpressions +func (p *ParserATNSimulator) execATNWithFullContext(dfa *DFA, D *DFAState, s0 *ATNConfigSet, input TokenStream, startIndex int, outerContext ParserRuleContext) (int, RecognitionException) { - if ParserATNSimulatorDebug || ParserATNSimulatorTraceATNSim { + if runtimeConfig.parserATNSimulatorDebug || runtimeConfig.parserATNSimulatorTraceATNSim { fmt.Println("execATNWithFullContext " + s0.String()) } fullCtx := true foundExactAmbig := false - var reach ATNConfigSet + var reach *ATNConfigSet previous := s0 input.Seek(startIndex) t := input.LA(1) @@ -407,25 +443,23 @@ func (p *ParserATNSimulator) execATNWithFullContext(dfa *DFA, D *DFAState, s0 AT // ATN states in SLL implies LL will also get nowhere. // If conflict in states that dip out, choose min since we // will get error no matter what. 
- e := p.noViableAlt(input, outerContext, previous, startIndex) input.Seek(startIndex) alt := p.getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(previous, outerContext) if alt != ATNInvalidAltNumber { - return alt + return alt, nil } - - panic(e) + return alt, p.noViableAlt(input, outerContext, previous, startIndex) } altSubSets := PredictionModegetConflictingAltSubsets(reach) - if ParserATNSimulatorDebug { + if runtimeConfig.parserATNSimulatorDebug { fmt.Println("LL altSubSets=" + fmt.Sprint(altSubSets) + ", predict=" + strconv.Itoa(PredictionModegetUniqueAlt(altSubSets)) + ", resolvesToJustOneViableAlt=" + fmt.Sprint(PredictionModeresolvesToJustOneViableAlt(altSubSets))) } - reach.SetUniqueAlt(p.getUniqueAlt(reach)) + reach.uniqueAlt = p.getUniqueAlt(reach) // unique prediction? - if reach.GetUniqueAlt() != ATNInvalidAltNumber { - predictedAlt = reach.GetUniqueAlt() + if reach.uniqueAlt != ATNInvalidAltNumber { + predictedAlt = reach.uniqueAlt break } if p.predictionMode != PredictionModeLLExactAmbigDetection { @@ -454,9 +488,9 @@ func (p *ParserATNSimulator) execATNWithFullContext(dfa *DFA, D *DFAState, s0 AT // If the configuration set uniquely predicts an alternative, // without conflict, then we know that it's a full LL decision // not SLL. - if reach.GetUniqueAlt() != ATNInvalidAltNumber { + if reach.uniqueAlt != ATNInvalidAltNumber { p.ReportContextSensitivity(dfa, predictedAlt, reach, startIndex, input.Index()) - return predictedAlt + return predictedAlt, nil } // We do not check predicates here because we have checked them // on-the-fly when doing full context prediction. @@ -469,10 +503,10 @@ func (p *ParserATNSimulator) execATNWithFullContext(dfa *DFA, D *DFAState, s0 AT // // For example, we might know that we have conflicting configurations. // But, that does not mean that there is no way forward without a - // conflict. It's possible to have nonconflicting alt subsets as in: - + // conflict. It's possible to have non-conflicting alt subsets as in: + // // altSubSets=[{1, 2}, {1, 2}, {1}, {1, 2}] - + // // from // // [(17,1,[5 $]), (13,1,[5 10 $]), (21,1,[5 10 $]), (11,1,[$]), @@ -487,14 +521,15 @@ func (p *ParserATNSimulator) execATNWithFullContext(dfa *DFA, D *DFAState, s0 AT p.ReportAmbiguity(dfa, D, startIndex, input.Index(), foundExactAmbig, reach.Alts(), reach) - return predictedAlt + return predictedAlt, nil } -func (p *ParserATNSimulator) computeReachSet(closure ATNConfigSet, t int, fullCtx bool) ATNConfigSet { +//goland:noinspection GoBoolExpressions +func (p *ParserATNSimulator) computeReachSet(closure *ATNConfigSet, t int, fullCtx bool) *ATNConfigSet { if p.mergeCache == nil { - p.mergeCache = NewDoubleDict() + p.mergeCache = NewJPCMap(ReachSetCollection, "Merge cache for computeReachSet()") } - intermediate := NewBaseATNConfigSet(fullCtx) + intermediate := NewATNConfigSet(fullCtx) // Configurations already in a rule stop state indicate reaching the end // of the decision rule (local context) or end of the start rule (full @@ -506,18 +541,18 @@ func (p *ParserATNSimulator) computeReachSet(closure ATNConfigSet, t int, fullCt // ensure that the alternative Matching the longest overall sequence is // chosen when multiple such configurations can Match the input. 
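Editorial illustration: the core of computeReachSet, shown in the hunk that follows, is the classic subset-construction "move" step, namely for every configuration, follow each transition that matches the next input symbol and collect the successors. The stripped-down sketch below uses hypothetical types (`config`, `transition`) and ignores rule-stop handling and the merge cache.

```go
// Minimal sketch of the "reach on symbol t" step of subset construction.
package main

import "fmt"

type transition struct {
	label  rune // symbol this edge matches
	target int  // successor state
}

type config struct {
	state int
	alt   int
}

// reach returns the configurations reachable from configs on symbol t.
func reach(configs []config, edges map[int][]transition, t rune) []config {
	var out []config
	for _, c := range configs {
		for _, tr := range edges[c.state] {
			if tr.label == t {
				out = append(out, config{state: tr.target, alt: c.alt})
			}
		}
	}
	return out
}

func main() {
	edges := map[int][]transition{
		1: {{label: 'a', target: 2}, {label: 'b', target: 3}},
		2: {{label: 'b', target: 4}},
	}
	start := []config{{state: 1, alt: 1}, {state: 2, alt: 2}}
	fmt.Println(reach(start, edges, 'b')) // [{3 1} {4 2}]
}
```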
- var skippedStopStates []*BaseATNConfig + var skippedStopStates []*ATNConfig // First figure out where we can reach on input t - for _, c := range closure.GetItems() { - if ParserATNSimulatorDebug { + for _, c := range closure.configs { + if runtimeConfig.parserATNSimulatorDebug { fmt.Println("testing " + p.GetTokenName(t) + " at " + c.String()) } if _, ok := c.GetState().(*RuleStopState); ok { if fullCtx || t == TokenEOF { - skippedStopStates = append(skippedStopStates, c.(*BaseATNConfig)) - if ParserATNSimulatorDebug { + skippedStopStates = append(skippedStopStates, c) + if runtimeConfig.parserATNSimulatorDebug { fmt.Println("added " + c.String() + " to SkippedStopStates") } } @@ -527,9 +562,9 @@ func (p *ParserATNSimulator) computeReachSet(closure ATNConfigSet, t int, fullCt for _, trans := range c.GetState().GetTransitions() { target := p.getReachableTarget(trans, t) if target != nil { - cfg := NewBaseATNConfig4(c, target) + cfg := NewATNConfig4(c, target) intermediate.Add(cfg, p.mergeCache) - if ParserATNSimulatorDebug { + if runtimeConfig.parserATNSimulatorDebug { fmt.Println("added " + cfg.String() + " to intermediate") } } @@ -537,7 +572,7 @@ func (p *ParserATNSimulator) computeReachSet(closure ATNConfigSet, t int, fullCt } // Now figure out where the reach operation can take us... - var reach ATNConfigSet + var reach *ATNConfigSet // This block optimizes the reach operation for intermediate sets which // trivially indicate a termination state for the overall @@ -565,8 +600,8 @@ func (p *ParserATNSimulator) computeReachSet(closure ATNConfigSet, t int, fullCt // operation on the intermediate set to compute its initial value. // if reach == nil { - reach = NewBaseATNConfigSet(fullCtx) - closureBusy := NewJStore[ATNConfig, Comparator[ATNConfig]](aConfEqInst) + reach = NewATNConfigSet(fullCtx) + closureBusy := NewClosureBusy("ParserATNSimulator.computeReachSet() make a closureBusy") treatEOFAsEpsilon := t == TokenEOF amount := len(intermediate.configs) for k := 0; k < amount; k++ { @@ -588,10 +623,10 @@ func (p *ParserATNSimulator) computeReachSet(closure ATNConfigSet, t int, fullCt // // This is handled before the configurations in SkippedStopStates, // because any configurations potentially added from that list are - // already guaranteed to meet p condition whether or not it's + // already guaranteed to meet this condition whether it's // required. // - reach = p.removeAllConfigsNotInRuleStopState(reach, reach == intermediate) + reach = p.removeAllConfigsNotInRuleStopState(reach, reach.Equals(intermediate)) } // If SkippedStopStates!=nil, then it contains at least one // configuration. For full-context reach operations, these @@ -607,41 +642,40 @@ func (p *ParserATNSimulator) computeReachSet(closure ATNConfigSet, t int, fullCt } } - if ParserATNSimulatorTraceATNSim { + if runtimeConfig.parserATNSimulatorTraceATNSim { fmt.Println("computeReachSet " + closure.String() + " -> " + reach.String()) } - if len(reach.GetItems()) == 0 { + if len(reach.configs) == 0 { return nil } return reach } -// Return a configuration set containing only the configurations from -// {@code configs} which are in a {@link RuleStopState}. If all -// configurations in {@code configs} are already in a rule stop state, p -// method simply returns {@code configs}. +// removeAllConfigsNotInRuleStopState returns a configuration set containing only the configurations from +// configs which are in a [RuleStopState]. 
If all +// configurations in configs are already in a rule stop state, this +// method simply returns configs. // -//

When {@code lookToEndOfRule} is true, p method uses -// {@link ATN//NextTokens} for each configuration in {@code configs} which is +// When lookToEndOfRule is true, this method uses +// [ATN].[NextTokens] for each configuration in configs which is // not already in a rule stop state to see if a rule stop state is reachable -// from the configuration via epsilon-only transitions.

+// from the configuration via epsilon-only transitions. // -// @param configs the configuration set to update -// @param lookToEndOfRule when true, p method checks for rule stop states +// When lookToEndOfRule is true, this method checks for rule stop states // reachable by epsilon-only transitions from each configuration in -// {@code configs}. +// configs. // -// @return {@code configs} if all configurations in {@code configs} are in a -// rule stop state, otherwise return a Newconfiguration set containing only -// the configurations from {@code configs} which are in a rule stop state -func (p *ParserATNSimulator) removeAllConfigsNotInRuleStopState(configs ATNConfigSet, lookToEndOfRule bool) ATNConfigSet { +// The func returns configs if all configurations in configs are in a +// rule stop state, otherwise it returns a new configuration set containing only +// the configurations from configs which are in a rule stop state +func (p *ParserATNSimulator) removeAllConfigsNotInRuleStopState(configs *ATNConfigSet, lookToEndOfRule bool) *ATNConfigSet { if PredictionModeallConfigsInRuleStopStates(configs) { return configs } - result := NewBaseATNConfigSet(configs.FullContext()) - for _, config := range configs.GetItems() { + result := NewATNConfigSet(configs.fullCtx) + for _, config := range configs.configs { if _, ok := config.GetState().(*RuleStopState); ok { result.Add(config, p.mergeCache) continue @@ -650,91 +684,81 @@ func (p *ParserATNSimulator) removeAllConfigsNotInRuleStopState(configs ATNConfi NextTokens := p.atn.NextTokens(config.GetState(), nil) if NextTokens.contains(TokenEpsilon) { endOfRuleState := p.atn.ruleToStopState[config.GetState().GetRuleIndex()] - result.Add(NewBaseATNConfig4(config, endOfRuleState), p.mergeCache) + result.Add(NewATNConfig4(config, endOfRuleState), p.mergeCache) } } } return result } -func (p *ParserATNSimulator) computeStartState(a ATNState, ctx RuleContext, fullCtx bool) ATNConfigSet { +//goland:noinspection GoBoolExpressions +func (p *ParserATNSimulator) computeStartState(a ATNState, ctx RuleContext, fullCtx bool) *ATNConfigSet { // always at least the implicit call to start rule initialContext := predictionContextFromRuleContext(p.atn, ctx) - configs := NewBaseATNConfigSet(fullCtx) - if ParserATNSimulatorDebug || ParserATNSimulatorTraceATNSim { + configs := NewATNConfigSet(fullCtx) + if runtimeConfig.parserATNSimulatorDebug || runtimeConfig.parserATNSimulatorTraceATNSim { fmt.Println("computeStartState from ATN state " + a.String() + " initialContext=" + initialContext.String()) } for i := 0; i < len(a.GetTransitions()); i++ { target := a.GetTransitions()[i].getTarget() - c := NewBaseATNConfig6(target, i+1, initialContext) - closureBusy := NewJStore[ATNConfig, Comparator[ATNConfig]](atnConfCompInst) + c := NewATNConfig6(target, i+1, initialContext) + closureBusy := NewClosureBusy("ParserATNSimulator.computeStartState() make a closureBusy") p.closure(c, configs, closureBusy, true, fullCtx, false) } return configs } -// This method transforms the start state computed by -// {@link //computeStartState} to the special start state used by a -// precedence DFA for a particular precedence value. The transformation +// applyPrecedenceFilter transforms the start state computed by +// [computeStartState] to the special start state used by a +// precedence [DFA] for a particular precedence value. The transformation // process applies the following changes to the start state's configuration // set. // -//
-// <ol>
-// <li>Evaluate the precedence predicates for each configuration using
-// {@link SemanticContext//evalPrecedence}.</li>
-// <li>Remove all configurations which predict an alternative greater than
-// 1, for which another configuration that predicts alternative 1 is in the
-// same ATN state with the same prediction context. This transformation is
-// valid for the following reasons:
-//
-// <ul>
-// <li>The closure block cannot contain any epsilon transitions which bypass
-// the body of the closure, so all states reachable via alternative 1 are
-// part of the precedence alternatives of the transformed left-recursive
-// rule.</li>
-// <li>The "primary" portion of a left recursive rule cannot contain an
-// epsilon transition, so the only way an alternative other than 1 can exist
-// in a state that is also reachable via alternative 1 is by nesting calls
-// to the left-recursive rule, with the outer calls not being at the
-// preferred precedence level.</li>
-// </ul>
-// </li>
-// </ol>
-//
+// 1. Evaluate the precedence predicates for each configuration using +// [SemanticContext].evalPrecedence. +// 2. Remove all configurations which predict an alternative greater than +// 1, for which another configuration that predicts alternative 1 is in the +// same ATN state with the same prediction context. +// +// Transformation 2 is valid for the following reasons: +// +// - The closure block cannot contain any epsilon transitions which bypass +// the body of the closure, so all states reachable via alternative 1 are +// part of the precedence alternatives of the transformed left-recursive +// rule. +// - The "primary" portion of a left recursive rule cannot contain an +// epsilon transition, so the only way an alternative other than 1 can exist +// in a state that is also reachable via alternative 1 is by nesting calls +// to the left-recursive rule, with the outer calls not being at the +// preferred precedence level. +// +// The prediction context must be considered by this filter to address +// situations like the following: +// +// grammar TA +// prog: statement* EOF +// statement: letterA | statement letterA 'b' +// letterA: 'a' // -//

-// The prediction context must be considered by p filter to address -// situations like the following. -//

-// -//
-// grammar TA
-// prog: statement* EOF
-// statement: letterA | statement letterA 'b'
-// letterA: 'a'
-// 
-//
-//

-// If the above grammar, the ATN state immediately before the token -// reference {@code 'a'} in {@code letterA} is reachable from the left edge +// In the above grammar, the [ATN] state immediately before the token +// reference 'a' in letterA is reachable from the left edge // of both the primary and closure blocks of the left-recursive rule -// {@code statement}. The prediction context associated with each of these +// statement. The prediction context associated with each of these // configurations distinguishes between them, and prevents the alternative -// which stepped out to {@code prog} (and then back in to {@code statement} +// which stepped out to prog, and then back in to statement // from being eliminated by the filter. -//

// -// @param configs The configuration set computed by -// {@link //computeStartState} as the start state for the DFA. -// @return The transformed configuration set representing the start state -// for a precedence DFA at a particular precedence level (determined by -// calling {@link Parser//getPrecedence}). -func (p *ParserATNSimulator) applyPrecedenceFilter(configs ATNConfigSet) ATNConfigSet { +// The func returns the transformed configuration set representing the start state +// for a precedence [DFA] at a particular precedence level (determined by +// calling [Parser].getPrecedence). +func (p *ParserATNSimulator) applyPrecedenceFilter(configs *ATNConfigSet) *ATNConfigSet { - statesFromAlt1 := make(map[int]PredictionContext) - configSet := NewBaseATNConfigSet(configs.FullContext()) + statesFromAlt1 := make(map[int]*PredictionContext) + configSet := NewATNConfigSet(configs.fullCtx) - for _, config := range configs.GetItems() { + for _, config := range configs.configs { // handle alt 1 first if config.GetAlt() != 1 { continue @@ -746,12 +770,12 @@ func (p *ParserATNSimulator) applyPrecedenceFilter(configs ATNConfigSet) ATNConf } statesFromAlt1[config.GetState().GetStateNumber()] = config.GetContext() if updatedContext != config.GetSemanticContext() { - configSet.Add(NewBaseATNConfig2(config, updatedContext), p.mergeCache) + configSet.Add(NewATNConfig2(config, updatedContext), p.mergeCache) } else { configSet.Add(config, p.mergeCache) } } - for _, config := range configs.GetItems() { + for _, config := range configs.configs { if config.GetAlt() == 1 { // already handled @@ -780,10 +804,11 @@ func (p *ParserATNSimulator) getReachableTarget(trans Transition, ttype int) ATN return nil } -func (p *ParserATNSimulator) getPredsForAmbigAlts(ambigAlts *BitSet, configs ATNConfigSet, nalts int) []SemanticContext { +//goland:noinspection GoBoolExpressions +func (p *ParserATNSimulator) getPredsForAmbigAlts(ambigAlts *BitSet, configs *ATNConfigSet, nalts int) []SemanticContext { altToPred := make([]SemanticContext, nalts+1) - for _, c := range configs.GetItems() { + for _, c := range configs.configs { if ambigAlts.contains(c.GetAlt()) { altToPred[c.GetAlt()] = SemanticContextorContext(altToPred[c.GetAlt()], c.GetSemanticContext()) } @@ -797,11 +822,11 @@ func (p *ParserATNSimulator) getPredsForAmbigAlts(ambigAlts *BitSet, configs ATN nPredAlts++ } } - // nonambig alts are nil in altToPred + // unambiguous alts are nil in altToPred if nPredAlts == 0 { altToPred = nil } - if ParserATNSimulatorDebug { + if runtimeConfig.parserATNSimulatorDebug { fmt.Println("getPredsForAmbigAlts result " + fmt.Sprint(altToPred)) } return altToPred @@ -812,7 +837,7 @@ func (p *ParserATNSimulator) getPredicatePredictions(ambigAlts *BitSet, altToPre containsPredicate := false for i := 1; i < len(altToPred); i++ { pred := altToPred[i] - // unpredicated is indicated by SemanticContextNONE + // un-predicated is indicated by SemanticContextNONE if ambigAlts != nil && ambigAlts.contains(i) { pairs = append(pairs, NewPredPrediction(pred, i)) } @@ -826,51 +851,42 @@ func (p *ParserATNSimulator) getPredicatePredictions(ambigAlts *BitSet, altToPre return pairs } -// This method is used to improve the localization of error messages by -// choosing an alternative rather than panicing a -// {@link NoViableAltException} in particular prediction scenarios where the -// {@link //ERROR} state was reached during ATN simulation. 
+// getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule is used to improve the localization of error messages by +// choosing an alternative rather than panic a NoViableAltException in particular prediction scenarios where the +// Error state was reached during [ATN] simulation. // -//

-// The default implementation of p method uses the following -// algorithm to identify an ATN configuration which successfully parsed the +// The default implementation of this method uses the following +// algorithm to identify an [ATN] configuration which successfully parsed the // decision entry rule. Choosing such an alternative ensures that the -// {@link ParserRuleContext} returned by the calling rule will be complete +// [ParserRuleContext] returned by the calling rule will be complete // and valid, and the syntax error will be Reported later at a more -// localized location.

+// localized location. // -//
-// <ul>
-// <li>If a syntactically valid path or paths reach the end of the decision rule and
-// they are semantically valid if predicated, return the min associated alt.</li>
-// <li>Else, if a semantically invalid but syntactically valid path exist
-// or paths exist, return the minimum associated alt.
-// </li>
-// <li>Otherwise, return {@link ATN//INVALID_ALT_NUMBER}.</li>
-// </ul>
-//
+// - If a syntactically valid path or paths reach the end of the decision rule, and +// they are semantically valid if predicated, return the min associated alt. +// - Else, if a semantically invalid but syntactically valid path exist +// or paths exist, return the minimum associated alt. +// - Otherwise, return [ATNInvalidAltNumber]. // -//

// In some scenarios, the algorithm described above could predict an -// alternative which will result in a {@link FailedPredicateException} in -// the parser. Specifically, p could occur if the only configuration +// alternative which will result in a [FailedPredicateException] in +// the parser. Specifically, this could occur if the only configuration // capable of successfully parsing to the end of the decision rule is -// blocked by a semantic predicate. By choosing p alternative within -// {@link //AdaptivePredict} instead of panicing a -// {@link NoViableAltException}, the resulting -// {@link FailedPredicateException} in the parser will identify the specific +// blocked by a semantic predicate. By choosing this alternative within +// [AdaptivePredict] instead of panic a [NoViableAltException], the resulting +// [FailedPredicateException] in the parser will identify the specific // predicate which is preventing the parser from successfully parsing the // decision rule, which helps developers identify and correct logic errors // in semantic predicates. -//

// -// @param configs The ATN configurations which were valid immediately before -// the {@link //ERROR} state was reached -// @param outerContext The is the \gamma_0 initial parser context from the paper +// pass in the configs holding ATN configurations which were valid immediately before +// the ERROR state was reached, outerContext as the initial parser context from the paper // or the parser stack at the instant before prediction commences. // -// @return The value to return from {@link //AdaptivePredict}, or -// {@link ATN//INVALID_ALT_NUMBER} if a suitable alternative was not -// identified and {@link //AdaptivePredict} should Report an error instead. -func (p *ParserATNSimulator) getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(configs ATNConfigSet, outerContext ParserRuleContext) int { +// Teh func returns the value to return from [AdaptivePredict], or +// [ATNInvalidAltNumber] if a suitable alternative was not +// identified and [AdaptivePredict] should report an error instead. +func (p *ParserATNSimulator) getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(configs *ATNConfigSet, outerContext ParserRuleContext) int { cfgs := p.splitAccordingToSemanticValidity(configs, outerContext) semValidConfigs := cfgs[0] semInvalidConfigs := cfgs[1] @@ -879,7 +895,7 @@ func (p *ParserATNSimulator) getSynValidOrSemInvalidAltThatFinishedDecisionEntry return alt } // Is there a syntactically valid path with a failed pred? - if len(semInvalidConfigs.GetItems()) > 0 { + if len(semInvalidConfigs.configs) > 0 { alt = p.GetAltThatFinishedDecisionEntryRule(semInvalidConfigs) if alt != ATNInvalidAltNumber { // syntactically viable path exists return alt @@ -888,10 +904,10 @@ func (p *ParserATNSimulator) getSynValidOrSemInvalidAltThatFinishedDecisionEntry return ATNInvalidAltNumber } -func (p *ParserATNSimulator) GetAltThatFinishedDecisionEntryRule(configs ATNConfigSet) int { +func (p *ParserATNSimulator) GetAltThatFinishedDecisionEntryRule(configs *ATNConfigSet) int { alts := NewIntervalSet() - for _, c := range configs.GetItems() { + for _, c := range configs.configs { _, ok := c.GetState().(*RuleStopState) if c.GetReachesIntoOuterContext() > 0 || (ok && c.GetContext().hasEmptyPath()) { @@ -915,14 +931,14 @@ func (p *ParserATNSimulator) GetAltThatFinishedDecisionEntryRule(configs ATNConf // prediction, which is where predicates need to evaluate. 
type ATNConfigSetPair struct { - item0, item1 ATNConfigSet + item0, item1 *ATNConfigSet } -func (p *ParserATNSimulator) splitAccordingToSemanticValidity(configs ATNConfigSet, outerContext ParserRuleContext) []ATNConfigSet { - succeeded := NewBaseATNConfigSet(configs.FullContext()) - failed := NewBaseATNConfigSet(configs.FullContext()) +func (p *ParserATNSimulator) splitAccordingToSemanticValidity(configs *ATNConfigSet, outerContext ParserRuleContext) []*ATNConfigSet { + succeeded := NewATNConfigSet(configs.fullCtx) + failed := NewATNConfigSet(configs.fullCtx) - for _, c := range configs.GetItems() { + for _, c := range configs.configs { if c.GetSemanticContext() != SemanticContextNone { predicateEvaluationResult := c.GetSemanticContext().evaluate(p.parser, outerContext) if predicateEvaluationResult { @@ -934,15 +950,16 @@ func (p *ParserATNSimulator) splitAccordingToSemanticValidity(configs ATNConfigS succeeded.Add(c, nil) } } - return []ATNConfigSet{succeeded, failed} + return []*ATNConfigSet{succeeded, failed} } -// Look through a list of predicate/alt pairs, returning alts for the +// evalSemanticContext looks through a list of predicate/alt pairs, returning alts for the +// pairs that win. A [SemanticContextNone] predicate indicates an alt containing an +// un-predicated runtimeConfig which behaves as "always true." If !complete +// then we stop at the first predicate that evaluates to true. This +// includes pairs with nil predicates. // -// pairs that win. A {@code NONE} predicate indicates an alt containing an -// unpredicated config which behaves as "always true." If !complete -// then we stop at the first predicate that evaluates to true. This -// includes pairs with nil predicates. +//goland:noinspection GoBoolExpressions func (p *ParserATNSimulator) evalSemanticContext(predPredictions []*PredPrediction, outerContext ParserRuleContext, complete bool) *BitSet { predictions := NewBitSet() for i := 0; i < len(predPredictions); i++ { @@ -956,11 +973,11 @@ func (p *ParserATNSimulator) evalSemanticContext(predPredictions []*PredPredicti } predicateEvaluationResult := pair.pred.evaluate(p.parser, outerContext) - if ParserATNSimulatorDebug || ParserATNSimulatorDFADebug { + if runtimeConfig.parserATNSimulatorDebug || runtimeConfig.parserATNSimulatorDFADebug { fmt.Println("eval pred " + pair.String() + "=" + fmt.Sprint(predicateEvaluationResult)) } if predicateEvaluationResult { - if ParserATNSimulatorDebug || ParserATNSimulatorDFADebug { + if runtimeConfig.parserATNSimulatorDebug || runtimeConfig.parserATNSimulatorDFADebug { fmt.Println("PREDICT " + fmt.Sprint(pair.alt)) } predictions.add(pair.alt) @@ -972,19 +989,82 @@ func (p *ParserATNSimulator) evalSemanticContext(predPredictions []*PredPredicti return predictions } -func (p *ParserATNSimulator) closure(config ATNConfig, configs ATNConfigSet, closureBusy *JStore[ATNConfig, Comparator[ATNConfig]], collectPredicates, fullCtx, treatEOFAsEpsilon bool) { +func (p *ParserATNSimulator) closure(config *ATNConfig, configs *ATNConfigSet, closureBusy *ClosureBusy, collectPredicates, fullCtx, treatEOFAsEpsilon bool) { initialDepth := 0 p.closureCheckingStopState(config, configs, closureBusy, collectPredicates, fullCtx, initialDepth, treatEOFAsEpsilon) } -func (p *ParserATNSimulator) closureCheckingStopState(config ATNConfig, configs ATNConfigSet, closureBusy *JStore[ATNConfig, Comparator[ATNConfig]], collectPredicates, fullCtx bool, depth int, treatEOFAsEpsilon bool) { - if ParserATNSimulatorTraceATNSim { +func (p *ParserATNSimulator) 
closureCheckingStopState(config *ATNConfig, configs *ATNConfigSet, closureBusy *ClosureBusy, collectPredicates, fullCtx bool, depth int, treatEOFAsEpsilon bool) { + if runtimeConfig.parserATNSimulatorTraceATNSim { fmt.Println("closure(" + config.String() + ")") - //fmt.Println("configs(" + configs.String() + ")") - if config.GetReachesIntoOuterContext() > 50 { - panic("problem") + } + + var stack []*ATNConfig + visited := make(map[*ATNConfig]bool) + + stack = append(stack, config) + + for len(stack) > 0 { + currConfig := stack[len(stack)-1] + stack = stack[:len(stack)-1] + + if _, ok := visited[currConfig]; ok { + continue + } + visited[currConfig] = true + + if _, ok := currConfig.GetState().(*RuleStopState); ok { + // We hit rule end. If we have context info, use it + // run thru all possible stack tops in ctx + if !currConfig.GetContext().isEmpty() { + for i := 0; i < currConfig.GetContext().length(); i++ { + if currConfig.GetContext().getReturnState(i) == BasePredictionContextEmptyReturnState { + if fullCtx { + nb := NewATNConfig1(currConfig, currConfig.GetState(), BasePredictionContextEMPTY) + configs.Add(nb, p.mergeCache) + continue + } else { + // we have no context info, just chase follow links (if greedy) + if runtimeConfig.parserATNSimulatorDebug { + fmt.Println("FALLING off rule " + p.getRuleName(currConfig.GetState().GetRuleIndex())) + } + p.closureWork(currConfig, configs, closureBusy, collectPredicates, fullCtx, depth, treatEOFAsEpsilon) + } + continue + } + returnState := p.atn.states[currConfig.GetContext().getReturnState(i)] + newContext := currConfig.GetContext().GetParent(i) // "pop" return state + + c := NewATNConfig5(returnState, currConfig.GetAlt(), newContext, currConfig.GetSemanticContext()) + // While we have context to pop back from, we may have + // gotten that context AFTER having falling off a rule. + // Make sure we track that we are now out of context. 
+ c.SetReachesIntoOuterContext(currConfig.GetReachesIntoOuterContext()) + + stack = append(stack, c) + } + continue + } else if fullCtx { + // reached end of start rule + configs.Add(currConfig, p.mergeCache) + continue + } else { + // else if we have no context info, just chase follow links (if greedy) + if runtimeConfig.parserATNSimulatorDebug { + fmt.Println("FALLING off rule " + p.getRuleName(currConfig.GetState().GetRuleIndex())) + } + } } + + p.closureWork(currConfig, configs, closureBusy, collectPredicates, fullCtx, depth, treatEOFAsEpsilon) + } +} + +//goland:noinspection GoBoolExpressions +func (p *ParserATNSimulator) closureCheckingStopStateRecursive(config *ATNConfig, configs *ATNConfigSet, closureBusy *ClosureBusy, collectPredicates, fullCtx bool, depth int, treatEOFAsEpsilon bool) { + if runtimeConfig.parserATNSimulatorTraceATNSim { + fmt.Println("closure(" + config.String() + ")") } if _, ok := config.GetState().(*RuleStopState); ok { @@ -994,11 +1074,12 @@ func (p *ParserATNSimulator) closureCheckingStopState(config ATNConfig, configs for i := 0; i < config.GetContext().length(); i++ { if config.GetContext().getReturnState(i) == BasePredictionContextEmptyReturnState { if fullCtx { - configs.Add(NewBaseATNConfig1(config, config.GetState(), BasePredictionContextEMPTY), p.mergeCache) + nb := NewATNConfig1(config, config.GetState(), BasePredictionContextEMPTY) + configs.Add(nb, p.mergeCache) continue } else { // we have no context info, just chase follow links (if greedy) - if ParserATNSimulatorDebug { + if runtimeConfig.parserATNSimulatorDebug { fmt.Println("FALLING off rule " + p.getRuleName(config.GetState().GetRuleIndex())) } p.closureWork(config, configs, closureBusy, collectPredicates, fullCtx, depth, treatEOFAsEpsilon) @@ -1008,7 +1089,7 @@ func (p *ParserATNSimulator) closureCheckingStopState(config ATNConfig, configs returnState := p.atn.states[config.GetContext().getReturnState(i)] newContext := config.GetContext().GetParent(i) // "pop" return state - c := NewBaseATNConfig5(returnState, config.GetAlt(), newContext, config.GetSemanticContext()) + c := NewATNConfig5(returnState, config.GetAlt(), newContext, config.GetSemanticContext()) // While we have context to pop back from, we may have // gotten that context AFTER having falling off a rule. // Make sure we track that we are now out of context. 
@@ -1022,7 +1103,7 @@ func (p *ParserATNSimulator) closureCheckingStopState(config ATNConfig, configs return } else { // else if we have no context info, just chase follow links (if greedy) - if ParserATNSimulatorDebug { + if runtimeConfig.parserATNSimulatorDebug { fmt.Println("FALLING off rule " + p.getRuleName(config.GetState().GetRuleIndex())) } } @@ -1030,8 +1111,10 @@ func (p *ParserATNSimulator) closureCheckingStopState(config ATNConfig, configs p.closureWork(config, configs, closureBusy, collectPredicates, fullCtx, depth, treatEOFAsEpsilon) } -// Do the actual work of walking epsilon edges// -func (p *ParserATNSimulator) closureWork(config ATNConfig, configs ATNConfigSet, closureBusy *JStore[ATNConfig, Comparator[ATNConfig]], collectPredicates, fullCtx bool, depth int, treatEOFAsEpsilon bool) { +// Do the actual work of walking epsilon edges +// +//goland:noinspection GoBoolExpressions +func (p *ParserATNSimulator) closureWork(config *ATNConfig, configs *ATNConfigSet, closureBusy *ClosureBusy, collectPredicates, fullCtx bool, depth int, treatEOFAsEpsilon bool) { state := config.GetState() // optimization if !state.GetEpsilonOnlyTransitions() { @@ -1048,7 +1131,7 @@ func (p *ParserATNSimulator) closureWork(config ATNConfig, configs ATNConfigSet, _, ok := t.(*ActionTransition) continueCollecting := collectPredicates && !ok c := p.getEpsilonTarget(config, t, continueCollecting, depth == 0, fullCtx, treatEOFAsEpsilon) - if ci, ok := c.(*BaseATNConfig); ok && ci != nil { + if c != nil { newDepth := depth if _, ok := config.GetState().(*RuleStopState); ok { @@ -1056,7 +1139,7 @@ func (p *ParserATNSimulator) closureWork(config ATNConfig, configs ATNConfigSet, // We can't get here if incoming config was rule stop and we had context // track how far we dip into outer context. Might // come in handy and we avoid evaluating context dependent - // preds if p is > 0. + // preds if this is > 0. if p.dfa != nil && p.dfa.getPrecedenceDfa() { if t.(*EpsilonTransition).outermostPrecedenceReturn == p.dfa.atnStartState.GetRuleIndex() { @@ -1072,9 +1155,9 @@ func (p *ParserATNSimulator) closureWork(config ATNConfig, configs ATNConfigSet, continue } - configs.SetDipsIntoOuterContext(true) // TODO: can remove? only care when we add to set per middle of p method + configs.dipsIntoOuterContext = true // TODO: can remove? 
only care when we add to set per middle of this method newDepth-- - if ParserATNSimulatorDebug { + if runtimeConfig.parserATNSimulatorDebug { fmt.Println("dips into outer ctx: " + c.String()) } } else { @@ -1098,8 +1181,9 @@ func (p *ParserATNSimulator) closureWork(config ATNConfig, configs ATNConfigSet, } } -func (p *ParserATNSimulator) canDropLoopEntryEdgeInLeftRecursiveRule(config ATNConfig) bool { - if TurnOffLRLoopEntryBranchOpt { +//goland:noinspection GoBoolExpressions +func (p *ParserATNSimulator) canDropLoopEntryEdgeInLeftRecursiveRule(config *ATNConfig) bool { + if !runtimeConfig.lRLoopEntryBranchOpt { return false } @@ -1196,7 +1280,7 @@ func (p *ParserATNSimulator) getRuleName(index int) string { return sb.String() } -func (p *ParserATNSimulator) getEpsilonTarget(config ATNConfig, t Transition, collectPredicates, inContext, fullCtx, treatEOFAsEpsilon bool) ATNConfig { +func (p *ParserATNSimulator) getEpsilonTarget(config *ATNConfig, t Transition, collectPredicates, inContext, fullCtx, treatEOFAsEpsilon bool) *ATNConfig { switch t.getSerializationType() { case TransitionRULE: @@ -1208,13 +1292,13 @@ func (p *ParserATNSimulator) getEpsilonTarget(config ATNConfig, t Transition, co case TransitionACTION: return p.actionTransition(config, t.(*ActionTransition)) case TransitionEPSILON: - return NewBaseATNConfig4(config, t.getTarget()) + return NewATNConfig4(config, t.getTarget()) case TransitionATOM, TransitionRANGE, TransitionSET: // EOF transitions act like epsilon transitions after the first EOF // transition is traversed if treatEOFAsEpsilon { if t.Matches(TokenEOF, 0, 1) { - return NewBaseATNConfig4(config, t.getTarget()) + return NewATNConfig4(config, t.getTarget()) } } return nil @@ -1223,60 +1307,63 @@ func (p *ParserATNSimulator) getEpsilonTarget(config ATNConfig, t Transition, co } } -func (p *ParserATNSimulator) actionTransition(config ATNConfig, t *ActionTransition) *BaseATNConfig { - if ParserATNSimulatorDebug { +//goland:noinspection GoBoolExpressions +func (p *ParserATNSimulator) actionTransition(config *ATNConfig, t *ActionTransition) *ATNConfig { + if runtimeConfig.parserATNSimulatorDebug { fmt.Println("ACTION edge " + strconv.Itoa(t.ruleIndex) + ":" + strconv.Itoa(t.actionIndex)) } - return NewBaseATNConfig4(config, t.getTarget()) + return NewATNConfig4(config, t.getTarget()) } -func (p *ParserATNSimulator) precedenceTransition(config ATNConfig, - pt *PrecedencePredicateTransition, collectPredicates, inContext, fullCtx bool) *BaseATNConfig { +//goland:noinspection GoBoolExpressions +func (p *ParserATNSimulator) precedenceTransition(config *ATNConfig, + pt *PrecedencePredicateTransition, collectPredicates, inContext, fullCtx bool) *ATNConfig { - if ParserATNSimulatorDebug { + if runtimeConfig.parserATNSimulatorDebug { fmt.Println("PRED (collectPredicates=" + fmt.Sprint(collectPredicates) + ") " + strconv.Itoa(pt.precedence) + ">=_p, ctx dependent=true") if p.parser != nil { fmt.Println("context surrounding pred is " + fmt.Sprint(p.parser.GetRuleInvocationStack(nil))) } } - var c *BaseATNConfig + var c *ATNConfig if collectPredicates && inContext { if fullCtx { // In full context mode, we can evaluate predicates on-the-fly // during closure, which dramatically reduces the size of - // the config sets. It also obviates the need to test predicates + // the runtimeConfig sets. It also obviates the need to test predicates // later during conflict resolution. 
currentPosition := p.input.Index() p.input.Seek(p.startIndex) predSucceeds := pt.getPredicate().evaluate(p.parser, p.outerContext) p.input.Seek(currentPosition) if predSucceeds { - c = NewBaseATNConfig4(config, pt.getTarget()) // no pred context + c = NewATNConfig4(config, pt.getTarget()) // no pred context } } else { newSemCtx := SemanticContextandContext(config.GetSemanticContext(), pt.getPredicate()) - c = NewBaseATNConfig3(config, pt.getTarget(), newSemCtx) + c = NewATNConfig3(config, pt.getTarget(), newSemCtx) } } else { - c = NewBaseATNConfig4(config, pt.getTarget()) + c = NewATNConfig4(config, pt.getTarget()) } - if ParserATNSimulatorDebug { - fmt.Println("config from pred transition=" + c.String()) + if runtimeConfig.parserATNSimulatorDebug { + fmt.Println("runtimeConfig from pred transition=" + c.String()) } return c } -func (p *ParserATNSimulator) predTransition(config ATNConfig, pt *PredicateTransition, collectPredicates, inContext, fullCtx bool) *BaseATNConfig { +//goland:noinspection GoBoolExpressions +func (p *ParserATNSimulator) predTransition(config *ATNConfig, pt *PredicateTransition, collectPredicates, inContext, fullCtx bool) *ATNConfig { - if ParserATNSimulatorDebug { + if runtimeConfig.parserATNSimulatorDebug { fmt.Println("PRED (collectPredicates=" + fmt.Sprint(collectPredicates) + ") " + strconv.Itoa(pt.ruleIndex) + ":" + strconv.Itoa(pt.predIndex) + ", ctx dependent=" + fmt.Sprint(pt.isCtxDependent)) if p.parser != nil { fmt.Println("context surrounding pred is " + fmt.Sprint(p.parser.GetRuleInvocationStack(nil))) } } - var c *BaseATNConfig + var c *ATNConfig if collectPredicates && (!pt.isCtxDependent || inContext) { if fullCtx { // In full context mode, we can evaluate predicates on-the-fly @@ -1288,78 +1375,92 @@ func (p *ParserATNSimulator) predTransition(config ATNConfig, pt *PredicateTrans predSucceeds := pt.getPredicate().evaluate(p.parser, p.outerContext) p.input.Seek(currentPosition) if predSucceeds { - c = NewBaseATNConfig4(config, pt.getTarget()) // no pred context + c = NewATNConfig4(config, pt.getTarget()) // no pred context } } else { newSemCtx := SemanticContextandContext(config.GetSemanticContext(), pt.getPredicate()) - c = NewBaseATNConfig3(config, pt.getTarget(), newSemCtx) + c = NewATNConfig3(config, pt.getTarget(), newSemCtx) } } else { - c = NewBaseATNConfig4(config, pt.getTarget()) + c = NewATNConfig4(config, pt.getTarget()) } - if ParserATNSimulatorDebug { + if runtimeConfig.parserATNSimulatorDebug { fmt.Println("config from pred transition=" + c.String()) } return c } -func (p *ParserATNSimulator) ruleTransition(config ATNConfig, t *RuleTransition) *BaseATNConfig { - if ParserATNSimulatorDebug { +//goland:noinspection GoBoolExpressions +func (p *ParserATNSimulator) ruleTransition(config *ATNConfig, t *RuleTransition) *ATNConfig { + if runtimeConfig.parserATNSimulatorDebug { fmt.Println("CALL rule " + p.getRuleName(t.getTarget().GetRuleIndex()) + ", ctx=" + config.GetContext().String()) } returnState := t.followState newContext := SingletonBasePredictionContextCreate(config.GetContext(), returnState.GetStateNumber()) - return NewBaseATNConfig1(config, t.getTarget(), newContext) + return NewATNConfig1(config, t.getTarget(), newContext) } -func (p *ParserATNSimulator) getConflictingAlts(configs ATNConfigSet) *BitSet { +func (p *ParserATNSimulator) getConflictingAlts(configs *ATNConfigSet) *BitSet { altsets := PredictionModegetConflictingAltSubsets(configs) return PredictionModeGetAlts(altsets) } -// Sam pointed out a problem with the previous 
definition, v3, of +// getConflictingAltsOrUniqueAlt Sam pointed out a problem with the previous definition, v3, of // ambiguous states. If we have another state associated with conflicting // alternatives, we should keep going. For example, the following grammar // -// s : (ID | ID ID?) '' +// s : (ID | ID ID?) ; +// +// When the [ATN] simulation reaches the state before ;, it has a [DFA] +// state that looks like: +// +// [12|1|[], 6|2|[], 12|2|[]]. +// +// Naturally +// +// 12|1|[] and 12|2|[] +// +// conflict, but we cannot stop processing this node +// because alternative to has another way to continue, via +// +// [6|2|[]]. // -// When the ATN simulation reaches the state before '', it has a DFA -// state that looks like: [12|1|[], 6|2|[], 12|2|[]]. Naturally -// 12|1|[] and 12|2|[] conflict, but we cannot stop processing p node -// because alternative to has another way to continue, via [6|2|[]]. // The key is that we have a single state that has config's only associated // with a single alternative, 2, and crucially the state transitions // among the configurations are all non-epsilon transitions. That means // we don't consider any conflicts that include alternative 2. So, we // ignore the conflict between alts 1 and 2. We ignore a set of // conflicting alts when there is an intersection with an alternative -// associated with a single alt state in the state&rarrconfig-list map. +// associated with a single alt state in the state config-list map. // // It's also the case that we might have two conflicting configurations but -// also a 3rd nonconflicting configuration for a different alternative: -// [1|1|[], 1|2|[], 8|3|[]]. This can come about from grammar: +// also a 3rd non-conflicting configuration for a different alternative: +// +// [1|1|[], 1|2|[], 8|3|[]]. +// +// This can come about from grammar: // -// a : A | A | A B +// a : A | A | A B // // After Matching input A, we reach the stop state for rule A, state 1. // State 8 is the state right before B. Clearly alternatives 1 and 2 // conflict and no amount of further lookahead will separate the two. -// However, alternative 3 will be able to continue and so we do not -// stop working on p state. In the previous example, we're concerned +// However, alternative 3 will be able to continue, so we do not +// stop working on this state. +// +// In the previous example, we're concerned // with states associated with the conflicting alternatives. Here alt // 3 is not associated with the conflicting configs, but since we can continue // looking for input reasonably, I don't declare the state done. We // ignore a set of conflicting alts when we have an alternative // that we still need to pursue. -// - -func (p *ParserATNSimulator) getConflictingAltsOrUniqueAlt(configs ATNConfigSet) *BitSet { +func (p *ParserATNSimulator) getConflictingAltsOrUniqueAlt(configs *ATNConfigSet) *BitSet { var conflictingAlts *BitSet - if configs.GetUniqueAlt() != ATNInvalidAltNumber { + if configs.uniqueAlt != ATNInvalidAltNumber { conflictingAlts = NewBitSet() - conflictingAlts.add(configs.GetUniqueAlt()) + conflictingAlts.add(configs.uniqueAlt) } else { - conflictingAlts = configs.GetConflictingAlts() + conflictingAlts = configs.conflictingAlts } return conflictingAlts } @@ -1384,11 +1485,10 @@ func (p *ParserATNSimulator) getLookaheadName(input TokenStream) string { return p.GetTokenName(input.LA(1)) } -// Used for debugging in AdaptivePredict around execATN but I cut -// -// it out for clarity now that alg. works well. 
We can leave p -// "dead" code for a bit. -func (p *ParserATNSimulator) dumpDeadEndConfigs(nvae *NoViableAltException) { +// Used for debugging in [AdaptivePredict] around [execATN], but I cut +// it out for clarity now that alg. works well. We can leave this +// "dead" code for a bit. +func (p *ParserATNSimulator) dumpDeadEndConfigs(_ *NoViableAltException) { panic("Not implemented") @@ -1418,13 +1518,13 @@ func (p *ParserATNSimulator) dumpDeadEndConfigs(nvae *NoViableAltException) { // } } -func (p *ParserATNSimulator) noViableAlt(input TokenStream, outerContext ParserRuleContext, configs ATNConfigSet, startIndex int) *NoViableAltException { +func (p *ParserATNSimulator) noViableAlt(input TokenStream, outerContext ParserRuleContext, configs *ATNConfigSet, startIndex int) *NoViableAltException { return NewNoViableAltException(p.parser, input, input.Get(startIndex), input.LT(1), configs, outerContext) } -func (p *ParserATNSimulator) getUniqueAlt(configs ATNConfigSet) int { +func (p *ParserATNSimulator) getUniqueAlt(configs *ATNConfigSet) int { alt := ATNInvalidAltNumber - for _, c := range configs.GetItems() { + for _, c := range configs.configs { if alt == ATNInvalidAltNumber { alt = c.GetAlt() // found first alt } else if c.GetAlt() != alt { @@ -1452,8 +1552,10 @@ func (p *ParserATNSimulator) getUniqueAlt(configs ATNConfigSet) int { // @return If {@code to} is {@code nil}, p method returns {@code nil} // otherwise p method returns the result of calling {@link //addDFAState} // on {@code to} +// +//goland:noinspection GoBoolExpressions func (p *ParserATNSimulator) addDFAEdge(dfa *DFA, from *DFAState, t int, to *DFAState) *DFAState { - if ParserATNSimulatorDebug { + if runtimeConfig.parserATNSimulatorDebug { fmt.Println("EDGE " + from.String() + " -> " + to.String() + " upon " + p.GetTokenName(t)) } if to == nil { @@ -1472,7 +1574,7 @@ func (p *ParserATNSimulator) addDFAEdge(dfa *DFA, from *DFAState, t int, to *DFA from.setIthEdge(t+1, to) // connect p.atn.edgeMu.Unlock() - if ParserATNSimulatorDebug { + if runtimeConfig.parserATNSimulatorDebug { var names []string if p.parser != nil { names = p.parser.GetLiteralNames() @@ -1483,48 +1585,49 @@ func (p *ParserATNSimulator) addDFAEdge(dfa *DFA, from *DFAState, t int, to *DFA return to } -// Add state {@code D} to the DFA if it is not already present, and return -// the actual instance stored in the DFA. If a state equivalent to {@code D} -// is already in the DFA, the existing state is returned. Otherwise p -// method returns {@code D} after adding it to the DFA. +// addDFAState adds state D to the [DFA] if it is not already present, and returns +// the actual instance stored in the [DFA]. If a state equivalent to D +// is already in the [DFA], the existing state is returned. Otherwise, this +// method returns D after adding it to the [DFA]. // -//

-// If {@code D} is {@link //ERROR}, p method returns {@link //ERROR} and
-// does not change the DFA.
+// If D is [ATNSimulatorError], this method returns [ATNSimulatorError] and +// does not change the DFA. // -// @param dfa The dfa -// @param D The DFA state to add -// @return The state stored in the DFA. This will be either the existing -// state if {@code D} is already in the DFA, or {@code D} itself if the -// state was not already present. +//goland:noinspection GoBoolExpressions func (p *ParserATNSimulator) addDFAState(dfa *DFA, d *DFAState) *DFAState { if d == ATNSimulatorError { return d } - existing, present := dfa.states.Get(d) + + existing, present := dfa.Get(d) if present { - if ParserATNSimulatorTraceATNSim { + if runtimeConfig.parserATNSimulatorTraceATNSim { fmt.Print("addDFAState " + d.String() + " exists") } return existing } - // The state was not present, so update it with configs + // The state will be added if not already there or we will be given back the existing state struct + // if it is present. // - d.stateNumber = dfa.states.Len() - if !d.configs.ReadOnly() { - d.configs.OptimizeConfigs(p.BaseATNSimulator) - d.configs.SetReadOnly(true) + d.stateNumber = dfa.Len() + if !d.configs.readOnly { + d.configs.OptimizeConfigs(&p.BaseATNSimulator) + d.configs.readOnly = true + d.configs.configLookup = nil } - dfa.states.Put(d) - if ParserATNSimulatorTraceATNSim { + dfa.Put(d) + + if runtimeConfig.parserATNSimulatorTraceATNSim { fmt.Println("addDFAState new " + d.String()) } return d } -func (p *ParserATNSimulator) ReportAttemptingFullContext(dfa *DFA, conflictingAlts *BitSet, configs ATNConfigSet, startIndex, stopIndex int) { - if ParserATNSimulatorDebug || ParserATNSimulatorRetryDebug { +//goland:noinspection GoBoolExpressions +func (p *ParserATNSimulator) ReportAttemptingFullContext(dfa *DFA, conflictingAlts *BitSet, configs *ATNConfigSet, startIndex, stopIndex int) { + if runtimeConfig.parserATNSimulatorDebug || runtimeConfig.parserATNSimulatorRetryDebug { interval := NewInterval(startIndex, stopIndex+1) fmt.Println("ReportAttemptingFullContext decision=" + strconv.Itoa(dfa.decision) + ":" + configs.String() + ", input=" + p.parser.GetTokenStream().GetTextFromInterval(interval)) @@ -1534,8 +1637,9 @@ func (p *ParserATNSimulator) ReportAttemptingFullContext(dfa *DFA, conflictingAl } } -func (p *ParserATNSimulator) ReportContextSensitivity(dfa *DFA, prediction int, configs ATNConfigSet, startIndex, stopIndex int) { - if ParserATNSimulatorDebug || ParserATNSimulatorRetryDebug { +//goland:noinspection GoBoolExpressions +func (p *ParserATNSimulator) ReportContextSensitivity(dfa *DFA, prediction int, configs *ATNConfigSet, startIndex, stopIndex int) { + if runtimeConfig.parserATNSimulatorDebug || runtimeConfig.parserATNSimulatorRetryDebug { interval := NewInterval(startIndex, stopIndex+1) fmt.Println("ReportContextSensitivity decision=" + strconv.Itoa(dfa.decision) + ":" + configs.String() + ", input=" + p.parser.GetTokenStream().GetTextFromInterval(interval)) @@ -1545,10 +1649,15 @@ func (p *ParserATNSimulator) ReportContextSensitivity(dfa *DFA, prediction int, } } -// If context sensitive parsing, we know it's ambiguity not conflict// -func (p *ParserATNSimulator) ReportAmbiguity(dfa *DFA, D *DFAState, startIndex, stopIndex int, - exact bool, ambigAlts *BitSet, configs ATNConfigSet) { - if ParserATNSimulatorDebug || ParserATNSimulatorRetryDebug { +// ReportAmbiguity reports and ambiguity in the parse, which shows that the parser will explore a different route. 
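ReportAmbiguity and the related Report* hooks above only print when the debug flags are on; applications normally observe these reports through an error listener. Below is a minimal sketch of wiring that up with the runtime's DiagnosticErrorListener. The generated package example.com/demo/parser, its NewDemoLexer/NewDemoParser constructors and the Prog entry rule are hypothetical names, not part of this change.

package main

import (
	"github.com/antlr4-go/antlr/v4"

	"example.com/demo/parser" // hypothetical generated lexer/parser package
)

func main() {
	input := antlr.NewInputStream("a b c")
	lexer := parser.NewDemoLexer(input) // hypothetical generated lexer
	tokens := antlr.NewCommonTokenStream(lexer, antlr.TokenDefaultChannel)
	p := parser.NewDemoParser(tokens) // hypothetical generated parser

	// Ask the simulator for the exact set of ambiguous alternatives rather
	// than stopping at the first viable one.
	p.GetInterpreter().SetPredictionMode(antlr.PredictionModeLLExactAmbigDetection)

	// DiagnosticErrorListener receives the ReportAmbiguity / ReportAttemptingFullContext /
	// ReportContextSensitivity callbacks; true limits output to exact ambiguities.
	p.AddErrorListener(antlr.NewDiagnosticErrorListener(true))

	p.Prog() // hypothetical entry rule; ambiguities are reported as it parses
}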
+// +// If context-sensitive parsing, we know it's an ambiguity not a conflict or error, but we can report it to the developer +// so that they can see that this is happening and can take action if they want to. +// +//goland:noinspection GoBoolExpressions +func (p *ParserATNSimulator) ReportAmbiguity(dfa *DFA, _ *DFAState, startIndex, stopIndex int, + exact bool, ambigAlts *BitSet, configs *ATNConfigSet) { + if runtimeConfig.parserATNSimulatorDebug || runtimeConfig.parserATNSimulatorRetryDebug { interval := NewInterval(startIndex, stopIndex+1) fmt.Println("ReportAmbiguity " + ambigAlts.String() + ":" + configs.String() + ", input=" + p.parser.GetTokenStream().GetTextFromInterval(interval)) diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/parser_rule_context.go b/vendor/github.com/antlr4-go/antlr/v4/parser_rule_context.go similarity index 77% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/parser_rule_context.go rename to vendor/github.com/antlr4-go/antlr/v4/parser_rule_context.go index 1c8cee747..c249bc138 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/parser_rule_context.go +++ b/vendor/github.com/antlr4-go/antlr/v4/parser_rule_context.go @@ -31,7 +31,9 @@ type ParserRuleContext interface { } type BaseParserRuleContext struct { - *BaseRuleContext + parentCtx RuleContext + invokingState int + RuleIndex int start, stop Token exception RecognitionException @@ -40,8 +42,22 @@ type BaseParserRuleContext struct { func NewBaseParserRuleContext(parent ParserRuleContext, invokingStateNumber int) *BaseParserRuleContext { prc := new(BaseParserRuleContext) + InitBaseParserRuleContext(prc, parent, invokingStateNumber) + return prc +} + +func InitBaseParserRuleContext(prc *BaseParserRuleContext, parent ParserRuleContext, invokingStateNumber int) { + // What context invoked b rule? + prc.parentCtx = parent - prc.BaseRuleContext = NewBaseRuleContext(parent, invokingStateNumber) + // What state invoked the rule associated with b context? + // The "return address" is the followState of invokingState + // If parent is nil, b should be -1. + if parent == nil { + prc.invokingState = -1 + } else { + prc.invokingState = invokingStateNumber + } prc.RuleIndex = -1 // * If we are debugging or building a parse tree for a Visitor, @@ -56,8 +72,6 @@ func NewBaseParserRuleContext(parent ParserRuleContext, invokingStateNumber int) // The exception that forced prc rule to return. If the rule successfully // completed, prc is {@code nil}. prc.exception = nil - - return prc } func (prc *BaseParserRuleContext) SetException(e RecognitionException) { @@ -90,14 +104,15 @@ func (prc *BaseParserRuleContext) GetText() string { return s } -// Double dispatch methods for listeners -func (prc *BaseParserRuleContext) EnterRule(listener ParseTreeListener) { +// EnterRule is called when any rule is entered. +func (prc *BaseParserRuleContext) EnterRule(_ ParseTreeListener) { } -func (prc *BaseParserRuleContext) ExitRule(listener ParseTreeListener) { +// ExitRule is called when any rule is exited. 
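EnterRule and ExitRule above are the double-dispatch half of listener traversal: the walker hands each context the listener, and the context calls back into it. A small sketch of the other half, a listener that only overrides EnterEveryRule and is driven by the runtime's default walker; the parse tree itself would come from a generated parser, which is not shown here.

package main

import (
	"fmt"

	"github.com/antlr4-go/antlr/v4"
)

// countingListener counts every context the walker enters. The embedded
// BaseParseTreeListener supplies no-op implementations for the rest of the
// ParseTreeListener interface.
type countingListener struct {
	antlr.BaseParseTreeListener
	rules int
}

func (l *countingListener) EnterEveryRule(_ antlr.ParserRuleContext) {
	l.rules++
}

// countContexts walks a parse tree produced elsewhere and reports how many
// rule contexts were entered via the EnterRule double dispatch.
func countContexts(tree antlr.ParseTree) int {
	l := &countingListener{}
	antlr.ParseTreeWalkerDefault.Walk(l, tree)
	fmt.Println("contexts entered:", l.rules)
	return l.rules
}

func main() {} // countContexts would be called with a tree from a generated parser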
+func (prc *BaseParserRuleContext) ExitRule(_ ParseTreeListener) { } -// * Does not set parent link other add methods do that/// +// * Does not set parent link other add methods do that func (prc *BaseParserRuleContext) addTerminalNodeChild(child TerminalNode) TerminalNode { if prc.children == nil { prc.children = make([]Tree, 0) @@ -120,10 +135,9 @@ func (prc *BaseParserRuleContext) AddChild(child RuleContext) RuleContext { return child } -// * Used by EnterOuterAlt to toss out a RuleContext previously added as -// we entered a rule. If we have // label, we will need to remove -// generic ruleContext object. -// / +// RemoveLastChild is used by [EnterOuterAlt] to toss out a [RuleContext] previously added as +// we entered a rule. If we have a label, we will need to remove +// the generic ruleContext object. func (prc *BaseParserRuleContext) RemoveLastChild() { if prc.children != nil && len(prc.children) > 0 { prc.children = prc.children[0 : len(prc.children)-1] @@ -293,7 +307,7 @@ func (prc *BaseParserRuleContext) GetChildCount() int { return len(prc.children) } -func (prc *BaseParserRuleContext) GetSourceInterval() *Interval { +func (prc *BaseParserRuleContext) GetSourceInterval() Interval { if prc.start == nil || prc.stop == nil { return TreeInvalidInterval } @@ -340,6 +354,50 @@ func (prc *BaseParserRuleContext) String(ruleNames []string, stop RuleContext) s return s } +func (prc *BaseParserRuleContext) SetParent(v Tree) { + if v == nil { + prc.parentCtx = nil + } else { + prc.parentCtx = v.(RuleContext) + } +} + +func (prc *BaseParserRuleContext) GetInvokingState() int { + return prc.invokingState +} + +func (prc *BaseParserRuleContext) SetInvokingState(t int) { + prc.invokingState = t +} + +func (prc *BaseParserRuleContext) GetRuleIndex() int { + return prc.RuleIndex +} + +func (prc *BaseParserRuleContext) GetAltNumber() int { + return ATNInvalidAltNumber +} + +func (prc *BaseParserRuleContext) SetAltNumber(_ int) {} + +// IsEmpty returns true if the context of b is empty. +// +// A context is empty if there is no invoking state, meaning nobody calls +// current context. +func (prc *BaseParserRuleContext) IsEmpty() bool { + return prc.invokingState == -1 +} + +// GetParent returns the combined text of all child nodes. This method only considers +// tokens which have been added to the parse tree. +// +// Since tokens on hidden channels (e.g. whitespace or comments) are not +// added to the parse trees, they will not appear in the output of this +// method. +func (prc *BaseParserRuleContext) GetParent() Tree { + return prc.parentCtx +} + var ParserRuleContextEmpty = NewBaseParserRuleContext(nil, -1) type InterpreterRuleContext interface { @@ -350,6 +408,7 @@ type BaseInterpreterRuleContext struct { *BaseParserRuleContext } +//goland:noinspection GoUnusedExportedFunction func NewBaseInterpreterRuleContext(parent BaseInterpreterRuleContext, invokingStateNumber, ruleIndex int) *BaseInterpreterRuleContext { prc := new(BaseInterpreterRuleContext) diff --git a/vendor/github.com/antlr4-go/antlr/v4/prediction_context.go b/vendor/github.com/antlr4-go/antlr/v4/prediction_context.go new file mode 100644 index 000000000..c1b80cc1f --- /dev/null +++ b/vendor/github.com/antlr4-go/antlr/v4/prediction_context.go @@ -0,0 +1,727 @@ +// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved. +// Use of this file is governed by the BSD 3-clause license that +// can be found in the LICENSE.txt file in the project root. 
+ +package antlr + +import ( + "fmt" + "golang.org/x/exp/slices" + "strconv" +) + +var _emptyPredictionContextHash int + +func init() { + _emptyPredictionContextHash = murmurInit(1) + _emptyPredictionContextHash = murmurFinish(_emptyPredictionContextHash, 0) +} + +func calculateEmptyHash() int { + return _emptyPredictionContextHash +} + +const ( + // BasePredictionContextEmptyReturnState represents {@code $} in an array in full context mode, $ + // doesn't mean wildcard: + // + // $ + x = [$,x] + // + // Here, + // + // $ = EmptyReturnState + BasePredictionContextEmptyReturnState = 0x7FFFFFFF +) + +// TODO: JI These are meant to be atomics - this does not seem to match the Java runtime here +// +//goland:noinspection GoUnusedGlobalVariable +var ( + BasePredictionContextglobalNodeCount = 1 + BasePredictionContextid = BasePredictionContextglobalNodeCount +) + +const ( + PredictionContextEmpty = iota + PredictionContextSingleton + PredictionContextArray +) + +// PredictionContext is a go idiomatic implementation of PredictionContext that does not rty to +// emulate inheritance from Java, and can be used without an interface definition. An interface +// is not required because no user code will ever need to implement this interface. +type PredictionContext struct { + cachedHash int + pcType int + parentCtx *PredictionContext + returnState int + parents []*PredictionContext + returnStates []int +} + +func NewEmptyPredictionContext() *PredictionContext { + nep := &PredictionContext{} + nep.cachedHash = calculateEmptyHash() + nep.pcType = PredictionContextEmpty + nep.returnState = BasePredictionContextEmptyReturnState + return nep +} + +func NewBaseSingletonPredictionContext(parent *PredictionContext, returnState int) *PredictionContext { + pc := &PredictionContext{} + pc.pcType = PredictionContextSingleton + pc.returnState = returnState + pc.parentCtx = parent + if parent != nil { + pc.cachedHash = calculateHash(parent, returnState) + } else { + pc.cachedHash = calculateEmptyHash() + } + return pc +} + +func SingletonBasePredictionContextCreate(parent *PredictionContext, returnState int) *PredictionContext { + if returnState == BasePredictionContextEmptyReturnState && parent == nil { + // someone can pass in the bits of an array ctx that mean $ + return BasePredictionContextEMPTY + } + return NewBaseSingletonPredictionContext(parent, returnState) +} + +func NewArrayPredictionContext(parents []*PredictionContext, returnStates []int) *PredictionContext { + // Parent can be nil only if full ctx mode and we make an array + // from {@link //EMPTY} and non-empty. We merge {@link //EMPTY} by using + // nil parent and + // returnState == {@link //EmptyReturnState}. 
+ hash := murmurInit(1) + for _, parent := range parents { + hash = murmurUpdate(hash, parent.Hash()) + } + for _, returnState := range returnStates { + hash = murmurUpdate(hash, returnState) + } + hash = murmurFinish(hash, len(parents)<<1) + + nec := &PredictionContext{} + nec.cachedHash = hash + nec.pcType = PredictionContextArray + nec.parents = parents + nec.returnStates = returnStates + return nec +} + +func (p *PredictionContext) Hash() int { + return p.cachedHash +} + +func (p *PredictionContext) Equals(other Collectable[*PredictionContext]) bool { + switch p.pcType { + case PredictionContextEmpty: + otherP := other.(*PredictionContext) + return other == nil || otherP == nil || otherP.isEmpty() + case PredictionContextSingleton: + return p.SingletonEquals(other) + case PredictionContextArray: + return p.ArrayEquals(other) + } + return false +} + +func (p *PredictionContext) ArrayEquals(o Collectable[*PredictionContext]) bool { + if o == nil { + return false + } + other := o.(*PredictionContext) + if other == nil || other.pcType != PredictionContextArray { + return false + } + if p.cachedHash != other.Hash() { + return false // can't be same if hash is different + } + + // Must compare the actual array elements and not just the array address + // + return slices.Equal(p.returnStates, other.returnStates) && + slices.EqualFunc(p.parents, other.parents, func(x, y *PredictionContext) bool { + return x.Equals(y) + }) +} + +func (p *PredictionContext) SingletonEquals(other Collectable[*PredictionContext]) bool { + if other == nil { + return false + } + otherP := other.(*PredictionContext) + if otherP == nil { + return false + } + + if p.cachedHash != otherP.Hash() { + return false // Can't be same if hash is different + } + + if p.returnState != otherP.getReturnState(0) { + return false + } + + // Both parents must be nil if one is + if p.parentCtx == nil { + return otherP.parentCtx == nil + } + + return p.parentCtx.Equals(otherP.parentCtx) +} + +func (p *PredictionContext) GetParent(i int) *PredictionContext { + switch p.pcType { + case PredictionContextEmpty: + return nil + case PredictionContextSingleton: + return p.parentCtx + case PredictionContextArray: + return p.parents[i] + } + return nil +} + +func (p *PredictionContext) getReturnState(i int) int { + switch p.pcType { + case PredictionContextArray: + return p.returnStates[i] + default: + return p.returnState + } +} + +func (p *PredictionContext) GetReturnStates() []int { + switch p.pcType { + case PredictionContextArray: + return p.returnStates + default: + return []int{p.returnState} + } +} + +func (p *PredictionContext) length() int { + switch p.pcType { + case PredictionContextArray: + return len(p.returnStates) + default: + return 1 + } +} + +func (p *PredictionContext) hasEmptyPath() bool { + switch p.pcType { + case PredictionContextSingleton: + return p.returnState == BasePredictionContextEmptyReturnState + } + return p.getReturnState(p.length()-1) == BasePredictionContextEmptyReturnState +} + +func (p *PredictionContext) String() string { + switch p.pcType { + case PredictionContextEmpty: + return "$" + case PredictionContextSingleton: + var up string + + if p.parentCtx == nil { + up = "" + } else { + up = p.parentCtx.String() + } + + if len(up) == 0 { + if p.returnState == BasePredictionContextEmptyReturnState { + return "$" + } + + return strconv.Itoa(p.returnState) + } + + return strconv.Itoa(p.returnState) + " " + up + case PredictionContextArray: + if p.isEmpty() { + return "[]" + } + + s := "[" + for i := 0; i < 
len(p.returnStates); i++ { + if i > 0 { + s = s + ", " + } + if p.returnStates[i] == BasePredictionContextEmptyReturnState { + s = s + "$" + continue + } + s = s + strconv.Itoa(p.returnStates[i]) + if !p.parents[i].isEmpty() { + s = s + " " + p.parents[i].String() + } else { + s = s + "nil" + } + } + return s + "]" + + default: + return "unknown" + } +} + +func (p *PredictionContext) isEmpty() bool { + switch p.pcType { + case PredictionContextEmpty: + return true + case PredictionContextArray: + // since EmptyReturnState can only appear in the last position, we + // don't need to verify that size==1 + return p.returnStates[0] == BasePredictionContextEmptyReturnState + default: + return false + } +} + +func (p *PredictionContext) Type() int { + return p.pcType +} + +func calculateHash(parent *PredictionContext, returnState int) int { + h := murmurInit(1) + h = murmurUpdate(h, parent.Hash()) + h = murmurUpdate(h, returnState) + return murmurFinish(h, 2) +} + +// Convert a {@link RuleContext} tree to a {@link BasePredictionContext} graph. +// Return {@link //EMPTY} if {@code outerContext} is empty or nil. +// / +func predictionContextFromRuleContext(a *ATN, outerContext RuleContext) *PredictionContext { + if outerContext == nil { + outerContext = ParserRuleContextEmpty + } + // if we are in RuleContext of start rule, s, then BasePredictionContext + // is EMPTY. Nobody called us. (if we are empty, return empty) + if outerContext.GetParent() == nil || outerContext == ParserRuleContextEmpty { + return BasePredictionContextEMPTY + } + // If we have a parent, convert it to a BasePredictionContext graph + parent := predictionContextFromRuleContext(a, outerContext.GetParent().(RuleContext)) + state := a.states[outerContext.GetInvokingState()] + transition := state.GetTransitions()[0] + + return SingletonBasePredictionContextCreate(parent, transition.(*RuleTransition).followState.GetStateNumber()) +} + +func merge(a, b *PredictionContext, rootIsWildcard bool, mergeCache *JPCMap) *PredictionContext { + + // Share same graph if both same + // + if a == b || a.Equals(b) { + return a + } + + if a.pcType == PredictionContextSingleton && b.pcType == PredictionContextSingleton { + return mergeSingletons(a, b, rootIsWildcard, mergeCache) + } + // At least one of a or b is array + // If one is $ and rootIsWildcard, return $ as wildcard + if rootIsWildcard { + if a.isEmpty() { + return a + } + if b.isEmpty() { + return b + } + } + + // Convert either Singleton or Empty to arrays, so that we can merge them + // + ara := convertToArray(a) + arb := convertToArray(b) + return mergeArrays(ara, arb, rootIsWildcard, mergeCache) +} + +func convertToArray(pc *PredictionContext) *PredictionContext { + switch pc.Type() { + case PredictionContextEmpty: + return NewArrayPredictionContext([]*PredictionContext{}, []int{}) + case PredictionContextSingleton: + return NewArrayPredictionContext([]*PredictionContext{pc.GetParent(0)}, []int{pc.getReturnState(0)}) + default: + // Already an array + } + return pc +} + +// mergeSingletons merges two Singleton [PredictionContext] instances. +// +// Stack tops equal, parents merge is same return left graph. +//

+//
+// Same stack top, parents differ: merge parents giving an array node, then
+// remainders of those graphs. A new root node is created to point to the
+// merged parents.
+//
+// Different stack tops pointing to the same parent: make an array node for
+// the root where both elements in the root point to the same (original)
+// parent.
+//
+// Different stack tops pointing to different parents: make an array node for
+// the root where each element points to the corresponding original
+// parent.
+// +// @param a the first {@link SingletonBasePredictionContext} +// @param b the second {@link SingletonBasePredictionContext} +// @param rootIsWildcard {@code true} if this is a local-context merge, +// otherwise false to indicate a full-context merge +// @param mergeCache +// / +func mergeSingletons(a, b *PredictionContext, rootIsWildcard bool, mergeCache *JPCMap) *PredictionContext { + if mergeCache != nil { + previous, present := mergeCache.Get(a, b) + if present { + return previous + } + previous, present = mergeCache.Get(b, a) + if present { + return previous + } + } + + rootMerge := mergeRoot(a, b, rootIsWildcard) + if rootMerge != nil { + if mergeCache != nil { + mergeCache.Put(a, b, rootMerge) + } + return rootMerge + } + if a.returnState == b.returnState { + parent := merge(a.parentCtx, b.parentCtx, rootIsWildcard, mergeCache) + // if parent is same as existing a or b parent or reduced to a parent, + // return it + if parent.Equals(a.parentCtx) { + return a // ax + bx = ax, if a=b + } + if parent.Equals(b.parentCtx) { + return b // ax + bx = bx, if a=b + } + // else: ax + ay = a'[x,y] + // merge parents x and y, giving array node with x,y then remainders + // of those graphs. dup a, a' points at merged array. + // New joined parent so create a new singleton pointing to it, a' + spc := SingletonBasePredictionContextCreate(parent, a.returnState) + if mergeCache != nil { + mergeCache.Put(a, b, spc) + } + return spc + } + // a != b payloads differ + // see if we can collapse parents due to $+x parents if local ctx + var singleParent *PredictionContext + if a.Equals(b) || (a.parentCtx != nil && a.parentCtx.Equals(b.parentCtx)) { // ax + + // bx = + // [a,b]x + singleParent = a.parentCtx + } + if singleParent != nil { // parents are same + // sort payloads and use same parent + payloads := []int{a.returnState, b.returnState} + if a.returnState > b.returnState { + payloads[0] = b.returnState + payloads[1] = a.returnState + } + parents := []*PredictionContext{singleParent, singleParent} + apc := NewArrayPredictionContext(parents, payloads) + if mergeCache != nil { + mergeCache.Put(a, b, apc) + } + return apc + } + // parents differ and can't merge them. Just pack together + // into array can't merge. + // ax + by = [ax,by] + payloads := []int{a.returnState, b.returnState} + parents := []*PredictionContext{a.parentCtx, b.parentCtx} + if a.returnState > b.returnState { // sort by payload + payloads[0] = b.returnState + payloads[1] = a.returnState + parents = []*PredictionContext{b.parentCtx, a.parentCtx} + } + apc := NewArrayPredictionContext(parents, payloads) + if mergeCache != nil { + mergeCache.Put(a, b, apc) + } + return apc +} + +// Handle case where at least one of {@code a} or {@code b} is +// {@link //EMPTY}. In the following diagrams, the symbol {@code $} is used +// to represent {@link //EMPTY}. +// +//

+// # Local-Context Merges
+//
+// These local-context merge operations are used when {@code rootIsWildcard}
+// is true.
+//
+// {@link //EMPTY} is a superset of any graph: return {@link //EMPTY}.
+//
+// {@link //EMPTY} and anything is {@code //EMPTY}, so the merged parent is
+// {@code //EMPTY}: return the left graph.
+//
+// Special case of last merge if local context.
+//
+// # Full-Context Merges
+//
+// These full-context merge operations are used when {@code rootIsWildcard}
+// is false.
+//
+// Must keep all contexts: {@link //EMPTY} in an array is a special value (and
+// a nil parent).
+// +// @param a the first {@link SingletonBasePredictionContext} +// @param b the second {@link SingletonBasePredictionContext} +// @param rootIsWildcard {@code true} if this is a local-context merge, +// otherwise false to indicate a full-context merge +// / +func mergeRoot(a, b *PredictionContext, rootIsWildcard bool) *PredictionContext { + if rootIsWildcard { + if a.pcType == PredictionContextEmpty { + return BasePredictionContextEMPTY // // + b =// + } + if b.pcType == PredictionContextEmpty { + return BasePredictionContextEMPTY // a +// =// + } + } else { + if a.isEmpty() && b.isEmpty() { + return BasePredictionContextEMPTY // $ + $ = $ + } else if a.isEmpty() { // $ + x = [$,x] + payloads := []int{b.getReturnState(-1), BasePredictionContextEmptyReturnState} + parents := []*PredictionContext{b.GetParent(-1), nil} + return NewArrayPredictionContext(parents, payloads) + } else if b.isEmpty() { // x + $ = [$,x] ($ is always first if present) + payloads := []int{a.getReturnState(-1), BasePredictionContextEmptyReturnState} + parents := []*PredictionContext{a.GetParent(-1), nil} + return NewArrayPredictionContext(parents, payloads) + } + } + return nil +} + +// Merge two {@link ArrayBasePredictionContext} instances. +// +//

+// Different tops, different parents.
+//
+// Shared top, same parents.
+//
+// Shared top, different parents.
+//
+// Shared top, all shared parents.
+//
+// Equal tops, merge parents and reduce top to
+// {@link SingletonBasePredictionContext}.
+//
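The case analysis above is easier to see concretely. Because merge and its helpers are unexported, the sketch below would have to live inside package antlr (for instance in a test file); the state numbers 7 and 9 are arbitrary illustrations.

package antlr

import "fmt"

// demoMerge sketches two of the cases described above: two singletons that
// share a parent collapsing into one array context, and a wildcard root merge.
func demoMerge() {
	root := BasePredictionContextEMPTY

	// Different stack tops (7 and 9) pointing at the same parent: the result
	// is an array context whose two return states both point at that parent.
	ax := SingletonBasePredictionContextCreate(root, 7)
	ay := SingletonBasePredictionContextCreate(root, 9)
	merged := merge(ax, ay, true, nil) // local-context merge, no cache
	fmt.Println(merged.Type() == PredictionContextArray, merged.GetReturnStates()) // true [7 9]

	// Root merge under rootIsWildcard: $ is a superset of any graph, so
	// merging $ with anything just returns $.
	fmt.Println(merge(root, ax, true, nil).String()) // "$"
}

The two printed shapes correspond directly to the array and $ cases enumerated in the comments above.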

+// +//goland:noinspection GoBoolExpressions +func mergeArrays(a, b *PredictionContext, rootIsWildcard bool, mergeCache *JPCMap) *PredictionContext { + if mergeCache != nil { + previous, present := mergeCache.Get(a, b) + if present { + if runtimeConfig.parserATNSimulatorTraceATNSim { + fmt.Println("mergeArrays a=" + a.String() + ",b=" + b.String() + " -> previous") + } + return previous + } + previous, present = mergeCache.Get(b, a) + if present { + if runtimeConfig.parserATNSimulatorTraceATNSim { + fmt.Println("mergeArrays a=" + a.String() + ",b=" + b.String() + " -> previous") + } + return previous + } + } + // merge sorted payloads a + b => M + i := 0 // walks a + j := 0 // walks b + k := 0 // walks target M array + + mergedReturnStates := make([]int, len(a.returnStates)+len(b.returnStates)) + mergedParents := make([]*PredictionContext, len(a.returnStates)+len(b.returnStates)) + // walk and merge to yield mergedParents, mergedReturnStates + for i < len(a.returnStates) && j < len(b.returnStates) { + aParent := a.parents[i] + bParent := b.parents[j] + if a.returnStates[i] == b.returnStates[j] { + // same payload (stack tops are equal), must yield merged singleton + payload := a.returnStates[i] + // $+$ = $ + bothDollars := payload == BasePredictionContextEmptyReturnState && aParent == nil && bParent == nil + axAX := aParent != nil && bParent != nil && aParent.Equals(bParent) // ax+ax + // -> + // ax + if bothDollars || axAX { + mergedParents[k] = aParent // choose left + mergedReturnStates[k] = payload + } else { // ax+ay -> a'[x,y] + mergedParent := merge(aParent, bParent, rootIsWildcard, mergeCache) + mergedParents[k] = mergedParent + mergedReturnStates[k] = payload + } + i++ // hop over left one as usual + j++ // but also Skip one in right side since we merge + } else if a.returnStates[i] < b.returnStates[j] { // copy a[i] to M + mergedParents[k] = aParent + mergedReturnStates[k] = a.returnStates[i] + i++ + } else { // b > a, copy b[j] to M + mergedParents[k] = bParent + mergedReturnStates[k] = b.returnStates[j] + j++ + } + k++ + } + // copy over any payloads remaining in either array + if i < len(a.returnStates) { + for p := i; p < len(a.returnStates); p++ { + mergedParents[k] = a.parents[p] + mergedReturnStates[k] = a.returnStates[p] + k++ + } + } else { + for p := j; p < len(b.returnStates); p++ { + mergedParents[k] = b.parents[p] + mergedReturnStates[k] = b.returnStates[p] + k++ + } + } + // trim merged if we combined a few that had same stack tops + if k < len(mergedParents) { // write index < last position trim + if k == 1 { // for just one merged element, return singleton top + pc := SingletonBasePredictionContextCreate(mergedParents[0], mergedReturnStates[0]) + if mergeCache != nil { + mergeCache.Put(a, b, pc) + } + return pc + } + mergedParents = mergedParents[0:k] + mergedReturnStates = mergedReturnStates[0:k] + } + + M := NewArrayPredictionContext(mergedParents, mergedReturnStates) + + // if we created same array as a or b, return that instead + // TODO: JI track whether this is possible above during merge sort for speed and possibly avoid an allocation + if M.Equals(a) { + if mergeCache != nil { + mergeCache.Put(a, b, a) + } + if runtimeConfig.parserATNSimulatorTraceATNSim { + fmt.Println("mergeArrays a=" + a.String() + ",b=" + b.String() + " -> a") + } + return a + } + if M.Equals(b) { + if mergeCache != nil { + mergeCache.Put(a, b, b) + } + if runtimeConfig.parserATNSimulatorTraceATNSim { + fmt.Println("mergeArrays a=" + a.String() + ",b=" + b.String() + " -> b") + } + 
return b + } + combineCommonParents(&mergedParents) + + if mergeCache != nil { + mergeCache.Put(a, b, M) + } + if runtimeConfig.parserATNSimulatorTraceATNSim { + fmt.Println("mergeArrays a=" + a.String() + ",b=" + b.String() + " -> " + M.String()) + } + return M +} + +// Make pass over all M parents and merge any Equals() ones. +// Note that we pass a pointer to the slice as we want to modify it in place. +// +//goland:noinspection GoUnusedFunction +func combineCommonParents(parents *[]*PredictionContext) { + uniqueParents := NewJStore[*PredictionContext, Comparator[*PredictionContext]](pContextEqInst, PredictionContextCollection, "combineCommonParents for PredictionContext") + + for p := 0; p < len(*parents); p++ { + parent := (*parents)[p] + _, _ = uniqueParents.Put(parent) + } + for q := 0; q < len(*parents); q++ { + pc, _ := uniqueParents.Get((*parents)[q]) + (*parents)[q] = pc + } +} + +func getCachedBasePredictionContext(context *PredictionContext, contextCache *PredictionContextCache, visited *VisitRecord) *PredictionContext { + if context.isEmpty() { + return context + } + existing, present := visited.Get(context) + if present { + return existing + } + + existing, present = contextCache.Get(context) + if present { + visited.Put(context, existing) + return existing + } + changed := false + parents := make([]*PredictionContext, context.length()) + for i := 0; i < len(parents); i++ { + parent := getCachedBasePredictionContext(context.GetParent(i), contextCache, visited) + if changed || !parent.Equals(context.GetParent(i)) { + if !changed { + parents = make([]*PredictionContext, context.length()) + for j := 0; j < context.length(); j++ { + parents[j] = context.GetParent(j) + } + changed = true + } + parents[i] = parent + } + } + if !changed { + contextCache.add(context) + visited.Put(context, context) + return context + } + var updated *PredictionContext + if len(parents) == 0 { + updated = BasePredictionContextEMPTY + } else if len(parents) == 1 { + updated = SingletonBasePredictionContextCreate(parents[0], context.getReturnState(0)) + } else { + updated = NewArrayPredictionContext(parents, context.GetReturnStates()) + } + contextCache.add(updated) + visited.Put(updated, updated) + visited.Put(context, updated) + + return updated +} diff --git a/vendor/github.com/antlr4-go/antlr/v4/prediction_context_cache.go b/vendor/github.com/antlr4-go/antlr/v4/prediction_context_cache.go new file mode 100644 index 000000000..25dfb11e8 --- /dev/null +++ b/vendor/github.com/antlr4-go/antlr/v4/prediction_context_cache.go @@ -0,0 +1,48 @@ +package antlr + +var BasePredictionContextEMPTY = &PredictionContext{ + cachedHash: calculateEmptyHash(), + pcType: PredictionContextEmpty, + returnState: BasePredictionContextEmptyReturnState, +} + +// PredictionContextCache is Used to cache [PredictionContext] objects. It is used for the shared +// context cash associated with contexts in DFA states. This cache +// can be used for both lexers and parsers. +type PredictionContextCache struct { + cache *JMap[*PredictionContext, *PredictionContext, Comparator[*PredictionContext]] +} + +func NewPredictionContextCache() *PredictionContextCache { + return &PredictionContextCache{ + cache: NewJMap[*PredictionContext, *PredictionContext, Comparator[*PredictionContext]](pContextEqInst, PredictionContextCacheCollection, "NewPredictionContextCache()"), + } +} + +// Add a context to the cache and return it. If the context already exists, +// return that one instead and do not add a new context to the cache. 
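A quick sketch of that behaviour follows; add is unexported, so like the sketch earlier this would sit inside package antlr, and the return state 42 is arbitrary.

package antlr

import "fmt"

// demoContextCache shows the canonicalisation performed by add: two
// structurally equal contexts collapse onto the single cached instance.
func demoContextCache() {
	cache := NewPredictionContextCache()

	a := SingletonBasePredictionContextCreate(BasePredictionContextEMPTY, 42)
	b := SingletonBasePredictionContextCreate(BasePredictionContextEMPTY, 42)

	first := cache.add(a)  // not cached yet: stored and returned unchanged
	second := cache.add(b) // Equals(a), so the cached instance is returned

	fmt.Println(first == a, second == a, cache.length()) // true true 1
}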
+// Protect shared cache from unsafe thread access. +func (p *PredictionContextCache) add(ctx *PredictionContext) *PredictionContext { + if ctx.isEmpty() { + return BasePredictionContextEMPTY + } + + // Put will return the existing entry if it is present (note this is done via Equals, not whether it is + // the same pointer), otherwise it will add the new entry and return that. + // + existing, present := p.cache.Get(ctx) + if present { + return existing + } + p.cache.Put(ctx, ctx) + return ctx +} + +func (p *PredictionContextCache) Get(ctx *PredictionContext) (*PredictionContext, bool) { + pc, exists := p.cache.Get(ctx) + return pc, exists +} + +func (p *PredictionContextCache) length() int { + return p.cache.Len() +} diff --git a/vendor/github.com/antlr4-go/antlr/v4/prediction_mode.go b/vendor/github.com/antlr4-go/antlr/v4/prediction_mode.go new file mode 100644 index 000000000..3f85a6a52 --- /dev/null +++ b/vendor/github.com/antlr4-go/antlr/v4/prediction_mode.go @@ -0,0 +1,536 @@ +// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved. +// Use of this file is governed by the BSD 3-clause license that +// can be found in the LICENSE.txt file in the project root. + +package antlr + +// This enumeration defines the prediction modes available in ANTLR 4 along with +// utility methods for analyzing configuration sets for conflicts and/or +// ambiguities. + +const ( + // PredictionModeSLL represents the SLL(*) prediction mode. + // This prediction mode ignores the current + // parser context when making predictions. This is the fastest prediction + // mode, and provides correct results for many grammars. This prediction + // mode is more powerful than the prediction mode provided by ANTLR 3, but + // may result in syntax errors for grammar and input combinations which are + // not SLL. + // + // When using this prediction mode, the parser will either return a correct + // parse tree (i.e. the same parse tree that would be returned with the + // [PredictionModeLL] prediction mode), or it will Report a syntax error. If a + // syntax error is encountered when using the SLL prediction mode, + // it may be due to either an actual syntax error in the input or indicate + // that the particular combination of grammar and input requires the more + // powerful LL prediction abilities to complete successfully. + // + // This prediction mode does not provide any guarantees for prediction + // behavior for syntactically-incorrect inputs. + // + PredictionModeSLL = 0 + + // PredictionModeLL represents the LL(*) prediction mode. + // This prediction mode allows the current parser + // context to be used for resolving SLL conflicts that occur during + // prediction. This is the fastest prediction mode that guarantees correct + // parse results for all combinations of grammars with syntactically correct + // inputs. + // + // When using this prediction mode, the parser will make correct decisions + // for all syntactically-correct grammar and input combinations. However, in + // cases where the grammar is truly ambiguous this prediction mode might not + // report a precise answer for exactly which alternatives are + // ambiguous. + // + // This prediction mode does not provide any guarantees for prediction + // behavior for syntactically-incorrect inputs. + // + PredictionModeLL = 1 + + // PredictionModeLLExactAmbigDetection represents the LL(*) prediction mode + // with exact ambiguity detection. 
+ // + // In addition to the correctness guarantees provided by the [PredictionModeLL] prediction mode, + // this prediction mode instructs the prediction algorithm to determine the + // complete and exact set of ambiguous alternatives for every ambiguous + // decision encountered while parsing. + // + // This prediction mode may be used for diagnosing ambiguities during + // grammar development. Due to the performance overhead of calculating sets + // of ambiguous alternatives, this prediction mode should be avoided when + // the exact results are not necessary. + // + // This prediction mode does not provide any guarantees for prediction + // behavior for syntactically-incorrect inputs. + // + PredictionModeLLExactAmbigDetection = 2 +) + +// PredictionModehasSLLConflictTerminatingPrediction computes the SLL prediction termination condition. +// +// This method computes the SLL prediction termination condition for both of +// the following cases: +// +// - The usual SLL+LL fallback upon SLL conflict +// - Pure SLL without LL fallback +// +// # Combined SLL+LL Parsing +// +// When LL-fallback is enabled upon SLL conflict, correct predictions are +// ensured regardless of how the termination condition is computed by this +// method. Due to the substantially higher cost of LL prediction, the +// prediction should only fall back to LL when the additional lookahead +// cannot lead to a unique SLL prediction. +// +// Assuming combined SLL+LL parsing, an SLL configuration set with only +// conflicting subsets should fall back to full LL, even if the +// configuration sets don't resolve to the same alternative, e.g. +// +// {1,2} and {3,4} +// +// If there is at least one non-conflicting +// configuration, SLL could continue with the hopes that more lookahead will +// resolve via one of those non-conflicting configurations. +// +// Here's the prediction termination rule them: SLL (for SLL+LL parsing) +// stops when it sees only conflicting configuration subsets. In contrast, +// full LL keeps going when there is uncertainty. +// +// # Heuristic +// +// As a heuristic, we stop prediction when we see any conflicting subset +// unless we see a state that only has one alternative associated with it. +// The single-alt-state thing lets prediction continue upon rules like +// (otherwise, it would admit defeat too soon): +// +// [12|1|[], 6|2|[], 12|2|[]]. s : (ID | ID ID?) ; +// +// When the [ATN] simulation reaches the state before ';', it has a +// [DFA] state that looks like: +// +// [12|1|[], 6|2|[], 12|2|[]] +// +// Naturally +// +// 12|1|[] and 12|2|[] +// +// conflict, but we cannot stop processing this node because alternative to has another way to continue, +// via +// +// [6|2|[]] +// +// It also let's us continue for this rule: +// +// [1|1|[], 1|2|[], 8|3|[]] a : A | A | A B ; +// +// After Matching input A, we reach the stop state for rule A, state 1. +// State 8 is the state immediately before B. Clearly alternatives 1 and 2 +// conflict and no amount of further lookahead will separate the two. +// However, alternative 3 will be able to continue, and so we do not stop +// working on this state. In the previous example, we're concerned with +// states associated with the conflicting alternatives. Here alt 3 is not +// associated with the conflicting configs, but since we can continue +// looking for input reasonably, don't declare the state done. 
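The SLL-then-LL fallback described here is usually driven from application code: try the cheap SLL pass first and pay for full LL only when it fails. Below is a sketch of that pattern, again assuming the hypothetical example.com/demo/parser generated package and Prog entry rule used earlier; counting syntax errors with a listener is just one way to notice that the SLL pass failed.

package main

import (
	"github.com/antlr4-go/antlr/v4"

	"example.com/demo/parser" // hypothetical generated lexer/parser package
)

// errCounter counts syntax errors so the caller can tell whether the SLL pass failed.
type errCounter struct {
	*antlr.DefaultErrorListener
	n int
}

func (c *errCounter) SyntaxError(_ antlr.Recognizer, _ interface{}, _, _ int, _ string, _ antlr.RecognitionException) {
	c.n++
}

// parseTwoStage runs the fast SLL prediction mode first and re-parses with
// full LL only when SLL reported a syntax error.
func parseTwoStage(text string) antlr.ParseTree {
	run := func(mode int, listener antlr.ErrorListener) antlr.ParseTree {
		lexer := parser.NewDemoLexer(antlr.NewInputStream(text)) // hypothetical
		tokens := antlr.NewCommonTokenStream(lexer, antlr.TokenDefaultChannel)
		p := parser.NewDemoParser(tokens) // hypothetical
		p.RemoveErrorListeners()
		p.AddErrorListener(listener)
		p.GetInterpreter().SetPredictionMode(mode)
		return p.Prog() // hypothetical entry rule
	}

	sllErrs := &errCounter{DefaultErrorListener: antlr.NewDefaultErrorListener()}
	tree := run(antlr.PredictionModeSLL, sllErrs)
	if sllErrs.n == 0 {
		return tree // SLL succeeded, which it does for most inputs
	}
	// Either the input genuinely needs LL prediction or it is truly erroneous:
	// parse again with the stronger mode and the normal console listener.
	return run(antlr.PredictionModeLL, antlr.NewConsoleErrorListener())
}

func main() { _ = parseTwoStage("a b c") }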
+// +// # Pure SLL Parsing +// +// To handle pure SLL parsing, all we have to do is make sure that we +// combine stack contexts for configurations that differ only by semantic +// predicate. From there, we can do the usual SLL termination heuristic. +// +// # Predicates in SLL+LL Parsing +// +// SLL decisions don't evaluate predicates until after they reach [DFA] stop +// states because they need to create the [DFA] cache that works in all +// semantic situations. In contrast, full LL evaluates predicates collected +// during start state computation, so it can ignore predicates thereafter. +// This means that SLL termination detection can totally ignore semantic +// predicates. +// +// Implementation-wise, [ATNConfigSet] combines stack contexts but not +// semantic predicate contexts, so we might see two configurations like the +// following: +// +// (s, 1, x, {}), (s, 1, x', {p}) +// +// Before testing these configurations against others, we have to merge +// x and x' (without modifying the existing configurations). +// For example, we test (x+x')==x” when looking for conflicts in +// the following configurations: +// +// (s, 1, x, {}), (s, 1, x', {p}), (s, 2, x”, {}) +// +// If the configuration set has predicates (as indicated by +// [ATNConfigSet.hasSemanticContext]), this algorithm makes a copy of +// the configurations to strip out all the predicates so that a standard +// [ATNConfigSet] will merge everything ignoring predicates. +func PredictionModehasSLLConflictTerminatingPrediction(mode int, configs *ATNConfigSet) bool { + + // Configs in rule stop states indicate reaching the end of the decision + // rule (local context) or end of start rule (full context). If all + // configs meet this condition, then none of the configurations is able + // to Match additional input, so we terminate prediction. + // + if PredictionModeallConfigsInRuleStopStates(configs) { + return true + } + + // pure SLL mode parsing + if mode == PredictionModeSLL { + // Don't bother with combining configs from different semantic + // contexts if we can fail over to full LL costs more time + // since we'll often fail over anyway. + if configs.hasSemanticContext { + // dup configs, tossing out semantic predicates + dup := NewATNConfigSet(false) + for _, c := range configs.configs { + + // NewATNConfig({semanticContext:}, c) + c = NewATNConfig2(c, SemanticContextNone) + dup.Add(c, nil) + } + configs = dup + } + // now we have combined contexts for configs with dissimilar predicates + } + // pure SLL or combined SLL+LL mode parsing + altsets := PredictionModegetConflictingAltSubsets(configs) + return PredictionModehasConflictingAltSet(altsets) && !PredictionModehasStateAssociatedWithOneAlt(configs) +} + +// PredictionModehasConfigInRuleStopState checks if any configuration in the given configs is in a +// [RuleStopState]. Configurations meeting this condition have reached +// the end of the decision rule (local context) or end of start rule (full +// context). +// +// The func returns true if any configuration in the supplied configs is in a [RuleStopState] +func PredictionModehasConfigInRuleStopState(configs *ATNConfigSet) bool { + for _, c := range configs.configs { + if _, ok := c.GetState().(*RuleStopState); ok { + return true + } + } + return false +} + +// PredictionModeallConfigsInRuleStopStates checks if all configurations in configs are in a +// [RuleStopState]. Configurations meeting this condition have reached +// the end of the decision rule (local context) or end of start rule (full +// context). 
+// +// the func returns true if all configurations in configs are in a +// [RuleStopState] +func PredictionModeallConfigsInRuleStopStates(configs *ATNConfigSet) bool { + + for _, c := range configs.configs { + if _, ok := c.GetState().(*RuleStopState); !ok { + return false + } + } + return true +} + +// PredictionModeresolvesToJustOneViableAlt checks full LL prediction termination. +// +// Can we stop looking ahead during [ATN] simulation or is there some +// uncertainty as to which alternative we will ultimately pick, after +// consuming more input? Even if there are partial conflicts, we might know +// that everything is going to resolve to the same minimum alternative. That +// means we can stop since no more lookahead will change that fact. On the +// other hand, there might be multiple conflicts that resolve to different +// minimums. That means we need more look ahead to decide which of those +// alternatives we should predict. +// +// The basic idea is to split the set of configurations 'C', into +// conflicting subsets (s, _, ctx, _) and singleton subsets with +// non-conflicting configurations. Two configurations conflict if they have +// identical [ATNConfig].state and [ATNConfig].context values +// but a different [ATNConfig].alt value, e.g. +// +// (s, i, ctx, _) +// +// and +// +// (s, j, ctx, _) ; for i != j +// +// Reduce these configuration subsets to the set of possible alternatives. +// You can compute the alternative subsets in one pass as follows: +// +// A_s,ctx = {i | (s, i, ctx, _)} +// +// for each configuration in C holding s and ctx fixed. +// +// Or in pseudo-code: +// +// for each configuration c in C: +// map[c] U = c.ATNConfig.alt alt // map hash/equals uses s and x, not alt and not pred +// +// The values in map are the set of +// +// A_s,ctx +// +// sets. +// +// If +// +// |A_s,ctx| = 1 +// +// then there is no conflict associated with s and ctx. +// +// Reduce the subsets to singletons by choosing a minimum of each subset. If +// the union of these alternative subsets is a singleton, then no amount of +// further lookahead will help us. We will always pick that alternative. If, +// however, there is more than one alternative, then we are uncertain which +// alternative to predict and must continue looking for resolution. We may +// or may not discover an ambiguity in the future, even if there are no +// conflicting subsets this round. +// +// The biggest sin is to terminate early because it means we've made a +// decision but were uncertain as to the eventual outcome. We haven't used +// enough lookahead. On the other hand, announcing a conflict too late is no +// big deal; you will still have the conflict. It's just inefficient. It +// might even look until the end of file. +// +// No special consideration for semantic predicates is required because +// predicates are evaluated on-the-fly for full LL prediction, ensuring that +// no configuration contains a semantic context during the termination +// check. +// +// # Conflicting Configs +// +// Two configurations: +// +// (s, i, x) and (s, j, x') +// +// conflict when i != j but x = x'. Because we merge all +// (s, i, _) configurations together, that means that there are at +// most n configurations associated with state s for +// n possible alternatives in the decision. The merged stacks +// complicate the comparison of configuration contexts x and x'. +// +// Sam checks to see if one is a subset of the other by calling +// merge and checking to see if the merged result is either x or x'. 
+// If the x associated with lowest alternative i +// is the superset, then i is the only possible prediction since the +// others resolve to min(i) as well. However, if x is +// associated with j > i then at least one stack configuration for +// j is not in conflict with alternative i. The algorithm +// should keep going, looking for more lookahead due to the uncertainty. +// +// For simplicity, I'm doing an equality check between x and +// x', which lets the algorithm continue to consume lookahead longer +// than necessary. The reason I like the equality is of course the +// simplicity but also because that is the test you need to detect the +// alternatives that are actually in conflict. +// +// # Continue/Stop Rule +// +// Continue if the union of resolved alternative sets from non-conflicting and +// conflicting alternative subsets has more than one alternative. We are +// uncertain about which alternative to predict. +// +// The complete set of alternatives, +// +// [i for (_, i, _)] +// +// tells us which alternatives are still in the running for the amount of input we've +// consumed at this point. The conflicting sets let us to strip away +// configurations that won't lead to more states because we resolve +// conflicts to the configuration with a minimum alternate for the +// conflicting set. +// +// Cases +// +// - no conflicts and more than 1 alternative in set => continue +// - (s, 1, x), (s, 2, x), (s, 3, z), (s', 1, y), (s', 2, y) yields non-conflicting set +// {3} ∪ conflicting sets min({1,2}) ∪ min({1,2}) = {1,3} => continue +// - (s, 1, x), (s, 2, x), (s', 1, y), (s', 2, y), (s”, 1, z) yields non-conflicting set +// {1} ∪ conflicting sets min({1,2}) ∪ min({1,2}) = {1} => stop and predict 1 +// - (s, 1, x), (s, 2, x), (s', 1, y), (s', 2, y) yields conflicting, reduced sets +// {1} ∪ {1} = {1} => stop and predict 1, can announce ambiguity {1,2} +// - (s, 1, x), (s, 2, x), (s', 2, y), (s', 3, y) yields conflicting, reduced sets +// {1} ∪ {2} = {1,2} => continue +// - (s, 1, x), (s, 2, x), (s', 2, y), (s', 3, y) yields conflicting, reduced sets +// {1} ∪ {2} = {1,2} => continue +// - (s, 1, x), (s, 2, x), (s', 3, y), (s', 4, y) yields conflicting, reduced sets +// {1} ∪ {3} = {1,3} => continue +// +// # Exact Ambiguity Detection +// +// If all states report the same conflicting set of alternatives, then we +// know we have the exact ambiguity set: +// +// |A_i| > 1 +// +// and +// +// A_i = A_j ; for all i, j +// +// In other words, we continue examining lookahead until all A_i +// have more than one alternative and all A_i are the same. If +// +// A={{1,2}, {1,3}} +// +// then regular LL prediction would terminate because the resolved set is {1}. +// To determine what the real ambiguity is, we have to know whether the ambiguity is between one and +// two or one and three so we keep going. We can only stop prediction when +// we need exact ambiguity detection when the sets look like: +// +// A={{1,2}} +// +// or +// +// {{1,2},{1,2}}, etc... +func PredictionModeresolvesToJustOneViableAlt(altsets []*BitSet) int { + return PredictionModegetSingleViableAlt(altsets) +} + +// PredictionModeallSubsetsConflict determines if every alternative subset in altsets contains more +// than one alternative. 
+// +// The func returns true if every [BitSet] in altsets has +// [BitSet].cardinality cardinality > 1 +func PredictionModeallSubsetsConflict(altsets []*BitSet) bool { + return !PredictionModehasNonConflictingAltSet(altsets) +} + +// PredictionModehasNonConflictingAltSet determines if any single alternative subset in altsets contains +// exactly one alternative. +// +// The func returns true if altsets contains at least one [BitSet] with +// [BitSet].cardinality cardinality 1 +func PredictionModehasNonConflictingAltSet(altsets []*BitSet) bool { + for i := 0; i < len(altsets); i++ { + alts := altsets[i] + if alts.length() == 1 { + return true + } + } + return false +} + +// PredictionModehasConflictingAltSet determines if any single alternative subset in altsets contains +// more than one alternative. +// +// The func returns true if altsets contains a [BitSet] with +// [BitSet].cardinality cardinality > 1, otherwise false +func PredictionModehasConflictingAltSet(altsets []*BitSet) bool { + for i := 0; i < len(altsets); i++ { + alts := altsets[i] + if alts.length() > 1 { + return true + } + } + return false +} + +// PredictionModeallSubsetsEqual determines if every alternative subset in altsets is equivalent. +// +// The func returns true if every member of altsets is equal to the others. +func PredictionModeallSubsetsEqual(altsets []*BitSet) bool { + var first *BitSet + + for i := 0; i < len(altsets); i++ { + alts := altsets[i] + if first == nil { + first = alts + } else if alts != first { + return false + } + } + + return true +} + +// PredictionModegetUniqueAlt returns the unique alternative predicted by all alternative subsets in +// altsets. If no such alternative exists, this method returns +// [ATNInvalidAltNumber]. +// +// @param altsets a collection of alternative subsets +func PredictionModegetUniqueAlt(altsets []*BitSet) int { + all := PredictionModeGetAlts(altsets) + if all.length() == 1 { + return all.minValue() + } + + return ATNInvalidAltNumber +} + +// PredictionModeGetAlts returns the complete set of represented alternatives for a collection of +// alternative subsets. This method returns the union of each [BitSet] +// in altsets, being the set of represented alternatives in altsets. +func PredictionModeGetAlts(altsets []*BitSet) *BitSet { + all := NewBitSet() + for _, alts := range altsets { + all.or(alts) + } + return all +} + +// PredictionModegetConflictingAltSubsets gets the conflicting alt subsets from a configuration set. +// +// for each configuration c in configs: +// map[c] U= c.ATNConfig.alt // map hash/equals uses s and x, not alt and not pred +func PredictionModegetConflictingAltSubsets(configs *ATNConfigSet) []*BitSet { + configToAlts := NewJMap[*ATNConfig, *BitSet, *ATNAltConfigComparator[*ATNConfig]](atnAltCfgEqInst, AltSetCollection, "PredictionModegetConflictingAltSubsets()") + + for _, c := range configs.configs { + + alts, ok := configToAlts.Get(c) + if !ok { + alts = NewBitSet() + configToAlts.Put(c, alts) + } + alts.add(c.GetAlt()) + } + + return configToAlts.Values() +} + +// PredictionModeGetStateToAltMap gets a map from state to alt subset from a configuration set. 
+// +// for each configuration c in configs: +// map[c.ATNConfig.state] U= c.ATNConfig.alt} +func PredictionModeGetStateToAltMap(configs *ATNConfigSet) *AltDict { + m := NewAltDict() + + for _, c := range configs.configs { + alts := m.Get(c.GetState().String()) + if alts == nil { + alts = NewBitSet() + m.put(c.GetState().String(), alts) + } + alts.(*BitSet).add(c.GetAlt()) + } + return m +} + +func PredictionModehasStateAssociatedWithOneAlt(configs *ATNConfigSet) bool { + values := PredictionModeGetStateToAltMap(configs).values() + for i := 0; i < len(values); i++ { + if values[i].(*BitSet).length() == 1 { + return true + } + } + return false +} + +// PredictionModegetSingleViableAlt gets the single alternative predicted by all alternative subsets in altsets +// if there is one. +// +// TODO: JI - Review this code - it does not seem to do the same thing as the Java code - maybe because [BitSet] is not like the Java utils BitSet +func PredictionModegetSingleViableAlt(altsets []*BitSet) int { + result := ATNInvalidAltNumber + + for i := 0; i < len(altsets); i++ { + alts := altsets[i] + minAlt := alts.minValue() + if result == ATNInvalidAltNumber { + result = minAlt + } else if result != minAlt { // more than 1 viable alt + return ATNInvalidAltNumber + } + } + return result +} diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/recognizer.go b/vendor/github.com/antlr4-go/antlr/v4/recognizer.go similarity index 70% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/recognizer.go rename to vendor/github.com/antlr4-go/antlr/v4/recognizer.go index bfe542d09..2e0b504fb 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/recognizer.go +++ b/vendor/github.com/antlr4-go/antlr/v4/recognizer.go @@ -26,6 +26,9 @@ type Recognizer interface { RemoveErrorListeners() GetATN() *ATN GetErrorListenerDispatch() ErrorListener + HasError() bool + GetError() RecognitionException + SetError(RecognitionException) } type BaseRecognizer struct { @@ -36,6 +39,7 @@ type BaseRecognizer struct { LiteralNames []string SymbolicNames []string GrammarFileName string + SynErr RecognitionException } func NewBaseRecognizer() *BaseRecognizer { @@ -45,7 +49,10 @@ func NewBaseRecognizer() *BaseRecognizer { return rec } +//goland:noinspection GoUnusedGlobalVariable var tokenTypeMapCache = make(map[string]int) + +//goland:noinspection GoUnusedGlobalVariable var ruleIndexMapCache = make(map[string]int) func (b *BaseRecognizer) checkVersion(toolVersion string) { @@ -55,7 +62,19 @@ func (b *BaseRecognizer) checkVersion(toolVersion string) { } } -func (b *BaseRecognizer) Action(context RuleContext, ruleIndex, actionIndex int) { +func (b *BaseRecognizer) SetError(err RecognitionException) { + b.SynErr = err +} + +func (b *BaseRecognizer) HasError() bool { + return b.SynErr != nil +} + +func (b *BaseRecognizer) GetError() RecognitionException { + return b.SynErr +} + +func (b *BaseRecognizer) Action(_ RuleContext, _, _ int) { panic("action not implemented on Recognizer!") } @@ -105,9 +124,11 @@ func (b *BaseRecognizer) SetState(v int) { // return result //} -// Get a map from rule names to rule indexes. +// GetRuleIndexMap Get a map from rule names to rule indexes. // -//
<p>Used for XPath and tree pattern compilation.</p>
+// Used for XPath and tree pattern compilation. +// +// TODO: JI This is not yet implemented in the Go runtime. Maybe not needed. func (b *BaseRecognizer) GetRuleIndexMap() map[string]int { panic("Method not defined!") @@ -124,7 +145,8 @@ func (b *BaseRecognizer) GetRuleIndexMap() map[string]int { // return result } -func (b *BaseRecognizer) GetTokenType(tokenName string) int { +// GetTokenType get the token type based upon its name +func (b *BaseRecognizer) GetTokenType(_ string) int { panic("Method not defined!") // var ttype = b.GetTokenTypeMap()[tokenName] // if (ttype !=nil) { @@ -162,26 +184,27 @@ func (b *BaseRecognizer) GetTokenType(tokenName string) int { // } //} -// What is the error header, normally line/character position information?// +// GetErrorHeader returns the error header, normally line/character position information. +// +// Can be overridden in sub structs embedding BaseRecognizer. func (b *BaseRecognizer) GetErrorHeader(e RecognitionException) string { line := e.GetOffendingToken().GetLine() column := e.GetOffendingToken().GetColumn() return "line " + strconv.Itoa(line) + ":" + strconv.Itoa(column) } -// How should a token be displayed in an error message? The default +// GetTokenErrorDisplay shows how a token should be displayed in an error message. // -// is to display just the text, but during development you might -// want to have a lot of information spit out. Override in that case -// to use t.String() (which, for CommonToken, dumps everything about -// the token). This is better than forcing you to override a method in -// your token objects because you don't have to go modify your lexer -// so that it creates a NewJava type. +// The default is to display just the text, but during development you might +// want to have a lot of information spit out. Override in that case +// to use t.String() (which, for CommonToken, dumps everything about +// the token). This is better than forcing you to override a method in +// your token objects because you don't have to go modify your lexer +// so that it creates a NewJava type. // -// @deprecated This method is not called by the ANTLR 4 Runtime. Specific -// implementations of {@link ANTLRErrorStrategy} may provide a similar -// feature when necessary. For example, see -// {@link DefaultErrorStrategy//GetTokenErrorDisplay}. +// Deprecated: This method is not called by the ANTLR 4 Runtime. Specific +// implementations of [ANTLRErrorStrategy] may provide a similar +// feature when necessary. 
For example, see [DefaultErrorStrategy].GetTokenErrorDisplay() func (b *BaseRecognizer) GetTokenErrorDisplay(t Token) string { if t == nil { return "" @@ -205,12 +228,14 @@ func (b *BaseRecognizer) GetErrorListenerDispatch() ErrorListener { return NewProxyErrorListener(b.listeners) } -// subclass needs to override these if there are sempreds or actions -// that the ATN interp needs to execute -func (b *BaseRecognizer) Sempred(localctx RuleContext, ruleIndex int, actionIndex int) bool { +// Sempred embedding structs need to override this if there are sempreds or actions +// that the ATN interpreter needs to execute +func (b *BaseRecognizer) Sempred(_ RuleContext, _ int, _ int) bool { return true } -func (b *BaseRecognizer) Precpred(localctx RuleContext, precedence int) bool { +// Precpred embedding structs need to override this if there are preceding predicates +// that the ATN interpreter needs to execute +func (b *BaseRecognizer) Precpred(_ RuleContext, _ int) bool { return true } diff --git a/vendor/github.com/antlr4-go/antlr/v4/rule_context.go b/vendor/github.com/antlr4-go/antlr/v4/rule_context.go new file mode 100644 index 000000000..f2ad04793 --- /dev/null +++ b/vendor/github.com/antlr4-go/antlr/v4/rule_context.go @@ -0,0 +1,40 @@ +// Copyright (c) 2012-2022 The ANTLR Project. All rights reserved. +// Use of this file is governed by the BSD 3-clause license that +// can be found in the LICENSE.txt file in the project root. + +package antlr + +// RuleContext is a record of a single rule invocation. It knows +// which context invoked it, if any. If there is no parent context, then +// naturally the invoking state is not valid. The parent link +// provides a chain upwards from the current rule invocation to the root +// of the invocation tree, forming a stack. +// +// We actually carry no information about the rule associated with this context (except +// when parsing). We keep only the state number of the invoking state from +// the [ATN] submachine that invoked this. Contrast this with the s +// pointer inside [ParserRuleContext] that tracks the current state +// being "executed" for the current rule. +// +// The parent contexts are useful for computing lookahead sets and +// getting error information. +// +// These objects are used during parsing and prediction. +// For the special case of parsers, we use the struct +// [ParserRuleContext], which embeds a RuleContext. +// +// @see ParserRuleContext +type RuleContext interface { + RuleNode + + GetInvokingState() int + SetInvokingState(int) + + GetRuleIndex() int + IsEmpty() bool + + GetAltNumber() int + SetAltNumber(altNumber int) + + String([]string, RuleContext) string +} diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/semantic_context.go b/vendor/github.com/antlr4-go/antlr/v4/semantic_context.go similarity index 92% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/semantic_context.go rename to vendor/github.com/antlr4-go/antlr/v4/semantic_context.go index a702e99de..68cb9061e 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/semantic_context.go +++ b/vendor/github.com/antlr4-go/antlr/v4/semantic_context.go @@ -9,14 +9,13 @@ import ( "strconv" ) -// A tree structure used to record the semantic context in which -// an ATN configuration is valid. It's either a single predicate, -// a conjunction {@code p1&&p2}, or a sum of products {@code p1||p2}. +// SemanticContext is a tree structure used to record the semantic context in which // -//
<p>I have scoped the {@link AND}, {@link OR}, and {@link Predicate} subclasses of
-// {@link SemanticContext} within the scope of this outer class.</p>
+// an ATN configuration is valid. It's either a single predicate, +// a conjunction p1 && p2, or a sum of products p1 || p2. // - +// I have scoped the AND, OR, and Predicate subclasses of +// [SemanticContext] within the scope of this outer ``class'' type SemanticContext interface { Equals(other Collectable[SemanticContext]) bool Hash() int @@ -80,7 +79,7 @@ func NewPredicate(ruleIndex, predIndex int, isCtxDependent bool) *Predicate { var SemanticContextNone = NewPredicate(-1, -1, false) -func (p *Predicate) evalPrecedence(parser Recognizer, outerContext RuleContext) SemanticContext { +func (p *Predicate) evalPrecedence(_ Recognizer, _ RuleContext) SemanticContext { return p } @@ -198,7 +197,7 @@ type AND struct { func NewAND(a, b SemanticContext) *AND { - operands := NewJStore[SemanticContext, Comparator[SemanticContext]](semctxEqInst) + operands := NewJStore[SemanticContext, Comparator[SemanticContext]](semctxEqInst, SemanticContextCollection, "NewAND() operands") if aa, ok := a.(*AND); ok { for _, o := range aa.opnds { operands.Put(o) @@ -230,9 +229,7 @@ func NewAND(a, b SemanticContext) *AND { vs := operands.Values() opnds := make([]SemanticContext, len(vs)) - for i, v := range vs { - opnds[i] = v.(SemanticContext) - } + copy(opnds, vs) and := new(AND) and.opnds = opnds @@ -316,12 +313,12 @@ func (a *AND) Hash() int { return murmurFinish(h, len(a.opnds)) } -func (a *OR) Hash() int { - h := murmurInit(41) // Init with a value different from AND - for _, op := range a.opnds { +func (o *OR) Hash() int { + h := murmurInit(41) // Init with o value different from AND + for _, op := range o.opnds { h = murmurUpdate(h, op.Hash()) } - return murmurFinish(h, len(a.opnds)) + return murmurFinish(h, len(o.opnds)) } func (a *AND) String() string { @@ -349,7 +346,7 @@ type OR struct { func NewOR(a, b SemanticContext) *OR { - operands := NewJStore[SemanticContext, Comparator[SemanticContext]](semctxEqInst) + operands := NewJStore[SemanticContext, Comparator[SemanticContext]](semctxEqInst, SemanticContextCollection, "NewOR() operands") if aa, ok := a.(*OR); ok { for _, o := range aa.opnds { operands.Put(o) @@ -382,9 +379,7 @@ func NewOR(a, b SemanticContext) *OR { vs := operands.Values() opnds := make([]SemanticContext, len(vs)) - for i, v := range vs { - opnds[i] = v.(SemanticContext) - } + copy(opnds, vs) o := new(OR) o.opnds = opnds diff --git a/vendor/github.com/antlr4-go/antlr/v4/statistics.go b/vendor/github.com/antlr4-go/antlr/v4/statistics.go new file mode 100644 index 000000000..70c0673a0 --- /dev/null +++ b/vendor/github.com/antlr4-go/antlr/v4/statistics.go @@ -0,0 +1,281 @@ +//go:build antlr.stats + +package antlr + +import ( + "fmt" + "log" + "os" + "path/filepath" + "sort" + "strconv" + "sync" +) + +// This file allows the user to collect statistics about the runtime of the ANTLR runtime. It is not enabled by default +// and so incurs no time penalty. To enable it, you must build the runtime with the antlr.stats build tag. +// + +// Tells various components to collect statistics - because it is only true when this file is included, it will +// allow the compiler to completely eliminate all the code that is only used when collecting statistics. +const collectStats = true + +// goRunStats is a collection of all the various data the ANTLR runtime has collected about a particular run. +// It is exported so that it can be used by others to look for things that are not already looked for in the +// runtime statistics. 
+type goRunStats struct { + + // jStats is a slice of all the [JStatRec] records that have been created, which is one for EVERY collection created + // during a run. It is exported so that it can be used by others to look for things that are not already looked for + // within this package. + // + jStats []*JStatRec + jStatsLock sync.RWMutex + topN int + topNByMax []*JStatRec + topNByUsed []*JStatRec + unusedCollections map[CollectionSource]int + counts map[CollectionSource]int +} + +const ( + collectionsFile = "collections" +) + +var ( + Statistics = &goRunStats{ + topN: 10, + } +) + +type statsOption func(*goRunStats) error + +// Configure allows the statistics system to be configured as the user wants and override the defaults +func (s *goRunStats) Configure(options ...statsOption) error { + for _, option := range options { + err := option(s) + if err != nil { + return err + } + } + return nil +} + +// WithTopN sets the number of things to list in the report when we are concerned with the top N things. +// +// For example, if you want to see the top 20 collections by size, you can do: +// +// antlr.Statistics.Configure(antlr.WithTopN(20)) +func WithTopN(topN int) statsOption { + return func(s *goRunStats) error { + s.topN = topN + return nil + } +} + +// Analyze looks through all the statistical records and computes all the outputs that might be useful to the user. +// +// The function gathers and analyzes a number of statistics about any particular run of +// an ANTLR generated recognizer. In the vast majority of cases, the statistics are only +// useful to maintainers of ANTLR itself, but they can be useful to users as well. They may be +// especially useful in tracking down bugs or performance problems when an ANTLR user could +// supply the output from this package, but cannot supply the grammar file(s) they are using, even +// privately to the maintainers. +// +// The statistics are gathered by the runtime itself, and are not gathered by the parser or lexer, but the user +// must call this function their selves to analyze the statistics. This is because none of the infrastructure is +// extant unless the calling program is built with the antlr.stats tag like so: +// +// go build -tags antlr.stats . +// +// When a program is built with the antlr.stats tag, the Statistics object is created and available outside +// the package. The user can then call the [Statistics.Analyze] function to analyze the statistics and then call the +// [Statistics.Report] function to report the statistics. +// +// Please forward any questions about this package to the ANTLR discussion groups on GitHub or send to them to +// me [Jim Idle] directly at jimi@idle.ws +// +// [Jim Idle]: https:://github.com/jim-idle +func (s *goRunStats) Analyze() { + + // Look for anything that looks strange and record it in our local maps etc for the report to present it + // + s.CollectionAnomalies() + s.TopNCollections() +} + +// TopNCollections looks through all the statistical records and gathers the top ten collections by size. 
+func (s *goRunStats) TopNCollections() { + + // Let's sort the stat records by MaxSize + // + sort.Slice(s.jStats, func(i, j int) bool { + return s.jStats[i].MaxSize > s.jStats[j].MaxSize + }) + + for i := 0; i < len(s.jStats) && i < s.topN; i++ { + s.topNByMax = append(s.topNByMax, s.jStats[i]) + } + + // Sort by the number of times used + // + sort.Slice(s.jStats, func(i, j int) bool { + return s.jStats[i].Gets+s.jStats[i].Puts > s.jStats[j].Gets+s.jStats[j].Puts + }) + for i := 0; i < len(s.jStats) && i < s.topN; i++ { + s.topNByUsed = append(s.topNByUsed, s.jStats[i]) + } +} + +// Report dumps a markdown formatted report of all the statistics collected during a run to the given dir output +// path, which should represent a directory. Generated files will be prefixed with the given prefix and will be +// given a type name such as `anomalies` and a time stamp such as `2021-09-01T12:34:56` and a .md suffix. +func (s *goRunStats) Report(dir string, prefix string) error { + + isDir, err := isDirectory(dir) + switch { + case err != nil: + return err + case !isDir: + return fmt.Errorf("output directory `%s` is not a directory", dir) + } + s.reportCollections(dir, prefix) + + // Clean out any old data in case the user forgets + // + s.Reset() + return nil +} + +func (s *goRunStats) Reset() { + s.jStats = nil + s.topNByUsed = nil + s.topNByMax = nil +} + +func (s *goRunStats) reportCollections(dir, prefix string) { + cname := filepath.Join(dir, ".asciidoctor") + // If the file doesn't exist, create it, or append to the file + f, err := os.OpenFile(cname, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) + if err != nil { + log.Fatal(err) + } + _, _ = f.WriteString(`// .asciidoctorconfig +++++ + +++++`) + _ = f.Close() + + fname := filepath.Join(dir, prefix+"_"+"_"+collectionsFile+"_"+".adoc") + // If the file doesn't exist, create it, or append to the file + f, err = os.OpenFile(fname, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) + if err != nil { + log.Fatal(err) + } + defer func(f *os.File) { + err := f.Close() + if err != nil { + log.Fatal(err) + } + }(f) + _, _ = f.WriteString("= Collections for " + prefix + "\n\n") + + _, _ = f.WriteString("== Summary\n") + + if s.unusedCollections != nil { + _, _ = f.WriteString("=== Unused Collections\n") + _, _ = f.WriteString("Unused collections incur a penalty for allocation that makes them a candidate for either\n") + _, _ = f.WriteString(" removal or optimization. If you are using a collection that is not used, you should\n") + _, _ = f.WriteString(" consider removing it. 
If you are using a collection that is used, but not very often,\n") + _, _ = f.WriteString(" you should consider using lazy initialization to defer the allocation until it is\n") + _, _ = f.WriteString(" actually needed.\n\n") + + _, _ = f.WriteString("\n.Unused collections\n") + _, _ = f.WriteString(`[cols="<3,>1"]` + "\n\n") + _, _ = f.WriteString("|===\n") + _, _ = f.WriteString("| Type | Count\n") + + for k, v := range s.unusedCollections { + _, _ = f.WriteString("| " + CollectionDescriptors[k].SybolicName + " | " + strconv.Itoa(v) + "\n") + } + f.WriteString("|===\n\n") + } + + _, _ = f.WriteString("\n.Summary of Collections\n") + _, _ = f.WriteString(`[cols="<3,>1"]` + "\n\n") + _, _ = f.WriteString("|===\n") + _, _ = f.WriteString("| Type | Count\n") + for k, v := range s.counts { + _, _ = f.WriteString("| " + CollectionDescriptors[k].SybolicName + " | " + strconv.Itoa(v) + "\n") + } + _, _ = f.WriteString("| Total | " + strconv.Itoa(len(s.jStats)) + "\n") + _, _ = f.WriteString("|===\n\n") + + _, _ = f.WriteString("\n.Summary of Top " + strconv.Itoa(s.topN) + " Collections by MaxSize\n") + _, _ = f.WriteString(`[cols="<1,<3,>1,>1,>1,>1"]` + "\n\n") + _, _ = f.WriteString("|===\n") + _, _ = f.WriteString("| Source | Description | MaxSize | EndSize | Puts | Gets\n") + for _, c := range s.topNByMax { + _, _ = f.WriteString("| " + CollectionDescriptors[c.Source].SybolicName + "\n") + _, _ = f.WriteString("| " + c.Description + "\n") + _, _ = f.WriteString("| " + strconv.Itoa(c.MaxSize) + "\n") + _, _ = f.WriteString("| " + strconv.Itoa(c.CurSize) + "\n") + _, _ = f.WriteString("| " + strconv.Itoa(c.Puts) + "\n") + _, _ = f.WriteString("| " + strconv.Itoa(c.Gets) + "\n") + _, _ = f.WriteString("\n") + } + _, _ = f.WriteString("|===\n\n") + + _, _ = f.WriteString("\n.Summary of Top " + strconv.Itoa(s.topN) + " Collections by Access\n") + _, _ = f.WriteString(`[cols="<1,<3,>1,>1,>1,>1,>1"]` + "\n\n") + _, _ = f.WriteString("|===\n") + _, _ = f.WriteString("| Source | Description | MaxSize | EndSize | Puts | Gets | P+G\n") + for _, c := range s.topNByUsed { + _, _ = f.WriteString("| " + CollectionDescriptors[c.Source].SybolicName + "\n") + _, _ = f.WriteString("| " + c.Description + "\n") + _, _ = f.WriteString("| " + strconv.Itoa(c.MaxSize) + "\n") + _, _ = f.WriteString("| " + strconv.Itoa(c.CurSize) + "\n") + _, _ = f.WriteString("| " + strconv.Itoa(c.Puts) + "\n") + _, _ = f.WriteString("| " + strconv.Itoa(c.Gets) + "\n") + _, _ = f.WriteString("| " + strconv.Itoa(c.Gets+c.Puts) + "\n") + _, _ = f.WriteString("\n") + } + _, _ = f.WriteString("|===\n\n") +} + +// AddJStatRec adds a [JStatRec] record to the [goRunStats] collection when build runtimeConfig antlr.stats is enabled. +func (s *goRunStats) AddJStatRec(rec *JStatRec) { + s.jStatsLock.Lock() + defer s.jStatsLock.Unlock() + s.jStats = append(s.jStats, rec) +} + +// CollectionAnomalies looks through all the statistical records and gathers any anomalies that have been found. 
+func (s *goRunStats) CollectionAnomalies() { + s.jStatsLock.RLock() + defer s.jStatsLock.RUnlock() + s.counts = make(map[CollectionSource]int, len(s.jStats)) + for _, c := range s.jStats { + + // Accumlate raw counts + // + s.counts[c.Source]++ + + // Look for allocated but unused collections and count them + if c.MaxSize == 0 && c.Puts == 0 { + if s.unusedCollections == nil { + s.unusedCollections = make(map[CollectionSource]int) + } + s.unusedCollections[c.Source]++ + } + if c.MaxSize > 6000 { + fmt.Println("Collection ", c.Description, "accumulated a max size of ", c.MaxSize, " - this is probably too large and indicates a poorly formed grammar") + } + } + +} diff --git a/vendor/github.com/antlr4-go/antlr/v4/stats_data.go b/vendor/github.com/antlr4-go/antlr/v4/stats_data.go new file mode 100644 index 000000000..4d9eb94e5 --- /dev/null +++ b/vendor/github.com/antlr4-go/antlr/v4/stats_data.go @@ -0,0 +1,23 @@ +package antlr + +// A JStatRec is a record of a particular use of a [JStore], [JMap] or JPCMap] collection. Typically, it will be +// used to look for unused collections that wre allocated anyway, problems with hash bucket clashes, and anomalies +// such as huge numbers of Gets with no entries found GetNoEnt. You can refer to the CollectionAnomalies() function +// for ideas on what can be gleaned from these statistics about collections. +type JStatRec struct { + Source CollectionSource + MaxSize int + CurSize int + Gets int + GetHits int + GetMisses int + GetHashConflicts int + GetNoEnt int + Puts int + PutHits int + PutMisses int + PutHashConflicts int + MaxSlotSize int + Description string + CreateStack []byte +} diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/token.go b/vendor/github.com/antlr4-go/antlr/v4/token.go similarity index 86% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/token.go rename to vendor/github.com/antlr4-go/antlr/v4/token.go index f73b06bc6..9670efb82 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/token.go +++ b/vendor/github.com/antlr4-go/antlr/v4/token.go @@ -35,6 +35,8 @@ type Token interface { GetTokenSource() TokenSource GetInputStream() CharStream + + String() string } type BaseToken struct { @@ -53,7 +55,7 @@ type BaseToken struct { const ( TokenInvalidType = 0 - // During lookahead operations, this "token" signifies we hit rule end ATN state + // TokenEpsilon - during lookahead operations, this "token" signifies we hit the rule end [ATN] state // and did not follow it despite needing to. TokenEpsilon = -2 @@ -61,15 +63,16 @@ const ( TokenEOF = -1 - // All tokens go to the parser (unless Skip() is called in that rule) + // TokenDefaultChannel is the default channel upon which tokens are sent to the parser. + // + // All tokens go to the parser (unless [Skip] is called in the lexer rule) // on a particular "channel". The parser tunes to a particular channel // so that whitespace etc... can go to the parser on a "hidden" channel. - TokenDefaultChannel = 0 - // Anything on different channel than DEFAULT_CHANNEL is not parsed - // by parser. - + // TokenHiddenChannel defines the normal hidden channel - the parser wil not see tokens that are not on [TokenDefaultChannel]. + // + // Anything on a different channel than TokenDefaultChannel is not parsed by parser. 
TokenHiddenChannel = 1 ) @@ -118,21 +121,22 @@ func (b *BaseToken) GetInputStream() CharStream { } type CommonToken struct { - *BaseToken + BaseToken } func NewCommonToken(source *TokenSourceCharStreamPair, tokenType, channel, start, stop int) *CommonToken { - t := new(CommonToken) - - t.BaseToken = new(BaseToken) + t := &CommonToken{ + BaseToken: BaseToken{ + source: source, + tokenType: tokenType, + channel: channel, + start: start, + stop: stop, + tokenIndex: -1, + }, + } - t.source = source - t.tokenType = tokenType - t.channel = channel - t.start = start - t.stop = stop - t.tokenIndex = -1 if t.source.tokenSource != nil { t.line = source.tokenSource.GetLine() t.column = source.tokenSource.GetCharPositionInLine() diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/token_source.go b/vendor/github.com/antlr4-go/antlr/v4/token_source.go similarity index 100% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/token_source.go rename to vendor/github.com/antlr4-go/antlr/v4/token_source.go diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/token_stream.go b/vendor/github.com/antlr4-go/antlr/v4/token_stream.go similarity index 90% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/token_stream.go rename to vendor/github.com/antlr4-go/antlr/v4/token_stream.go index 1527d43f6..bf4ff6633 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/token_stream.go +++ b/vendor/github.com/antlr4-go/antlr/v4/token_stream.go @@ -8,13 +8,14 @@ type TokenStream interface { IntStream LT(k int) Token + Reset() Get(index int) Token GetTokenSource() TokenSource SetTokenSource(TokenSource) GetAllText() string - GetTextFromInterval(*Interval) string + GetTextFromInterval(Interval) string GetTextFromRuleContext(RuleContext) string GetTextFromTokens(Token, Token) string } diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/tokenstream_rewriter.go b/vendor/github.com/antlr4-go/antlr/v4/tokenstream_rewriter.go similarity index 73% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/tokenstream_rewriter.go rename to vendor/github.com/antlr4-go/antlr/v4/tokenstream_rewriter.go index b3e38af34..ccf59b465 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/tokenstream_rewriter.go +++ b/vendor/github.com/antlr4-go/antlr/v4/tokenstream_rewriter.go @@ -86,14 +86,15 @@ import ( // first example shows.

const ( - Default_Program_Name = "default" - Program_Init_Size = 100 - Min_Token_Index = 0 + DefaultProgramName = "default" + ProgramInitSize = 100 + MinTokenIndex = 0 ) // Define the rewrite operation hierarchy type RewriteOperation interface { + // Execute the rewrite operation by possibly adding to the buffer. // Return the index of the next token to operate on. Execute(buffer *bytes.Buffer) int @@ -112,19 +113,19 @@ type RewriteOperation interface { type BaseRewriteOperation struct { //Current index of rewrites list - instruction_index int + instructionIndex int //Token buffer index index int //Substitution text text string //Actual operation name - op_name string + opName string //Pointer to token steam tokens TokenStream } func (op *BaseRewriteOperation) GetInstructionIndex() int { - return op.instruction_index + return op.instructionIndex } func (op *BaseRewriteOperation) GetIndex() int { @@ -136,7 +137,7 @@ func (op *BaseRewriteOperation) GetText() string { } func (op *BaseRewriteOperation) GetOpName() string { - return op.op_name + return op.opName } func (op *BaseRewriteOperation) GetTokens() TokenStream { @@ -144,7 +145,7 @@ func (op *BaseRewriteOperation) GetTokens() TokenStream { } func (op *BaseRewriteOperation) SetInstructionIndex(val int) { - op.instruction_index = val + op.instructionIndex = val } func (op *BaseRewriteOperation) SetIndex(val int) { @@ -156,20 +157,20 @@ func (op *BaseRewriteOperation) SetText(val string) { } func (op *BaseRewriteOperation) SetOpName(val string) { - op.op_name = val + op.opName = val } func (op *BaseRewriteOperation) SetTokens(val TokenStream) { op.tokens = val } -func (op *BaseRewriteOperation) Execute(buffer *bytes.Buffer) int { +func (op *BaseRewriteOperation) Execute(_ *bytes.Buffer) int { return op.index } func (op *BaseRewriteOperation) String() string { return fmt.Sprintf("<%s@%d:\"%s\">", - op.op_name, + op.opName, op.tokens.Get(op.GetIndex()), op.text, ) @@ -182,10 +183,10 @@ type InsertBeforeOp struct { func NewInsertBeforeOp(index int, text string, stream TokenStream) *InsertBeforeOp { return &InsertBeforeOp{BaseRewriteOperation: BaseRewriteOperation{ - index: index, - text: text, - op_name: "InsertBeforeOp", - tokens: stream, + index: index, + text: text, + opName: "InsertBeforeOp", + tokens: stream, }} } @@ -201,20 +202,21 @@ func (op *InsertBeforeOp) String() string { return op.BaseRewriteOperation.String() } -// Distinguish between insert after/before to do the "insert afters" -// first and then the "insert befores" at same index. Implementation -// of "insert after" is "insert before index+1". - +// InsertAfterOp distinguishes between insert after/before to do the "insert after" instructions +// first and then the "insert before" instructions at same index. Implementation +// of "insert after" is "insert before index+1". type InsertAfterOp struct { BaseRewriteOperation } func NewInsertAfterOp(index int, text string, stream TokenStream) *InsertAfterOp { - return &InsertAfterOp{BaseRewriteOperation: BaseRewriteOperation{ - index: index + 1, - text: text, - tokens: stream, - }} + return &InsertAfterOp{ + BaseRewriteOperation: BaseRewriteOperation{ + index: index + 1, + text: text, + tokens: stream, + }, + } } func (op *InsertAfterOp) Execute(buffer *bytes.Buffer) int { @@ -229,7 +231,7 @@ func (op *InsertAfterOp) String() string { return op.BaseRewriteOperation.String() } -// I'm going to try replacing range from x..y with (y-x)+1 ReplaceOp +// ReplaceOp tries to replace range from x..y with (y-x)+1 ReplaceOp // instructions. 
type ReplaceOp struct { BaseRewriteOperation @@ -239,10 +241,10 @@ type ReplaceOp struct { func NewReplaceOp(from, to int, text string, stream TokenStream) *ReplaceOp { return &ReplaceOp{ BaseRewriteOperation: BaseRewriteOperation{ - index: from, - text: text, - op_name: "ReplaceOp", - tokens: stream, + index: from, + text: text, + opName: "ReplaceOp", + tokens: stream, }, LastIndex: to, } @@ -270,17 +272,17 @@ type TokenStreamRewriter struct { // You may have multiple, named streams of rewrite operations. // I'm calling these things "programs." // Maps String (name) → rewrite (List) - programs map[string][]RewriteOperation - last_rewrite_token_indexes map[string]int + programs map[string][]RewriteOperation + lastRewriteTokenIndexes map[string]int } func NewTokenStreamRewriter(tokens TokenStream) *TokenStreamRewriter { return &TokenStreamRewriter{ tokens: tokens, programs: map[string][]RewriteOperation{ - Default_Program_Name: make([]RewriteOperation, 0, Program_Init_Size), + DefaultProgramName: make([]RewriteOperation, 0, ProgramInitSize), }, - last_rewrite_token_indexes: map[string]int{}, + lastRewriteTokenIndexes: map[string]int{}, } } @@ -291,110 +293,110 @@ func (tsr *TokenStreamRewriter) GetTokenStream() TokenStream { // Rollback the instruction stream for a program so that // the indicated instruction (via instructionIndex) is no // longer in the stream. UNTESTED! -func (tsr *TokenStreamRewriter) Rollback(program_name string, instruction_index int) { - is, ok := tsr.programs[program_name] +func (tsr *TokenStreamRewriter) Rollback(programName string, instructionIndex int) { + is, ok := tsr.programs[programName] if ok { - tsr.programs[program_name] = is[Min_Token_Index:instruction_index] + tsr.programs[programName] = is[MinTokenIndex:instructionIndex] } } -func (tsr *TokenStreamRewriter) RollbackDefault(instruction_index int) { - tsr.Rollback(Default_Program_Name, instruction_index) +func (tsr *TokenStreamRewriter) RollbackDefault(instructionIndex int) { + tsr.Rollback(DefaultProgramName, instructionIndex) } -// Reset the program so that no instructions exist -func (tsr *TokenStreamRewriter) DeleteProgram(program_name string) { - tsr.Rollback(program_name, Min_Token_Index) //TODO: double test on that cause lower bound is not included +// DeleteProgram Reset the program so that no instructions exist +func (tsr *TokenStreamRewriter) DeleteProgram(programName string) { + tsr.Rollback(programName, MinTokenIndex) //TODO: double test on that cause lower bound is not included } func (tsr *TokenStreamRewriter) DeleteProgramDefault() { - tsr.DeleteProgram(Default_Program_Name) + tsr.DeleteProgram(DefaultProgramName) } -func (tsr *TokenStreamRewriter) InsertAfter(program_name string, index int, text string) { +func (tsr *TokenStreamRewriter) InsertAfter(programName string, index int, text string) { // to insert after, just insert before next index (even if past end) var op RewriteOperation = NewInsertAfterOp(index, text, tsr.tokens) - rewrites := tsr.GetProgram(program_name) + rewrites := tsr.GetProgram(programName) op.SetInstructionIndex(len(rewrites)) - tsr.AddToProgram(program_name, op) + tsr.AddToProgram(programName, op) } func (tsr *TokenStreamRewriter) InsertAfterDefault(index int, text string) { - tsr.InsertAfter(Default_Program_Name, index, text) + tsr.InsertAfter(DefaultProgramName, index, text) } -func (tsr *TokenStreamRewriter) InsertAfterToken(program_name string, token Token, text string) { - tsr.InsertAfter(program_name, token.GetTokenIndex(), text) +func (tsr 
*TokenStreamRewriter) InsertAfterToken(programName string, token Token, text string) { + tsr.InsertAfter(programName, token.GetTokenIndex(), text) } -func (tsr *TokenStreamRewriter) InsertBefore(program_name string, index int, text string) { +func (tsr *TokenStreamRewriter) InsertBefore(programName string, index int, text string) { var op RewriteOperation = NewInsertBeforeOp(index, text, tsr.tokens) - rewrites := tsr.GetProgram(program_name) + rewrites := tsr.GetProgram(programName) op.SetInstructionIndex(len(rewrites)) - tsr.AddToProgram(program_name, op) + tsr.AddToProgram(programName, op) } func (tsr *TokenStreamRewriter) InsertBeforeDefault(index int, text string) { - tsr.InsertBefore(Default_Program_Name, index, text) + tsr.InsertBefore(DefaultProgramName, index, text) } -func (tsr *TokenStreamRewriter) InsertBeforeToken(program_name string, token Token, text string) { - tsr.InsertBefore(program_name, token.GetTokenIndex(), text) +func (tsr *TokenStreamRewriter) InsertBeforeToken(programName string, token Token, text string) { + tsr.InsertBefore(programName, token.GetTokenIndex(), text) } -func (tsr *TokenStreamRewriter) Replace(program_name string, from, to int, text string) { +func (tsr *TokenStreamRewriter) Replace(programName string, from, to int, text string) { if from > to || from < 0 || to < 0 || to >= tsr.tokens.Size() { panic(fmt.Sprintf("replace: range invalid: %d..%d(size=%d)", from, to, tsr.tokens.Size())) } var op RewriteOperation = NewReplaceOp(from, to, text, tsr.tokens) - rewrites := tsr.GetProgram(program_name) + rewrites := tsr.GetProgram(programName) op.SetInstructionIndex(len(rewrites)) - tsr.AddToProgram(program_name, op) + tsr.AddToProgram(programName, op) } func (tsr *TokenStreamRewriter) ReplaceDefault(from, to int, text string) { - tsr.Replace(Default_Program_Name, from, to, text) + tsr.Replace(DefaultProgramName, from, to, text) } func (tsr *TokenStreamRewriter) ReplaceDefaultPos(index int, text string) { tsr.ReplaceDefault(index, index, text) } -func (tsr *TokenStreamRewriter) ReplaceToken(program_name string, from, to Token, text string) { - tsr.Replace(program_name, from.GetTokenIndex(), to.GetTokenIndex(), text) +func (tsr *TokenStreamRewriter) ReplaceToken(programName string, from, to Token, text string) { + tsr.Replace(programName, from.GetTokenIndex(), to.GetTokenIndex(), text) } func (tsr *TokenStreamRewriter) ReplaceTokenDefault(from, to Token, text string) { - tsr.ReplaceToken(Default_Program_Name, from, to, text) + tsr.ReplaceToken(DefaultProgramName, from, to, text) } func (tsr *TokenStreamRewriter) ReplaceTokenDefaultPos(index Token, text string) { tsr.ReplaceTokenDefault(index, index, text) } -func (tsr *TokenStreamRewriter) Delete(program_name string, from, to int) { - tsr.Replace(program_name, from, to, "") +func (tsr *TokenStreamRewriter) Delete(programName string, from, to int) { + tsr.Replace(programName, from, to, "") } func (tsr *TokenStreamRewriter) DeleteDefault(from, to int) { - tsr.Delete(Default_Program_Name, from, to) + tsr.Delete(DefaultProgramName, from, to) } func (tsr *TokenStreamRewriter) DeleteDefaultPos(index int) { tsr.DeleteDefault(index, index) } -func (tsr *TokenStreamRewriter) DeleteToken(program_name string, from, to Token) { - tsr.ReplaceToken(program_name, from, to, "") +func (tsr *TokenStreamRewriter) DeleteToken(programName string, from, to Token) { + tsr.ReplaceToken(programName, from, to, "") } func (tsr *TokenStreamRewriter) DeleteTokenDefault(from, to Token) { - tsr.DeleteToken(Default_Program_Name, from, to) + 
tsr.DeleteToken(DefaultProgramName, from, to) } -func (tsr *TokenStreamRewriter) GetLastRewriteTokenIndex(program_name string) int { - i, ok := tsr.last_rewrite_token_indexes[program_name] +func (tsr *TokenStreamRewriter) GetLastRewriteTokenIndex(programName string) int { + i, ok := tsr.lastRewriteTokenIndexes[programName] if !ok { return -1 } @@ -402,15 +404,15 @@ func (tsr *TokenStreamRewriter) GetLastRewriteTokenIndex(program_name string) in } func (tsr *TokenStreamRewriter) GetLastRewriteTokenIndexDefault() int { - return tsr.GetLastRewriteTokenIndex(Default_Program_Name) + return tsr.GetLastRewriteTokenIndex(DefaultProgramName) } -func (tsr *TokenStreamRewriter) SetLastRewriteTokenIndex(program_name string, i int) { - tsr.last_rewrite_token_indexes[program_name] = i +func (tsr *TokenStreamRewriter) SetLastRewriteTokenIndex(programName string, i int) { + tsr.lastRewriteTokenIndexes[programName] = i } func (tsr *TokenStreamRewriter) InitializeProgram(name string) []RewriteOperation { - is := make([]RewriteOperation, 0, Program_Init_Size) + is := make([]RewriteOperation, 0, ProgramInitSize) tsr.programs[name] = is return is } @@ -429,24 +431,24 @@ func (tsr *TokenStreamRewriter) GetProgram(name string) []RewriteOperation { return is } -// Return the text from the original tokens altered per the +// GetTextDefault returns the text from the original tokens altered per the // instructions given to this rewriter. func (tsr *TokenStreamRewriter) GetTextDefault() string { return tsr.GetText( - Default_Program_Name, + DefaultProgramName, NewInterval(0, tsr.tokens.Size()-1)) } -// Return the text from the original tokens altered per the +// GetText returns the text from the original tokens altered per the // instructions given to this rewriter. -func (tsr *TokenStreamRewriter) GetText(program_name string, interval *Interval) string { - rewrites := tsr.programs[program_name] +func (tsr *TokenStreamRewriter) GetText(programName string, interval Interval) string { + rewrites := tsr.programs[programName] start := interval.Start stop := interval.Stop // ensure start/end are in range stop = min(stop, tsr.tokens.Size()-1) start = max(start, 0) - if rewrites == nil || len(rewrites) == 0 { + if len(rewrites) == 0 { return tsr.tokens.GetTextFromInterval(interval) // no instructions to execute } buf := bytes.Buffer{} @@ -482,11 +484,13 @@ func (tsr *TokenStreamRewriter) GetText(program_name string, interval *Interval) return buf.String() } -// We need to combine operations and report invalid operations (like -// overlapping replaces that are not completed nested). Inserts to -// same index need to be combined etc... Here are the cases: +// reduceToSingleOperationPerIndex combines operations and report invalid operations (like +// overlapping replaces that are not completed nested). Inserts to +// same index need to be combined etc... 
+// +// Here are the cases: // -// I.i.u I.j.v leave alone, nonoverlapping +// I.i.u I.j.v leave alone, non-overlapping // I.i.u I.i.v combine: Iivu // // R.i-j.u R.x-y.v | i-j in x-y delete first R @@ -498,38 +502,38 @@ func (tsr *TokenStreamRewriter) GetText(program_name string, interval *Interval) // D.i-j.u D.x-y.v | boundaries overlap combine to max(min)..max(right) // // I.i.u R.x-y.v | i in (x+1)-y delete I (since insert before -// we're not deleting i) -// I.i.u R.x-y.v | i not in (x+1)-y leave alone, nonoverlapping +// we're not deleting i) +// I.i.u R.x-y.v | i not in (x+1)-y leave alone, non-overlapping // R.x-y.v I.i.u | i in x-y ERROR // R.x-y.v I.x.u R.x-y.uv (combine, delete I) -// R.x-y.v I.i.u | i not in x-y leave alone, nonoverlapping +// R.x-y.v I.i.u | i not in x-y leave alone, non-overlapping // // I.i.u = insert u before op @ index i // R.x-y.u = replace x-y indexed tokens with u // -// First we need to examine replaces. For any replace op: +// First we need to examine replaces. For any replace op: // -// 1. wipe out any insertions before op within that range. -// 2. Drop any replace op before that is contained completely within -// that range. -// 3. Throw exception upon boundary overlap with any previous replace. +// 1. wipe out any insertions before op within that range. +// 2. Drop any replace op before that is contained completely within +// that range. +// 3. Throw exception upon boundary overlap with any previous replace. // -// Then we can deal with inserts: +// Then we can deal with inserts: // -// 1. for any inserts to same index, combine even if not adjacent. -// 2. for any prior replace with same left boundary, combine this -// insert with replace and delete this replace. -// 3. throw exception if index in same range as previous replace +// 1. for any inserts to same index, combine even if not adjacent. +// 2. for any prior replace with same left boundary, combine this +// insert with replace and delete this 'replace'. +// 3. throw exception if index in same range as previous replace // -// Don't actually delete; make op null in list. Easier to walk list. -// Later we can throw as we add to index → op map. +// Don't actually delete; make op null in list. Easier to walk list. +// Later we can throw as we add to index → op map. // -// Note that I.2 R.2-2 will wipe out I.2 even though, technically, the -// inserted stuff would be before the replace range. But, if you -// add tokens in front of a method body '{' and then delete the method -// body, I think the stuff before the '{' you added should disappear too. +// Note that I.2 R.2-2 will wipe out I.2 even though, technically, the +// inserted stuff would be before the 'replace' range. But, if you +// add tokens in front of a method body '{' and then delete the method +// body, I think the stuff before the '{' you added should disappear too. // -// Return a map from token index to operation. +// The func returns a map from token index to operation. 
func reduceToSingleOperationPerIndex(rewrites []RewriteOperation) map[int]RewriteOperation { // WALK REPLACES for i := 0; i < len(rewrites); i++ { @@ -547,7 +551,7 @@ func reduceToSingleOperationPerIndex(rewrites []RewriteOperation) map[int]Rewrit if iop.index == rop.index { // E.g., insert before 2, delete 2..2; update replace // text to include insert before, kill insert - rewrites[iop.instruction_index] = nil + rewrites[iop.instructionIndex] = nil if rop.text != "" { rop.text = iop.text + rop.text } else { @@ -555,7 +559,7 @@ func reduceToSingleOperationPerIndex(rewrites []RewriteOperation) map[int]Rewrit } } else if iop.index > rop.index && iop.index <= rop.LastIndex { // delete insert as it's a no-op. - rewrites[iop.instruction_index] = nil + rewrites[iop.instructionIndex] = nil } } } @@ -564,7 +568,7 @@ func reduceToSingleOperationPerIndex(rewrites []RewriteOperation) map[int]Rewrit if prevop, ok := rewrites[j].(*ReplaceOp); ok { if prevop.index >= rop.index && prevop.LastIndex <= rop.LastIndex { // delete replace as it's a no-op. - rewrites[prevop.instruction_index] = nil + rewrites[prevop.instructionIndex] = nil continue } // throw exception unless disjoint or identical @@ -572,10 +576,9 @@ func reduceToSingleOperationPerIndex(rewrites []RewriteOperation) map[int]Rewrit // Delete special case of replace (text==null): // D.i-j.u D.x-y.v | boundaries overlap combine to max(min)..max(right) if prevop.text == "" && rop.text == "" && !disjoint { - rewrites[prevop.instruction_index] = nil + rewrites[prevop.instructionIndex] = nil rop.index = min(prevop.index, rop.index) rop.LastIndex = max(prevop.LastIndex, rop.LastIndex) - println("new rop" + rop.String()) //TODO: remove console write, taken from Java version } else if !disjoint { panic("replace op boundaries of " + rop.String() + " overlap with previous " + prevop.String()) } @@ -607,7 +610,7 @@ func reduceToSingleOperationPerIndex(rewrites []RewriteOperation) map[int]Rewrit if prevIop, ok := rewrites[j].(*InsertBeforeOp); ok { if prevIop.index == iop.GetIndex() { iop.SetText(iop.GetText() + prevIop.text) - rewrites[prevIop.instruction_index] = nil + rewrites[prevIop.instructionIndex] = nil } } } diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/trace_listener.go b/vendor/github.com/antlr4-go/antlr/v4/trace_listener.go similarity index 100% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/trace_listener.go rename to vendor/github.com/antlr4-go/antlr/v4/trace_listener.go diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/transition.go b/vendor/github.com/antlr4-go/antlr/v4/transition.go similarity index 67% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/transition.go rename to vendor/github.com/antlr4-go/antlr/v4/transition.go index 36be4f733..313b0fc12 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/transition.go +++ b/vendor/github.com/antlr4-go/antlr/v4/transition.go @@ -72,7 +72,7 @@ func (t *BaseTransition) getSerializationType() int { return t.serializationType } -func (t *BaseTransition) Matches(symbol, minVocabSymbol, maxVocabSymbol int) bool { +func (t *BaseTransition) Matches(_, _, _ int) bool { panic("Not implemented") } @@ -89,6 +89,7 @@ const ( TransitionPRECEDENCE = 10 ) +//goland:noinspection GoUnusedGlobalVariable var TransitionserializationNames = []string{ "INVALID", "EPSILON", @@ -127,19 +128,22 @@ var TransitionserializationNames = []string{ // TransitionPRECEDENCE //} +// AtomTransition // TODO: make all transitions sets? 
no, should remove set edges type AtomTransition struct { - *BaseTransition + BaseTransition } func NewAtomTransition(target ATNState, intervalSet int) *AtomTransition { - - t := new(AtomTransition) - t.BaseTransition = NewBaseTransition(target) - - t.label = intervalSet // The token type or character value or, signifies special intervalSet. + t := &AtomTransition{ + BaseTransition: BaseTransition{ + target: target, + serializationType: TransitionATOM, + label: intervalSet, + isEpsilon: false, + }, + } t.intervalSet = t.makeLabel() - t.serializationType = TransitionATOM return t } @@ -150,7 +154,7 @@ func (t *AtomTransition) makeLabel() *IntervalSet { return s } -func (t *AtomTransition) Matches(symbol, minVocabSymbol, maxVocabSymbol int) bool { +func (t *AtomTransition) Matches(symbol, _, _ int) bool { return t.label == symbol } @@ -159,48 +163,45 @@ func (t *AtomTransition) String() string { } type RuleTransition struct { - *BaseTransition - + BaseTransition followState ATNState ruleIndex, precedence int } func NewRuleTransition(ruleStart ATNState, ruleIndex, precedence int, followState ATNState) *RuleTransition { - - t := new(RuleTransition) - t.BaseTransition = NewBaseTransition(ruleStart) - - t.ruleIndex = ruleIndex - t.precedence = precedence - t.followState = followState - t.serializationType = TransitionRULE - t.isEpsilon = true - - return t + return &RuleTransition{ + BaseTransition: BaseTransition{ + target: ruleStart, + isEpsilon: true, + serializationType: TransitionRULE, + }, + ruleIndex: ruleIndex, + precedence: precedence, + followState: followState, + } } -func (t *RuleTransition) Matches(symbol, minVocabSymbol, maxVocabSymbol int) bool { +func (t *RuleTransition) Matches(_, _, _ int) bool { return false } type EpsilonTransition struct { - *BaseTransition - + BaseTransition outermostPrecedenceReturn int } func NewEpsilonTransition(target ATNState, outermostPrecedenceReturn int) *EpsilonTransition { - - t := new(EpsilonTransition) - t.BaseTransition = NewBaseTransition(target) - - t.serializationType = TransitionEPSILON - t.isEpsilon = true - t.outermostPrecedenceReturn = outermostPrecedenceReturn - return t + return &EpsilonTransition{ + BaseTransition: BaseTransition{ + target: target, + serializationType: TransitionEPSILON, + isEpsilon: true, + }, + outermostPrecedenceReturn: outermostPrecedenceReturn, + } } -func (t *EpsilonTransition) Matches(symbol, minVocabSymbol, maxVocabSymbol int) bool { +func (t *EpsilonTransition) Matches(_, _, _ int) bool { return false } @@ -209,19 +210,20 @@ func (t *EpsilonTransition) String() string { } type RangeTransition struct { - *BaseTransition - + BaseTransition start, stop int } func NewRangeTransition(target ATNState, start, stop int) *RangeTransition { - - t := new(RangeTransition) - t.BaseTransition = NewBaseTransition(target) - - t.serializationType = TransitionRANGE - t.start = start - t.stop = stop + t := &RangeTransition{ + BaseTransition: BaseTransition{ + target: target, + serializationType: TransitionRANGE, + isEpsilon: false, + }, + start: start, + stop: stop, + } t.intervalSet = t.makeLabel() return t } @@ -232,7 +234,7 @@ func (t *RangeTransition) makeLabel() *IntervalSet { return s } -func (t *RangeTransition) Matches(symbol, minVocabSymbol, maxVocabSymbol int) bool { +func (t *RangeTransition) Matches(symbol, _, _ int) bool { return symbol >= t.start && symbol <= t.stop } @@ -252,40 +254,41 @@ type AbstractPredicateTransition interface { } type BaseAbstractPredicateTransition struct { - *BaseTransition + BaseTransition 
} func NewBasePredicateTransition(target ATNState) *BaseAbstractPredicateTransition { - - t := new(BaseAbstractPredicateTransition) - t.BaseTransition = NewBaseTransition(target) - - return t + return &BaseAbstractPredicateTransition{ + BaseTransition: BaseTransition{ + target: target, + }, + } } func (a *BaseAbstractPredicateTransition) IAbstractPredicateTransitionFoo() {} type PredicateTransition struct { - *BaseAbstractPredicateTransition - + BaseAbstractPredicateTransition isCtxDependent bool ruleIndex, predIndex int } func NewPredicateTransition(target ATNState, ruleIndex, predIndex int, isCtxDependent bool) *PredicateTransition { - - t := new(PredicateTransition) - t.BaseAbstractPredicateTransition = NewBasePredicateTransition(target) - - t.serializationType = TransitionPREDICATE - t.ruleIndex = ruleIndex - t.predIndex = predIndex - t.isCtxDependent = isCtxDependent // e.g., $i ref in pred - t.isEpsilon = true - return t + return &PredicateTransition{ + BaseAbstractPredicateTransition: BaseAbstractPredicateTransition{ + BaseTransition: BaseTransition{ + target: target, + serializationType: TransitionPREDICATE, + isEpsilon: true, + }, + }, + isCtxDependent: isCtxDependent, + ruleIndex: ruleIndex, + predIndex: predIndex, + } } -func (t *PredicateTransition) Matches(symbol, minVocabSymbol, maxVocabSymbol int) bool { +func (t *PredicateTransition) Matches(_, _, _ int) bool { return false } @@ -298,26 +301,25 @@ func (t *PredicateTransition) String() string { } type ActionTransition struct { - *BaseTransition - + BaseTransition isCtxDependent bool ruleIndex, actionIndex, predIndex int } func NewActionTransition(target ATNState, ruleIndex, actionIndex int, isCtxDependent bool) *ActionTransition { - - t := new(ActionTransition) - t.BaseTransition = NewBaseTransition(target) - - t.serializationType = TransitionACTION - t.ruleIndex = ruleIndex - t.actionIndex = actionIndex - t.isCtxDependent = isCtxDependent // e.g., $i ref in pred - t.isEpsilon = true - return t + return &ActionTransition{ + BaseTransition: BaseTransition{ + target: target, + serializationType: TransitionACTION, + isEpsilon: true, + }, + isCtxDependent: isCtxDependent, + ruleIndex: ruleIndex, + actionIndex: actionIndex, + } } -func (t *ActionTransition) Matches(symbol, minVocabSymbol, maxVocabSymbol int) bool { +func (t *ActionTransition) Matches(_, _, _ int) bool { return false } @@ -326,26 +328,27 @@ func (t *ActionTransition) String() string { } type SetTransition struct { - *BaseTransition + BaseTransition } func NewSetTransition(target ATNState, set *IntervalSet) *SetTransition { + t := &SetTransition{ + BaseTransition: BaseTransition{ + target: target, + serializationType: TransitionSET, + }, + } - t := new(SetTransition) - t.BaseTransition = NewBaseTransition(target) - - t.serializationType = TransitionSET if set != nil { t.intervalSet = set } else { t.intervalSet = NewIntervalSet() t.intervalSet.addOne(TokenInvalidType) } - return t } -func (t *SetTransition) Matches(symbol, minVocabSymbol, maxVocabSymbol int) bool { +func (t *SetTransition) Matches(symbol, _, _ int) bool { return t.intervalSet.contains(symbol) } @@ -354,16 +357,24 @@ func (t *SetTransition) String() string { } type NotSetTransition struct { - *SetTransition + SetTransition } func NewNotSetTransition(target ATNState, set *IntervalSet) *NotSetTransition { - - t := new(NotSetTransition) - - t.SetTransition = NewSetTransition(target, set) - - t.serializationType = TransitionNOTSET + t := &NotSetTransition{ + SetTransition: SetTransition{ + 
BaseTransition: BaseTransition{ + target: target, + serializationType: TransitionNOTSET, + }, + }, + } + if set != nil { + t.intervalSet = set + } else { + t.intervalSet = NewIntervalSet() + t.intervalSet.addOne(TokenInvalidType) + } return t } @@ -377,16 +388,16 @@ func (t *NotSetTransition) String() string { } type WildcardTransition struct { - *BaseTransition + BaseTransition } func NewWildcardTransition(target ATNState) *WildcardTransition { - - t := new(WildcardTransition) - t.BaseTransition = NewBaseTransition(target) - - t.serializationType = TransitionWILDCARD - return t + return &WildcardTransition{ + BaseTransition: BaseTransition{ + target: target, + serializationType: TransitionWILDCARD, + }, + } } func (t *WildcardTransition) Matches(symbol, minVocabSymbol, maxVocabSymbol int) bool { @@ -398,24 +409,24 @@ func (t *WildcardTransition) String() string { } type PrecedencePredicateTransition struct { - *BaseAbstractPredicateTransition - + BaseAbstractPredicateTransition precedence int } func NewPrecedencePredicateTransition(target ATNState, precedence int) *PrecedencePredicateTransition { - - t := new(PrecedencePredicateTransition) - t.BaseAbstractPredicateTransition = NewBasePredicateTransition(target) - - t.serializationType = TransitionPRECEDENCE - t.precedence = precedence - t.isEpsilon = true - - return t + return &PrecedencePredicateTransition{ + BaseAbstractPredicateTransition: BaseAbstractPredicateTransition{ + BaseTransition: BaseTransition{ + target: target, + serializationType: TransitionPRECEDENCE, + isEpsilon: true, + }, + }, + precedence: precedence, + } } -func (t *PrecedencePredicateTransition) Matches(symbol, minVocabSymbol, maxVocabSymbol int) bool { +func (t *PrecedencePredicateTransition) Matches(_, _, _ int) bool { return false } diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/tree.go b/vendor/github.com/antlr4-go/antlr/v4/tree.go similarity index 62% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/tree.go rename to vendor/github.com/antlr4-go/antlr/v4/tree.go index 85b4f137b..c288420fb 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/tree.go +++ b/vendor/github.com/antlr4-go/antlr/v4/tree.go @@ -21,29 +21,23 @@ type Tree interface { type SyntaxTree interface { Tree - - GetSourceInterval() *Interval + GetSourceInterval() Interval } type ParseTree interface { SyntaxTree - Accept(Visitor ParseTreeVisitor) interface{} GetText() string - ToStringTree([]string, Recognizer) string } type RuleNode interface { ParseTree - GetRuleContext() RuleContext - GetBaseRuleContext() *BaseRuleContext } type TerminalNode interface { ParseTree - GetSymbol() Token } @@ -64,12 +58,12 @@ type BaseParseTreeVisitor struct{} var _ ParseTreeVisitor = &BaseParseTreeVisitor{} -func (v *BaseParseTreeVisitor) Visit(tree ParseTree) interface{} { return tree.Accept(v) } -func (v *BaseParseTreeVisitor) VisitChildren(node RuleNode) interface{} { return nil } -func (v *BaseParseTreeVisitor) VisitTerminal(node TerminalNode) interface{} { return nil } -func (v *BaseParseTreeVisitor) VisitErrorNode(node ErrorNode) interface{} { return nil } +func (v *BaseParseTreeVisitor) Visit(tree ParseTree) interface{} { return tree.Accept(v) } +func (v *BaseParseTreeVisitor) VisitChildren(_ RuleNode) interface{} { return nil } +func (v *BaseParseTreeVisitor) VisitTerminal(_ TerminalNode) interface{} { return nil } +func (v *BaseParseTreeVisitor) VisitErrorNode(_ ErrorNode) interface{} { return nil } -// TODO +// TODO: Implement this? 
//func (this ParseTreeVisitor) Visit(ctx) { // if (Utils.isArray(ctx)) { // self := this @@ -101,15 +95,14 @@ type BaseParseTreeListener struct{} var _ ParseTreeListener = &BaseParseTreeListener{} -func (l *BaseParseTreeListener) VisitTerminal(node TerminalNode) {} -func (l *BaseParseTreeListener) VisitErrorNode(node ErrorNode) {} -func (l *BaseParseTreeListener) EnterEveryRule(ctx ParserRuleContext) {} -func (l *BaseParseTreeListener) ExitEveryRule(ctx ParserRuleContext) {} +func (l *BaseParseTreeListener) VisitTerminal(_ TerminalNode) {} +func (l *BaseParseTreeListener) VisitErrorNode(_ ErrorNode) {} +func (l *BaseParseTreeListener) EnterEveryRule(_ ParserRuleContext) {} +func (l *BaseParseTreeListener) ExitEveryRule(_ ParserRuleContext) {} type TerminalNodeImpl struct { parentCtx RuleContext - - symbol Token + symbol Token } var _ TerminalNode = &TerminalNodeImpl{} @@ -123,7 +116,7 @@ func NewTerminalNodeImpl(symbol Token) *TerminalNodeImpl { return tn } -func (t *TerminalNodeImpl) GetChild(i int) Tree { +func (t *TerminalNodeImpl) GetChild(_ int) Tree { return nil } @@ -131,7 +124,7 @@ func (t *TerminalNodeImpl) GetChildren() []Tree { return nil } -func (t *TerminalNodeImpl) SetChildren(tree []Tree) { +func (t *TerminalNodeImpl) SetChildren(_ []Tree) { panic("Cannot set children on terminal node") } @@ -151,7 +144,7 @@ func (t *TerminalNodeImpl) GetPayload() interface{} { return t.symbol } -func (t *TerminalNodeImpl) GetSourceInterval() *Interval { +func (t *TerminalNodeImpl) GetSourceInterval() Interval { if t.symbol == nil { return TreeInvalidInterval } @@ -179,7 +172,7 @@ func (t *TerminalNodeImpl) String() string { return t.symbol.GetText() } -func (t *TerminalNodeImpl) ToStringTree(s []string, r Recognizer) string { +func (t *TerminalNodeImpl) ToStringTree(_ []string, _ Recognizer) string { return t.String() } @@ -214,10 +207,9 @@ func NewParseTreeWalker() *ParseTreeWalker { return new(ParseTreeWalker) } -// Performs a walk on the given parse tree starting at the root and going down recursively -// with depth-first search. On each node, EnterRule is called before -// recursively walking down into child nodes, then -// ExitRule is called after the recursive call to wind up. +// Walk performs a walk on the given parse tree starting at the root and going down recursively +// with depth-first search. On each node, [EnterRule] is called before +// recursively walking down into child nodes, then [ExitRule] is called after the recursive call to wind up. 
func (p *ParseTreeWalker) Walk(listener ParseTreeListener, t Tree) { switch tt := t.(type) { case ErrorNode: @@ -234,7 +226,7 @@ func (p *ParseTreeWalker) Walk(listener ParseTreeListener, t Tree) { } } -// Enters a grammar rule by first triggering the generic event {@link ParseTreeListener//EnterEveryRule} +// EnterRule enters a grammar rule by first triggering the generic event [ParseTreeListener].[EnterEveryRule] // then by triggering the event specific to the given parse tree node func (p *ParseTreeWalker) EnterRule(listener ParseTreeListener, r RuleNode) { ctx := r.GetRuleContext().(ParserRuleContext) @@ -242,12 +234,71 @@ func (p *ParseTreeWalker) EnterRule(listener ParseTreeListener, r RuleNode) { ctx.EnterRule(listener) } -// Exits a grammar rule by first triggering the event specific to the given parse tree node -// then by triggering the generic event {@link ParseTreeListener//ExitEveryRule} +// ExitRule exits a grammar rule by first triggering the event specific to the given parse tree node +// then by triggering the generic event [ParseTreeListener].ExitEveryRule func (p *ParseTreeWalker) ExitRule(listener ParseTreeListener, r RuleNode) { ctx := r.GetRuleContext().(ParserRuleContext) ctx.ExitRule(listener) listener.ExitEveryRule(ctx) } +//goland:noinspection GoUnusedGlobalVariable var ParseTreeWalkerDefault = NewParseTreeWalker() + +type IterativeParseTreeWalker struct { + *ParseTreeWalker +} + +//goland:noinspection GoUnusedExportedFunction +func NewIterativeParseTreeWalker() *IterativeParseTreeWalker { + return new(IterativeParseTreeWalker) +} + +func (i *IterativeParseTreeWalker) Walk(listener ParseTreeListener, t Tree) { + var stack []Tree + var indexStack []int + currentNode := t + currentIndex := 0 + + for currentNode != nil { + // pre-order visit + switch tt := currentNode.(type) { + case ErrorNode: + listener.VisitErrorNode(tt) + case TerminalNode: + listener.VisitTerminal(tt) + default: + i.EnterRule(listener, currentNode.(RuleNode)) + } + // Move down to first child, if exists + if currentNode.GetChildCount() > 0 { + stack = append(stack, currentNode) + indexStack = append(indexStack, currentIndex) + currentIndex = 0 + currentNode = currentNode.GetChild(0) + continue + } + + for { + // post-order visit + if ruleNode, ok := currentNode.(RuleNode); ok { + i.ExitRule(listener, ruleNode) + } + // No parent, so no siblings + if len(stack) == 0 { + currentNode = nil + currentIndex = 0 + break + } + // Move to next sibling if possible + currentIndex++ + if stack[len(stack)-1].GetChildCount() > currentIndex { + currentNode = stack[len(stack)-1].GetChild(currentIndex) + break + } + // No next, sibling, so move up + currentNode, stack = stack[len(stack)-1], stack[:len(stack)-1] + currentIndex, indexStack = indexStack[len(indexStack)-1], indexStack[:len(indexStack)-1] + } + } +} diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/trees.go b/vendor/github.com/antlr4-go/antlr/v4/trees.go similarity index 81% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/trees.go rename to vendor/github.com/antlr4-go/antlr/v4/trees.go index d7dbb0322..f44c05d81 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/trees.go +++ b/vendor/github.com/antlr4-go/antlr/v4/trees.go @@ -8,10 +8,8 @@ import "fmt" /** A set of utility routines useful for all kinds of ANTLR trees. */ -// Print out a whole tree in LISP form. {@link //getNodeText} is used on the -// -// node payloads to get the text for the nodes. Detect -// parse trees and extract data appropriately. 
+// TreesStringTree prints out a whole tree in LISP form. [getNodeText] is used on the +// node payloads to get the text for the nodes. Detects parse trees and extracts data appropriately. func TreesStringTree(tree Tree, ruleNames []string, recog Recognizer) string { if recog != nil { @@ -32,7 +30,7 @@ func TreesStringTree(tree Tree, ruleNames []string, recog Recognizer) string { } for i := 1; i < c; i++ { s = TreesStringTree(tree.GetChild(i), ruleNames, nil) - res += (" " + s) + res += " " + s } res += ")" return res @@ -62,7 +60,7 @@ func TreesGetNodeText(t Tree, ruleNames []string, recog Parser) string { } } - // no recog for rule names + // no recognition for rule names payload := t.GetPayload() if p2, ok := payload.(Token); ok { return p2.GetText() @@ -71,7 +69,9 @@ func TreesGetNodeText(t Tree, ruleNames []string, recog Parser) string { return fmt.Sprint(t.GetPayload()) } -// Return ordered list of all children of this node +// TreesGetChildren returns am ordered list of all children of this node +// +//goland:noinspection GoUnusedExportedFunction func TreesGetChildren(t Tree) []Tree { list := make([]Tree, 0) for i := 0; i < t.GetChildCount(); i++ { @@ -80,9 +80,10 @@ func TreesGetChildren(t Tree) []Tree { return list } -// Return a list of all ancestors of this node. The first node of +// TreesgetAncestors returns a list of all ancestors of this node. The first node of list is the root +// and the last node is the parent of this node. // -// list is the root and the last is the parent of this node. +//goland:noinspection GoUnusedExportedFunction func TreesgetAncestors(t Tree) []Tree { ancestors := make([]Tree, 0) t = t.GetParent() @@ -94,10 +95,12 @@ func TreesgetAncestors(t Tree) []Tree { return ancestors } +//goland:noinspection GoUnusedExportedFunction func TreesFindAllTokenNodes(t ParseTree, ttype int) []ParseTree { return TreesfindAllNodes(t, ttype, true) } +//goland:noinspection GoUnusedExportedFunction func TreesfindAllRuleNodes(t ParseTree, ruleIndex int) []ParseTree { return TreesfindAllNodes(t, ruleIndex, false) } @@ -129,6 +132,7 @@ func treesFindAllNodes(t ParseTree, index int, findTokens bool, nodes *[]ParseTr } } +//goland:noinspection GoUnusedExportedFunction func TreesDescendants(t ParseTree) []ParseTree { nodes := []ParseTree{t} for i := 0; i < t.GetChildCount(); i++ { diff --git a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/utils.go b/vendor/github.com/antlr4-go/antlr/v4/utils.go similarity index 85% rename from vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/utils.go rename to vendor/github.com/antlr4-go/antlr/v4/utils.go index 9fad5d916..733d7df9d 100644 --- a/vendor/github.com/antlr/antlr4/runtime/Go/antlr/v4/utils.go +++ b/vendor/github.com/antlr4-go/antlr/v4/utils.go @@ -9,8 +9,10 @@ import ( "errors" "fmt" "math/bits" + "os" "strconv" "strings" + "syscall" ) func intMin(a, b int) int { @@ -31,7 +33,7 @@ func intMax(a, b int) int { type IntStack []int -var ErrEmptyStack = errors.New("Stack is empty") +var ErrEmptyStack = errors.New("stack is empty") func (s *IntStack) Pop() (int, error) { l := len(*s) - 1 @@ -47,33 +49,13 @@ func (s *IntStack) Push(e int) { *s = append(*s, e) } -type comparable interface { - Equals(other Collectable[any]) bool -} - -func standardEqualsFunction(a Collectable[any], b Collectable[any]) bool { - - return a.Equals(b) -} - -func standardHashFunction(a interface{}) int { - if h, ok := a.(hasher); ok { - return h.Hash() - } - - panic("Not Hasher") -} - -type hasher interface { - Hash() int -} - const bitsPerWord = 64 func 
indexForBit(bit int) int { return bit / bitsPerWord } +//goland:noinspection GoUnusedExportedFunction,GoUnusedFunction func wordForBit(data []uint64, bit int) uint64 { idx := indexForBit(bit) if idx >= len(data) { @@ -94,6 +76,8 @@ type BitSet struct { data []uint64 } +// NewBitSet creates a new bitwise set +// TODO: See if we can replace with the standard library's BitSet func NewBitSet() *BitSet { return &BitSet{} } @@ -123,7 +107,7 @@ func (b *BitSet) or(set *BitSet) { setLen := set.minLen() maxLen := intMax(bLen, setLen) if maxLen > len(b.data) { - // Increase the size of len(b.data) to repesent the bits in both sets. + // Increase the size of len(b.data) to represent the bits in both sets. data := make([]uint64, maxLen) copy(data, b.data) b.data = data @@ -246,37 +230,6 @@ func (a *AltDict) values() []interface{} { return vs } -type DoubleDict struct { - data map[int]map[int]interface{} -} - -func NewDoubleDict() *DoubleDict { - dd := new(DoubleDict) - dd.data = make(map[int]map[int]interface{}) - return dd -} - -func (d *DoubleDict) Get(a, b int) interface{} { - data := d.data[a] - - if data == nil { - return nil - } - - return data[b] -} - -func (d *DoubleDict) set(a, b int, o interface{}) { - data := d.data[a] - - if data == nil { - data = make(map[int]interface{}) - d.data[a] = data - } - - data[b] = o -} - func EscapeWhitespace(s string, escapeSpaces bool) string { s = strings.Replace(s, "\t", "\\t", -1) @@ -288,6 +241,7 @@ func EscapeWhitespace(s string, escapeSpaces bool) string { return s } +//goland:noinspection GoUnusedExportedFunction func TerminalNodeToStringArray(sa []TerminalNode) []string { st := make([]string, len(sa)) @@ -298,6 +252,7 @@ func TerminalNodeToStringArray(sa []TerminalNode) []string { return st } +//goland:noinspection GoUnusedExportedFunction func PrintArrayJavaStyle(sa []string) string { var buffer bytes.Buffer @@ -350,3 +305,24 @@ func murmurFinish(h int, numberOfWords int) int { return int(hash) } + +func isDirectory(dir string) (bool, error) { + fileInfo, err := os.Stat(dir) + if err != nil { + switch { + case errors.Is(err, syscall.ENOENT): + // The given directory does not exist, so we will try to create it + // + err = os.MkdirAll(dir, 0755) + if err != nil { + return false, err + } + + return true, nil + case err != nil: + return false, err + default: + } + } + return fileInfo.IsDir(), err +} diff --git a/vendor/github.com/google/cel-go/cel/BUILD.bazel b/vendor/github.com/google/cel-go/cel/BUILD.bazel index 433132113..33da21623 100644 --- a/vendor/github.com/google/cel-go/cel/BUILD.bazel +++ b/vendor/github.com/google/cel-go/cel/BUILD.bazel @@ -10,11 +10,15 @@ go_library( "cel.go", "decls.go", "env.go", + "folding.go", "io.go", + "inlining.go", "library.go", "macro.go", + "optimizer.go", "options.go", "program.go", + "validator.go", ], importpath = "github.com/google/cel-go/cel", visibility = ["//visibility:public"], @@ -22,15 +26,18 @@ go_library( "//checker:go_default_library", "//checker/decls:go_default_library", "//common:go_default_library", + "//common/ast:go_default_library", "//common/containers:go_default_library", + "//common/decls:go_default_library", + "//common/functions:go_default_library", "//common/operators:go_default_library", "//common/overloads:go_default_library", + "//common/stdlib:go_default_library", "//common/types:go_default_library", "//common/types/pb:go_default_library", "//common/types/ref:go_default_library", "//common/types/traits:go_default_library", "//interpreter:go_default_library", - 
"//interpreter/functions:go_default_library", "//parser:go_default_library", "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library", "@org_golang_google_protobuf//proto:go_default_library", @@ -52,7 +59,11 @@ go_test( "cel_test.go", "decls_test.go", "env_test.go", + "folding_test.go", "io_test.go", + "inlining_test.go", + "optimizer_test.go", + "validator_test.go", ], data = [ "//cel/testdata:gen_test_fds", @@ -72,6 +83,8 @@ go_test( "@io_bazel_rules_go//proto/wkt:descriptor_go_proto", "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library", "@org_golang_google_protobuf//proto:go_default_library", + "@org_golang_google_protobuf//encoding/prototext:go_default_library", "@org_golang_google_protobuf//types/known/structpb:go_default_library", + "@org_golang_google_protobuf//types/known/wrapperspb:go_default_library", ], ) diff --git a/vendor/github.com/google/cel-go/cel/decls.go b/vendor/github.com/google/cel-go/cel/decls.go index c0624d1e5..b59e3708d 100644 --- a/vendor/github.com/google/cel-go/cel/decls.go +++ b/vendor/github.com/google/cel-go/cel/decls.go @@ -16,341 +16,133 @@ package cel import ( "fmt" - "strings" - "github.com/google/cel-go/checker/decls" + "github.com/google/cel-go/common/ast" + "github.com/google/cel-go/common/decls" + "github.com/google/cel-go/common/functions" "github.com/google/cel-go/common/types" "github.com/google/cel-go/common/types/ref" - "github.com/google/cel-go/common/types/traits" - "github.com/google/cel-go/interpreter/functions" exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" ) // Kind indicates a CEL type's kind which is used to differentiate quickly between simple and complex types. -type Kind uint +type Kind = types.Kind const ( // DynKind represents a dynamic type. This kind only exists at type-check time. - DynKind Kind = iota + DynKind Kind = types.DynKind // AnyKind represents a google.protobuf.Any type. This kind only exists at type-check time. - AnyKind + AnyKind = types.AnyKind // BoolKind represents a boolean type. - BoolKind + BoolKind = types.BoolKind // BytesKind represents a bytes type. - BytesKind + BytesKind = types.BytesKind // DoubleKind represents a double type. - DoubleKind + DoubleKind = types.DoubleKind // DurationKind represents a CEL duration type. - DurationKind + DurationKind = types.DurationKind // IntKind represents an integer type. - IntKind + IntKind = types.IntKind // ListKind represents a list type. - ListKind + ListKind = types.ListKind // MapKind represents a map type. - MapKind + MapKind = types.MapKind // NullTypeKind represents a null type. - NullTypeKind + NullTypeKind = types.NullTypeKind // OpaqueKind represents an abstract type which has no accessible fields. - OpaqueKind + OpaqueKind = types.OpaqueKind // StringKind represents a string type. - StringKind + StringKind = types.StringKind // StructKind represents a structured object with typed fields. - StructKind + StructKind = types.StructKind // TimestampKind represents a a CEL time type. - TimestampKind + TimestampKind = types.TimestampKind // TypeKind represents the CEL type. - TypeKind + TypeKind = types.TypeKind // TypeParamKind represents a parameterized type whose type name will be resolved at type-check time, if possible. - TypeParamKind + TypeParamKind = types.TypeParamKind // UintKind represents a uint type. - UintKind + UintKind = types.UintKind ) var ( // AnyType represents the google.protobuf.Any type. 
- AnyType = &Type{ - kind: AnyKind, - runtimeType: types.NewTypeValue("google.protobuf.Any"), - } + AnyType = types.AnyType // BoolType represents the bool type. - BoolType = &Type{ - kind: BoolKind, - runtimeType: types.BoolType, - } + BoolType = types.BoolType // BytesType represents the bytes type. - BytesType = &Type{ - kind: BytesKind, - runtimeType: types.BytesType, - } + BytesType = types.BytesType // DoubleType represents the double type. - DoubleType = &Type{ - kind: DoubleKind, - runtimeType: types.DoubleType, - } + DoubleType = types.DoubleType // DurationType represents the CEL duration type. - DurationType = &Type{ - kind: DurationKind, - runtimeType: types.DurationType, - } + DurationType = types.DurationType // DynType represents a dynamic CEL type whose type will be determined at runtime from context. - DynType = &Type{ - kind: DynKind, - runtimeType: types.NewTypeValue("dyn"), - } + DynType = types.DynType // IntType represents the int type. - IntType = &Type{ - kind: IntKind, - runtimeType: types.IntType, - } + IntType = types.IntType // NullType represents the type of a null value. - NullType = &Type{ - kind: NullTypeKind, - runtimeType: types.NullType, - } + NullType = types.NullType // StringType represents the string type. - StringType = &Type{ - kind: StringKind, - runtimeType: types.StringType, - } + StringType = types.StringType // TimestampType represents the time type. - TimestampType = &Type{ - kind: TimestampKind, - runtimeType: types.TimestampType, - } + TimestampType = types.TimestampType // TypeType represents a CEL type - TypeType = &Type{ - kind: TypeKind, - runtimeType: types.TypeType, - } + TypeType = types.TypeType // UintType represents a uint type. - UintType = &Type{ - kind: UintKind, - runtimeType: types.UintType, - } + UintType = types.UintType + + // function references for instantiating new types. + + // ListType creates an instances of a list type value with the provided element type. + ListType = types.NewListType + // MapType creates an instance of a map type value with the provided key and value types. + MapType = types.NewMapType + // NullableType creates an instance of a nullable type with the provided wrapped type. + // + // Note: only primitive types are supported as wrapped types. + NullableType = types.NewNullableType + // OptionalType creates an abstract parameterized type instance corresponding to CEL's notion of optional. + OptionalType = types.NewOptionalType + // OpaqueType creates an abstract parameterized type with a given name. + OpaqueType = types.NewOpaqueType + // ObjectType creates a type references to an externally defined type, e.g. a protobuf message type. + ObjectType = types.NewObjectType + // TypeParamType creates a parameterized type instance. + TypeParamType = types.NewTypeParamType ) // Type holds a reference to a runtime type with an optional type-checked set of type parameters. -type Type struct { - // kind indicates general category of the type. - kind Kind - - // runtimeType is the runtime type of the declaration. - runtimeType ref.Type - - // parameters holds the optional type-checked set of type parameters that are used during static analysis. - parameters []*Type - - // isAssignableType function determines whether one type is assignable to this type. - // A nil value for the isAssignableType function falls back to equality of kind, runtimeType, and parameters. 
- isAssignableType func(other *Type) bool - - // isAssignableRuntimeType function determines whether the runtime type (with erasure) is assignable to this type. - // A nil value for the isAssignableRuntimeType function falls back to the equality of the type or type name. - isAssignableRuntimeType func(other ref.Val) bool -} - -// IsAssignableType determines whether the current type is type-check assignable from the input fromType. -func (t *Type) IsAssignableType(fromType *Type) bool { - if t.isAssignableType != nil { - return t.isAssignableType(fromType) - } - return t.defaultIsAssignableType(fromType) -} - -// IsAssignableRuntimeType determines whether the current type is runtime assignable from the input runtimeType. -// -// At runtime, parameterized types are erased and so a function which type-checks to support a map(string, string) -// will have a runtime assignable type of a map. -func (t *Type) IsAssignableRuntimeType(val ref.Val) bool { - if t.isAssignableRuntimeType != nil { - return t.isAssignableRuntimeType(val) - } - return t.defaultIsAssignableRuntimeType(val) -} - -// String returns a human-readable definition of the type name. -func (t *Type) String() string { - if len(t.parameters) == 0 { - return t.runtimeType.TypeName() - } - params := make([]string, len(t.parameters)) - for i, p := range t.parameters { - params[i] = p.String() - } - return fmt.Sprintf("%s(%s)", t.runtimeType.TypeName(), strings.Join(params, ", ")) -} - -// isDyn indicates whether the type is dynamic in any way. -func (t *Type) isDyn() bool { - return t.kind == DynKind || t.kind == AnyKind || t.kind == TypeParamKind -} - -// equals indicates whether two types have the same kind, type name, and parameters. -func (t *Type) equals(other *Type) bool { - if t.kind != other.kind || - t.runtimeType.TypeName() != other.runtimeType.TypeName() || - len(t.parameters) != len(other.parameters) { - return false - } - for i, p := range t.parameters { - if !p.equals(other.parameters[i]) { - return false - } - } - return true -} +type Type = types.Type -// defaultIsAssignableType provides the standard definition of what it means for one type to be assignable to another -// where any of the following may return a true result: -// - The from types are the same instance -// - The target type is dynamic -// - The fromType has the same kind and type name as the target type, and all parameters of the target type -// -// are IsAssignableType() from the parameters of the fromType. -func (t *Type) defaultIsAssignableType(fromType *Type) bool { - if t == fromType || t.isDyn() { - return true - } - if t.kind != fromType.kind || - t.runtimeType.TypeName() != fromType.runtimeType.TypeName() || - len(t.parameters) != len(fromType.parameters) { - return false - } - for i, tp := range t.parameters { - fp := fromType.parameters[i] - if !tp.IsAssignableType(fp) { - return false - } - } - return true -} - -// defaultIsAssignableRuntimeType inspects the type and in the case of list and map elements, the key and element types -// to determine whether a ref.Val is assignable to the declared type for a function signature. 
-func (t *Type) defaultIsAssignableRuntimeType(val ref.Val) bool { - valType := val.Type() - if !(t.runtimeType == valType || t.isDyn() || t.runtimeType.TypeName() == valType.TypeName()) { - return false - } - switch t.runtimeType { - case types.ListType: - elemType := t.parameters[0] - l := val.(traits.Lister) - if l.Size() == types.IntZero { - return true - } - it := l.Iterator() - for it.HasNext() == types.True { - elemVal := it.Next() - return elemType.IsAssignableRuntimeType(elemVal) - } - case types.MapType: - keyType := t.parameters[0] - elemType := t.parameters[1] - m := val.(traits.Mapper) - if m.Size() == types.IntZero { - return true - } - it := m.Iterator() - for it.HasNext() == types.True { - keyVal := it.Next() - elemVal := m.Get(keyVal) - return keyType.IsAssignableRuntimeType(keyVal) && elemType.IsAssignableRuntimeType(elemVal) - } - } - return true -} - -// ListType creates an instances of a list type value with the provided element type. -func ListType(elemType *Type) *Type { - return &Type{ - kind: ListKind, - runtimeType: types.ListType, - parameters: []*Type{elemType}, - } -} - -// MapType creates an instance of a map type value with the provided key and value types. -func MapType(keyType, valueType *Type) *Type { - return &Type{ - kind: MapKind, - runtimeType: types.MapType, - parameters: []*Type{keyType, valueType}, - } -} - -// NullableType creates an instance of a nullable type with the provided wrapped type. -// -// Note: only primitive types are supported as wrapped types. -func NullableType(wrapped *Type) *Type { - return &Type{ - kind: wrapped.kind, - runtimeType: wrapped.runtimeType, - parameters: wrapped.parameters, - isAssignableType: func(other *Type) bool { - return NullType.IsAssignableType(other) || wrapped.IsAssignableType(other) - }, - isAssignableRuntimeType: func(other ref.Val) bool { - return NullType.IsAssignableRuntimeType(other) || wrapped.IsAssignableRuntimeType(other) - }, - } -} - -// OptionalType creates an abstract parameterized type instance corresponding to CEL's notion of optional. -func OptionalType(param *Type) *Type { - return OpaqueType("optional", param) -} - -// OpaqueType creates an abstract parameterized type with a given name. -func OpaqueType(name string, params ...*Type) *Type { - return &Type{ - kind: OpaqueKind, - runtimeType: types.NewTypeValue(name), - parameters: params, - } -} - -// ObjectType creates a type references to an externally defined type, e.g. a protobuf message type. -func ObjectType(typeName string) *Type { - return &Type{ - kind: StructKind, - runtimeType: types.NewObjectTypeValue(typeName), - } -} - -// TypeParamType creates a parameterized type instance. -func TypeParamType(paramName string) *Type { - return &Type{ - kind: TypeParamKind, - runtimeType: types.NewTypeValue(paramName), +// Constant creates an instances of an identifier declaration with a variable name, type, and value. +func Constant(name string, t *Type, v ref.Val) EnvOption { + return func(e *Env) (*Env, error) { + e.variables = append(e.variables, decls.NewConstant(name, t, v)) + return e, nil } } // Variable creates an instance of a variable declaration with a variable name and type. 
func Variable(name string, t *Type) EnvOption { return func(e *Env) (*Env, error) { - et, err := TypeToExprType(t) - if err != nil { - return nil, err - } - e.declarations = append(e.declarations, decls.NewVar(name, et)) + e.variables = append(e.variables, decls.NewVariable(name, t)) return e, nil } } @@ -386,53 +178,30 @@ func Variable(name string, t *Type) EnvOption { // overload as CEL can only make inferences by type-name regarding such types. func Function(name string, opts ...FunctionOpt) EnvOption { return func(e *Env) (*Env, error) { - fn := &functionDecl{ - name: name, - overloads: []*overloadDecl{}, - options: opts, - } - err := fn.init() + fn, err := decls.NewFunction(name, opts...) if err != nil { return nil, err } - _, err = functionDeclToExprDecl(fn) - if err != nil { - return nil, err - } - if existing, found := e.functions[fn.name]; found { - fn, err = existing.merge(fn) + if existing, found := e.functions[fn.Name()]; found { + fn, err = existing.Merge(fn) if err != nil { return nil, err } } - e.functions[name] = fn + e.functions[fn.Name()] = fn return e, nil } } // FunctionOpt defines a functional option for configuring a function declaration. -type FunctionOpt func(*functionDecl) (*functionDecl, error) +type FunctionOpt = decls.FunctionOpt // SingletonUnaryBinding creates a singleton function definition to be used for all function overloads. // // Note, this approach works well if operand is expected to have a specific trait which it implements, // e.g. traits.ContainerType. Otherwise, prefer per-overload function bindings. func SingletonUnaryBinding(fn functions.UnaryOp, traits ...int) FunctionOpt { - trait := 0 - for _, t := range traits { - trait = trait | t - } - return func(f *functionDecl) (*functionDecl, error) { - if f.singleton != nil { - return nil, fmt.Errorf("function already has a singleton binding: %s", f.name) - } - f.singleton = &functions.Overload{ - Operator: f.name, - Unary: fn, - OperandTrait: trait, - } - return f, nil - } + return decls.SingletonUnaryBinding(fn, traits...) } // SingletonBinaryImpl creates a singleton function definition to be used with all function overloads. @@ -442,7 +211,7 @@ func SingletonUnaryBinding(fn functions.UnaryOp, traits ...int) FunctionOpt { // // Deprecated: use SingletonBinaryBinding func SingletonBinaryImpl(fn functions.BinaryOp, traits ...int) FunctionOpt { - return SingletonBinaryBinding(fn, traits...) + return decls.SingletonBinaryBinding(fn, traits...) } // SingletonBinaryBinding creates a singleton function definition to be used with all function overloads. @@ -450,21 +219,7 @@ func SingletonBinaryImpl(fn functions.BinaryOp, traits ...int) FunctionOpt { // Note, this approach works well if operand is expected to have a specific trait which it implements, // e.g. traits.ContainerType. Otherwise, prefer per-overload function bindings. func SingletonBinaryBinding(fn functions.BinaryOp, traits ...int) FunctionOpt { - trait := 0 - for _, t := range traits { - trait = trait | t - } - return func(f *functionDecl) (*functionDecl, error) { - if f.singleton != nil { - return nil, fmt.Errorf("function already has a singleton binding: %s", f.name) - } - f.singleton = &functions.Overload{ - Operator: f.name, - Binary: fn, - OperandTrait: trait, - } - return f, nil - } + return decls.SingletonBinaryBinding(fn, traits...) } // SingletonFunctionImpl creates a singleton function definition to be used with all function overloads. 
@@ -474,7 +229,7 @@ func SingletonBinaryBinding(fn functions.BinaryOp, traits ...int) FunctionOpt { // // Deprecated: use SingletonFunctionBinding func SingletonFunctionImpl(fn functions.FunctionOp, traits ...int) FunctionOpt { - return SingletonFunctionBinding(fn, traits...) + return decls.SingletonFunctionBinding(fn, traits...) } // SingletonFunctionBinding creates a singleton function definition to be used with all function overloads. @@ -482,21 +237,13 @@ func SingletonFunctionImpl(fn functions.FunctionOp, traits ...int) FunctionOpt { // Note, this approach works well if operand is expected to have a specific trait which it implements, // e.g. traits.ContainerType. Otherwise, prefer per-overload function bindings. func SingletonFunctionBinding(fn functions.FunctionOp, traits ...int) FunctionOpt { - trait := 0 - for _, t := range traits { - trait = trait | t - } - return func(f *functionDecl) (*functionDecl, error) { - if f.singleton != nil { - return nil, fmt.Errorf("function already has a singleton binding: %s", f.name) - } - f.singleton = &functions.Overload{ - Operator: f.name, - Function: fn, - OperandTrait: trait, - } - return f, nil - } + return decls.SingletonFunctionBinding(fn, traits...) +} + +// DisableDeclaration disables the function signatures, effectively removing them from the type-check +// environment while preserving the runtime bindings. +func DisableDeclaration(value bool) FunctionOpt { + return decls.DisableDeclaration(value) } // Overload defines a new global overload with an overload id, argument types, and result type. Through the @@ -506,7 +253,7 @@ func SingletonFunctionBinding(fn functions.FunctionOp, traits ...int) FunctionOp // Note: function bindings should be commonly configured with Overload instances whereas operand traits and // strict-ness should be rare occurrences. func Overload(overloadID string, args []*Type, resultType *Type, opts ...OverloadOpt) FunctionOpt { - return newOverload(overloadID, false, args, resultType, opts...) + return decls.Overload(overloadID, args, resultType, opts...) } // MemberOverload defines a new receiver-style overload (or member function) with an overload id, argument types, @@ -516,609 +263,51 @@ func Overload(overloadID string, args []*Type, resultType *Type, opts ...Overloa // Note: function bindings should be commonly configured with Overload instances whereas operand traits and // strict-ness should be rare occurrences. func MemberOverload(overloadID string, args []*Type, resultType *Type, opts ...OverloadOpt) FunctionOpt { - return newOverload(overloadID, true, args, resultType, opts...) + return decls.MemberOverload(overloadID, args, resultType, opts...) } // OverloadOpt is a functional option for configuring a function overload. -type OverloadOpt func(*overloadDecl) (*overloadDecl, error) +type OverloadOpt = decls.OverloadOpt // UnaryBinding provides the implementation of a unary overload. The provided function is protected by a runtime // type-guard which ensures runtime type agreement between the overload signature and runtime argument types. func UnaryBinding(binding functions.UnaryOp) OverloadOpt { - return func(o *overloadDecl) (*overloadDecl, error) { - if o.hasBinding() { - return nil, fmt.Errorf("overload already has a binding: %s", o.id) - } - if len(o.argTypes) != 1 { - return nil, fmt.Errorf("unary function bound to non-unary overload: %s", o.id) - } - o.unaryOp = binding - return o, nil - } + return decls.UnaryBinding(binding) } // BinaryBinding provides the implementation of a binary overload. 
The provided function is protected by a runtime // type-guard which ensures runtime type agreement between the overload signature and runtime argument types. func BinaryBinding(binding functions.BinaryOp) OverloadOpt { - return func(o *overloadDecl) (*overloadDecl, error) { - if o.hasBinding() { - return nil, fmt.Errorf("overload already has a binding: %s", o.id) - } - if len(o.argTypes) != 2 { - return nil, fmt.Errorf("binary function bound to non-binary overload: %s", o.id) - } - o.binaryOp = binding - return o, nil - } + return decls.BinaryBinding(binding) } // FunctionBinding provides the implementation of a variadic overload. The provided function is protected by a runtime // type-guard which ensures runtime type agreement between the overload signature and runtime argument types. func FunctionBinding(binding functions.FunctionOp) OverloadOpt { - return func(o *overloadDecl) (*overloadDecl, error) { - if o.hasBinding() { - return nil, fmt.Errorf("overload already has a binding: %s", o.id) - } - o.functionOp = binding - return o, nil - } + return decls.FunctionBinding(binding) } // OverloadIsNonStrict enables the function to be called with error and unknown argument values. // // Note: do not use this option unless absoluately necessary as it should be an uncommon feature. func OverloadIsNonStrict() OverloadOpt { - return func(o *overloadDecl) (*overloadDecl, error) { - o.nonStrict = true - return o, nil - } + return decls.OverloadIsNonStrict() } // OverloadOperandTrait configures a set of traits which the first argument to the overload must implement in order to be // successfully invoked. func OverloadOperandTrait(trait int) OverloadOpt { - return func(o *overloadDecl) (*overloadDecl, error) { - o.operandTrait = trait - return o, nil - } -} - -type functionDecl struct { - name string - overloads []*overloadDecl - options []FunctionOpt - singleton *functions.Overload - initialized bool -} - -// init ensures that a function's options have been applied. -// -// This function is used in both the environment configuration and internally for function merges. -func (f *functionDecl) init() error { - if f.initialized { - return nil - } - f.initialized = true - - var err error - for _, opt := range f.options { - f, err = opt(f) - if err != nil { - return err - } - } - if len(f.overloads) == 0 { - return fmt.Errorf("function %s must have at least one overload", f.name) - } - return nil -} - -// bindings produces a set of function bindings, if any are defined. -func (f *functionDecl) bindings() ([]*functions.Overload, error) { - overloads := []*functions.Overload{} - nonStrict := false - for _, o := range f.overloads { - if o.hasBinding() { - overload := &functions.Overload{ - Operator: o.id, - Unary: o.guardedUnaryOp(f.name), - Binary: o.guardedBinaryOp(f.name), - Function: o.guardedFunctionOp(f.name), - OperandTrait: o.operandTrait, - NonStrict: o.nonStrict, - } - overloads = append(overloads, overload) - nonStrict = nonStrict || o.nonStrict - } - } - if f.singleton != nil { - if len(overloads) != 0 { - return nil, fmt.Errorf("singleton function incompatible with specialized overloads: %s", f.name) - } - return []*functions.Overload{ - { - Operator: f.name, - Unary: f.singleton.Unary, - Binary: f.singleton.Binary, - Function: f.singleton.Function, - OperandTrait: f.singleton.OperandTrait, - }, - }, nil - } - if len(overloads) == 0 { - return overloads, nil - } - // Single overload. Replicate an entry for it using the function name as well. 
- if len(overloads) == 1 { - if overloads[0].Operator == f.name { - return overloads, nil - } - return append(overloads, &functions.Overload{ - Operator: f.name, - Unary: overloads[0].Unary, - Binary: overloads[0].Binary, - Function: overloads[0].Function, - NonStrict: overloads[0].NonStrict, - OperandTrait: overloads[0].OperandTrait, - }), nil - } - // All of the defined overloads are wrapped into a top-level function which - // performs dynamic dispatch to the proper overload based on the argument types. - bindings := append([]*functions.Overload{}, overloads...) - funcDispatch := func(args ...ref.Val) ref.Val { - for _, o := range f.overloads { - if !o.matchesRuntimeSignature(args...) { - continue - } - switch len(args) { - case 1: - if o.unaryOp != nil { - return o.unaryOp(args[0]) - } - case 2: - if o.binaryOp != nil { - return o.binaryOp(args[0], args[1]) - } - } - if o.functionOp != nil { - return o.functionOp(args...) - } - // eventually this will fall through to the noSuchOverload below. - } - return noSuchOverload(f.name, args...) - } - function := &functions.Overload{ - Operator: f.name, - Function: funcDispatch, - NonStrict: nonStrict, - } - return append(bindings, function), nil -} - -// merge one function declaration with another. -// -// If a function is extended, by say adding new overloads to an existing function, then it is merged with the -// prior definition of the function at which point its overloads must not collide with pre-existing overloads -// and its bindings (singleton, or per-overload) must not conflict with previous definitions either. -func (f *functionDecl) merge(other *functionDecl) (*functionDecl, error) { - if f.name != other.name { - return nil, fmt.Errorf("cannot merge unrelated functions. %s and %s", f.name, other.name) - } - err := f.init() - if err != nil { - return nil, err - } - err = other.init() - if err != nil { - return nil, err - } - merged := &functionDecl{ - name: f.name, - overloads: make([]*overloadDecl, len(f.overloads)), - options: []FunctionOpt{}, - initialized: true, - singleton: f.singleton, - } - copy(merged.overloads, f.overloads) - for _, o := range other.overloads { - err := merged.addOverload(o) - if err != nil { - return nil, fmt.Errorf("function declaration merge failed: %v", err) - } - } - if other.singleton != nil { - if merged.singleton != nil { - return nil, fmt.Errorf("function already has a binding: %s", f.name) - } - merged.singleton = other.singleton - } - return merged, nil -} - -// addOverload ensures that the new overload does not collide with an existing overload signature; -// however, if the function signatures are identical, the implementation may be rewritten as its -// difficult to compare functions by object identity. -func (f *functionDecl) addOverload(overload *overloadDecl) error { - for index, o := range f.overloads { - if o.id != overload.id && o.signatureOverlaps(overload) { - return fmt.Errorf("overload signature collision in function %s: %s collides with %s", f.name, o.id, overload.id) - } - if o.id == overload.id { - if o.signatureEquals(overload) && o.nonStrict == overload.nonStrict { - // Allow redefinition of an overload implementation so long as the signatures match. - f.overloads[index] = overload - return nil - } - return fmt.Errorf("overload redefinition in function. 
%s: %s has multiple definitions", f.name, o.id) - } - } - f.overloads = append(f.overloads, overload) - return nil -} - -func noSuchOverload(funcName string, args ...ref.Val) ref.Val { - argTypes := make([]string, len(args)) - for i, arg := range args { - argTypes[i] = arg.Type().TypeName() - } - signature := strings.Join(argTypes, ", ") - return types.NewErr("no such overload: %s(%s)", funcName, signature) -} - -// overloadDecl contains all of the relevant information regarding a specific function overload. -type overloadDecl struct { - id string - argTypes []*Type - resultType *Type - memberFunction bool - - // binding options, optional but encouraged. - unaryOp functions.UnaryOp - binaryOp functions.BinaryOp - functionOp functions.FunctionOp - - // behavioral options, uncommon - nonStrict bool - operandTrait int -} - -func (o *overloadDecl) hasBinding() bool { - return o.unaryOp != nil || o.binaryOp != nil || o.functionOp != nil -} - -// guardedUnaryOp creates an invocation guard around the provided unary operator, if one is defined. -func (o *overloadDecl) guardedUnaryOp(funcName string) functions.UnaryOp { - if o.unaryOp == nil { - return nil - } - return func(arg ref.Val) ref.Val { - if !o.matchesRuntimeUnarySignature(arg) { - return noSuchOverload(funcName, arg) - } - return o.unaryOp(arg) - } -} - -// guardedBinaryOp creates an invocation guard around the provided binary operator, if one is defined. -func (o *overloadDecl) guardedBinaryOp(funcName string) functions.BinaryOp { - if o.binaryOp == nil { - return nil - } - return func(arg1, arg2 ref.Val) ref.Val { - if !o.matchesRuntimeBinarySignature(arg1, arg2) { - return noSuchOverload(funcName, arg1, arg2) - } - return o.binaryOp(arg1, arg2) - } -} - -// guardedFunctionOp creates an invocation guard around the provided variadic function binding, if one is provided. -func (o *overloadDecl) guardedFunctionOp(funcName string) functions.FunctionOp { - if o.functionOp == nil { - return nil - } - return func(args ...ref.Val) ref.Val { - if !o.matchesRuntimeSignature(args...) { - return noSuchOverload(funcName, args...) - } - return o.functionOp(args...) - } -} - -// matchesRuntimeUnarySignature indicates whether the argument type is runtime assiganble to the overload's expected argument. -func (o *overloadDecl) matchesRuntimeUnarySignature(arg ref.Val) bool { - if o.nonStrict && types.IsUnknownOrError(arg) { - return true - } - return o.argTypes[0].IsAssignableRuntimeType(arg) && (o.operandTrait == 0 || arg.Type().HasTrait(o.operandTrait)) -} - -// matchesRuntimeBinarySignature indicates whether the argument types are runtime assiganble to the overload's expected arguments. -func (o *overloadDecl) matchesRuntimeBinarySignature(arg1, arg2 ref.Val) bool { - if o.nonStrict { - if types.IsUnknownOrError(arg1) { - return types.IsUnknownOrError(arg2) || o.argTypes[1].IsAssignableRuntimeType(arg2) - } - } else if !o.argTypes[1].IsAssignableRuntimeType(arg2) { - return false - } - return o.argTypes[0].IsAssignableRuntimeType(arg1) && (o.operandTrait == 0 || arg1.Type().HasTrait(o.operandTrait)) -} - -// matchesRuntimeSignature indicates whether the argument types are runtime assiganble to the overload's expected arguments. 
-func (o *overloadDecl) matchesRuntimeSignature(args ...ref.Val) bool { - if len(args) != len(o.argTypes) { - return false - } - if len(args) == 0 { - return true - } - allArgsMatch := true - for i, arg := range args { - if o.nonStrict && types.IsUnknownOrError(arg) { - continue - } - allArgsMatch = allArgsMatch && o.argTypes[i].IsAssignableRuntimeType(arg) - } - - arg := args[0] - return allArgsMatch && (o.operandTrait == 0 || (o.nonStrict && types.IsUnknownOrError(arg)) || arg.Type().HasTrait(o.operandTrait)) -} - -// signatureEquals indicates whether one overload has an identical signature to another overload. -// -// Providing a duplicate signature is not an issue, but an overloapping signature is problematic. -func (o *overloadDecl) signatureEquals(other *overloadDecl) bool { - if o.id != other.id || o.memberFunction != other.memberFunction || len(o.argTypes) != len(other.argTypes) { - return false - } - for i, at := range o.argTypes { - oat := other.argTypes[i] - if !at.equals(oat) { - return false - } - } - return o.resultType.equals(other.resultType) -} - -// signatureOverlaps indicates whether one overload has an overlapping signature with another overload. -// -// The 'other' overload must first be checked for equality before determining whether it overlaps in order to be completely accurate. -func (o *overloadDecl) signatureOverlaps(other *overloadDecl) bool { - if o.memberFunction != other.memberFunction || len(o.argTypes) != len(other.argTypes) { - return false - } - argsOverlap := true - for i, argType := range o.argTypes { - otherArgType := other.argTypes[i] - argsOverlap = argsOverlap && - (argType.IsAssignableType(otherArgType) || - otherArgType.IsAssignableType(argType)) - } - return argsOverlap -} - -func newOverload(overloadID string, memberFunction bool, args []*Type, resultType *Type, opts ...OverloadOpt) FunctionOpt { - return func(f *functionDecl) (*functionDecl, error) { - overload := &overloadDecl{ - id: overloadID, - argTypes: args, - resultType: resultType, - memberFunction: memberFunction, - } - var err error - for _, opt := range opts { - overload, err = opt(overload) - if err != nil { - return nil, err - } - } - err = f.addOverload(overload) - if err != nil { - return nil, err - } - return f, nil - } -} - -func maybeWrapper(t *Type, pbType *exprpb.Type) *exprpb.Type { - if t.IsAssignableType(NullType) { - return decls.NewWrapperType(pbType) - } - return pbType + return decls.OverloadOperandTrait(trait) } // TypeToExprType converts a CEL-native type representation to a protobuf CEL Type representation. 
func TypeToExprType(t *Type) (*exprpb.Type, error) { - switch t.kind { - case AnyKind: - return decls.Any, nil - case BoolKind: - return maybeWrapper(t, decls.Bool), nil - case BytesKind: - return maybeWrapper(t, decls.Bytes), nil - case DoubleKind: - return maybeWrapper(t, decls.Double), nil - case DurationKind: - return decls.Duration, nil - case DynKind: - return decls.Dyn, nil - case IntKind: - return maybeWrapper(t, decls.Int), nil - case ListKind: - et, err := TypeToExprType(t.parameters[0]) - if err != nil { - return nil, err - } - return decls.NewListType(et), nil - case MapKind: - kt, err := TypeToExprType(t.parameters[0]) - if err != nil { - return nil, err - } - vt, err := TypeToExprType(t.parameters[1]) - if err != nil { - return nil, err - } - return decls.NewMapType(kt, vt), nil - case NullTypeKind: - return decls.Null, nil - case OpaqueKind: - params := make([]*exprpb.Type, len(t.parameters)) - for i, p := range t.parameters { - pt, err := TypeToExprType(p) - if err != nil { - return nil, err - } - params[i] = pt - } - return decls.NewAbstractType(t.runtimeType.TypeName(), params...), nil - case StringKind: - return maybeWrapper(t, decls.String), nil - case StructKind: - switch t.runtimeType.TypeName() { - case "google.protobuf.Any": - return decls.Any, nil - case "google.protobuf.Duration": - return decls.Duration, nil - case "google.protobuf.Timestamp": - return decls.Timestamp, nil - case "google.protobuf.Value": - return decls.Dyn, nil - case "google.protobuf.ListValue": - return decls.NewListType(decls.Dyn), nil - case "google.protobuf.Struct": - return decls.NewMapType(decls.String, decls.Dyn), nil - case "google.protobuf.BoolValue": - return decls.NewWrapperType(decls.Bool), nil - case "google.protobuf.BytesValue": - return decls.NewWrapperType(decls.Bytes), nil - case "google.protobuf.DoubleValue", "google.protobuf.FloatValue": - return decls.NewWrapperType(decls.Double), nil - case "google.protobuf.Int32Value", "google.protobuf.Int64Value": - return decls.NewWrapperType(decls.Int), nil - case "google.protobuf.StringValue": - return decls.NewWrapperType(decls.String), nil - case "google.protobuf.UInt32Value", "google.protobuf.UInt64Value": - return decls.NewWrapperType(decls.Uint), nil - default: - return decls.NewObjectType(t.runtimeType.TypeName()), nil - } - case TimestampKind: - return decls.Timestamp, nil - case TypeParamKind: - return decls.NewTypeParamType(t.runtimeType.TypeName()), nil - case TypeKind: - return decls.NewTypeType(decls.Dyn), nil - case UintKind: - return maybeWrapper(t, decls.Uint), nil - } - return nil, fmt.Errorf("missing type conversion to proto: %v", t) + return types.TypeToExprType(t) } // ExprTypeToType converts a protobuf CEL type representation to a CEL-native type representation. 
func ExprTypeToType(t *exprpb.Type) (*Type, error) { - switch t.GetTypeKind().(type) { - case *exprpb.Type_Dyn: - return DynType, nil - case *exprpb.Type_AbstractType_: - paramTypes := make([]*Type, len(t.GetAbstractType().GetParameterTypes())) - for i, p := range t.GetAbstractType().GetParameterTypes() { - pt, err := ExprTypeToType(p) - if err != nil { - return nil, err - } - paramTypes[i] = pt - } - return OpaqueType(t.GetAbstractType().GetName(), paramTypes...), nil - case *exprpb.Type_ListType_: - et, err := ExprTypeToType(t.GetListType().GetElemType()) - if err != nil { - return nil, err - } - return ListType(et), nil - case *exprpb.Type_MapType_: - kt, err := ExprTypeToType(t.GetMapType().GetKeyType()) - if err != nil { - return nil, err - } - vt, err := ExprTypeToType(t.GetMapType().GetValueType()) - if err != nil { - return nil, err - } - return MapType(kt, vt), nil - case *exprpb.Type_MessageType: - switch t.GetMessageType() { - case "google.protobuf.Any": - return AnyType, nil - case "google.protobuf.Duration": - return DurationType, nil - case "google.protobuf.Timestamp": - return TimestampType, nil - case "google.protobuf.Value": - return DynType, nil - case "google.protobuf.ListValue": - return ListType(DynType), nil - case "google.protobuf.Struct": - return MapType(StringType, DynType), nil - case "google.protobuf.BoolValue": - return NullableType(BoolType), nil - case "google.protobuf.BytesValue": - return NullableType(BytesType), nil - case "google.protobuf.DoubleValue", "google.protobuf.FloatValue": - return NullableType(DoubleType), nil - case "google.protobuf.Int32Value", "google.protobuf.Int64Value": - return NullableType(IntType), nil - case "google.protobuf.StringValue": - return NullableType(StringType), nil - case "google.protobuf.UInt32Value", "google.protobuf.UInt64Value": - return NullableType(UintType), nil - default: - return ObjectType(t.GetMessageType()), nil - } - case *exprpb.Type_Null: - return NullType, nil - case *exprpb.Type_Primitive: - switch t.GetPrimitive() { - case exprpb.Type_BOOL: - return BoolType, nil - case exprpb.Type_BYTES: - return BytesType, nil - case exprpb.Type_DOUBLE: - return DoubleType, nil - case exprpb.Type_INT64: - return IntType, nil - case exprpb.Type_STRING: - return StringType, nil - case exprpb.Type_UINT64: - return UintType, nil - default: - return nil, fmt.Errorf("unsupported primitive type: %v", t) - } - case *exprpb.Type_TypeParam: - return TypeParamType(t.GetTypeParam()), nil - case *exprpb.Type_Type: - return TypeType, nil - case *exprpb.Type_WellKnown: - switch t.GetWellKnown() { - case exprpb.Type_ANY: - return AnyType, nil - case exprpb.Type_DURATION: - return DurationType, nil - case exprpb.Type_TIMESTAMP: - return TimestampType, nil - default: - return nil, fmt.Errorf("unsupported well-known type: %v", t) - } - case *exprpb.Type_Wrapper: - t, err := ExprTypeToType(&exprpb.Type{TypeKind: &exprpb.Type_Primitive{Primitive: t.GetWrapper()}}) - if err != nil { - return nil, err - } - return NullableType(t), nil - default: - return nil, fmt.Errorf("unsupported type: %v", t) - } + return types.ExprTypeToType(t) } // ExprDeclToDeclaration converts a protobuf CEL declaration to a CEL-native declaration, either a Variable or Function. 
@@ -1130,117 +319,37 @@ func ExprDeclToDeclaration(d *exprpb.Decl) (EnvOption, error) { for i, o := range overloads { args := make([]*Type, len(o.GetParams())) for j, p := range o.GetParams() { - a, err := ExprTypeToType(p) + a, err := types.ExprTypeToType(p) if err != nil { return nil, err } args[j] = a } - res, err := ExprTypeToType(o.GetResultType()) + res, err := types.ExprTypeToType(o.GetResultType()) if err != nil { return nil, err } - opts[i] = Overload(o.GetOverloadId(), args, res) + if o.IsInstanceFunction { + opts[i] = decls.MemberOverload(o.GetOverloadId(), args, res) + } else { + opts[i] = decls.Overload(o.GetOverloadId(), args, res) + } } return Function(d.GetName(), opts...), nil case *exprpb.Decl_Ident: - t, err := ExprTypeToType(d.GetIdent().GetType()) + t, err := types.ExprTypeToType(d.GetIdent().GetType()) if err != nil { return nil, err } - return Variable(d.GetName(), t), nil - default: - return nil, fmt.Errorf("unsupported decl: %v", d) - } - -} - -func functionDeclToExprDecl(f *functionDecl) (*exprpb.Decl, error) { - overloads := make([]*exprpb.Decl_FunctionDecl_Overload, len(f.overloads)) - i := 0 - for _, o := range f.overloads { - paramNames := map[string]struct{}{} - argTypes := make([]*exprpb.Type, len(o.argTypes)) - for j, a := range o.argTypes { - collectParamNames(paramNames, a) - at, err := TypeToExprType(a) - if err != nil { - return nil, err - } - argTypes[j] = at + if d.GetIdent().GetValue() == nil { + return Variable(d.GetName(), t), nil } - collectParamNames(paramNames, o.resultType) - resultType, err := TypeToExprType(o.resultType) + val, err := ast.ConstantToVal(d.GetIdent().GetValue()) if err != nil { return nil, err } - if len(paramNames) == 0 { - if o.memberFunction { - overloads[i] = decls.NewInstanceOverload(o.id, argTypes, resultType) - } else { - overloads[i] = decls.NewOverload(o.id, argTypes, resultType) - } - } else { - params := []string{} - for pn := range paramNames { - params = append(params, pn) - } - if o.memberFunction { - overloads[i] = decls.NewParameterizedInstanceOverload(o.id, argTypes, resultType, params) - } else { - overloads[i] = decls.NewParameterizedOverload(o.id, argTypes, resultType, params) - } - } - i++ - } - return decls.NewFunction(f.name, overloads...), nil -} - -func collectParamNames(paramNames map[string]struct{}, arg *Type) { - if arg.kind == TypeParamKind { - paramNames[arg.runtimeType.TypeName()] = struct{}{} - } - for _, param := range arg.parameters { - collectParamNames(paramNames, param) - } -} - -func typeValueToKind(tv *types.TypeValue) (Kind, error) { - switch tv { - case types.BoolType: - return BoolKind, nil - case types.DoubleType: - return DoubleKind, nil - case types.IntType: - return IntKind, nil - case types.UintType: - return UintKind, nil - case types.ListType: - return ListKind, nil - case types.MapType: - return MapKind, nil - case types.StringType: - return StringKind, nil - case types.BytesType: - return BytesKind, nil - case types.DurationType: - return DurationKind, nil - case types.TimestampType: - return TimestampKind, nil - case types.NullType: - return NullTypeKind, nil - case types.TypeType: - return TypeKind, nil + return Constant(d.GetName(), t, val), nil default: - switch tv.TypeName() { - case "dyn": - return DynKind, nil - case "google.protobuf.Any": - return AnyKind, nil - case "optional": - return OpaqueKind, nil - default: - return 0, fmt.Errorf("no known conversion for type of %s", tv.TypeName()) - } + return nil, fmt.Errorf("unsupported decl: %v", d) } } diff --git 
a/vendor/github.com/google/cel-go/cel/env.go b/vendor/github.com/google/cel-go/cel/env.go index d9c2ef63f..6568a8b80 100644 --- a/vendor/github.com/google/cel-go/cel/env.go +++ b/vendor/github.com/google/cel-go/cel/env.go @@ -16,13 +16,14 @@ package cel import ( "errors" - "fmt" "sync" "github.com/google/cel-go/checker" - "github.com/google/cel-go/checker/decls" + chkdecls "github.com/google/cel-go/checker/decls" "github.com/google/cel-go/common" + celast "github.com/google/cel-go/common/ast" "github.com/google/cel-go/common/containers" + "github.com/google/cel-go/common/decls" "github.com/google/cel-go/common/types" "github.com/google/cel-go/common/types/ref" "github.com/google/cel-go/interpreter" @@ -37,72 +38,103 @@ type Source = common.Source // Ast representing the checked or unchecked expression, its source, and related metadata such as // source position information. type Ast struct { - expr *exprpb.Expr - info *exprpb.SourceInfo - source Source - refMap map[int64]*exprpb.Reference - typeMap map[int64]*exprpb.Type + source Source + impl *celast.AST +} + +// NativeRep converts the AST to a Go-native representation. +func (ast *Ast) NativeRep() *celast.AST { + return ast.impl } // Expr returns the proto serializable instance of the parsed/checked expression. +// +// Deprecated: prefer cel.AstToCheckedExpr() or cel.AstToParsedExpr() and call GetExpr() +// the result instead. func (ast *Ast) Expr() *exprpb.Expr { - return ast.expr + if ast == nil { + return nil + } + pbExpr, _ := celast.ExprToProto(ast.impl.Expr()) + return pbExpr } // IsChecked returns whether the Ast value has been successfully type-checked. func (ast *Ast) IsChecked() bool { - return ast.typeMap != nil && len(ast.typeMap) > 0 + if ast == nil { + return false + } + return ast.impl.IsChecked() } // SourceInfo returns character offset and newline position information about expression elements. func (ast *Ast) SourceInfo() *exprpb.SourceInfo { - return ast.info + if ast == nil { + return nil + } + pbInfo, _ := celast.SourceInfoToProto(ast.impl.SourceInfo()) + return pbInfo } // ResultType returns the output type of the expression if the Ast has been type-checked, else -// returns decls.Dyn as the parse step cannot infer the type. +// returns chkdecls.Dyn as the parse step cannot infer the type. // // Deprecated: use OutputType func (ast *Ast) ResultType() *exprpb.Type { - if !ast.IsChecked() { - return decls.Dyn + out := ast.OutputType() + t, err := TypeToExprType(out) + if err != nil { + return chkdecls.Dyn } - return ast.typeMap[ast.expr.GetId()] + return t } // OutputType returns the output type of the expression if the Ast has been type-checked, else // returns cel.DynType as the parse step cannot infer types. func (ast *Ast) OutputType() *Type { - t, err := ExprTypeToType(ast.ResultType()) - if err != nil { - return DynType + if ast == nil { + return types.ErrorType } - return t + return ast.impl.GetType(ast.impl.Expr().ID()) } // Source returns a view of the input used to create the Ast. This source may be complete or // constructed from the SourceInfo. func (ast *Ast) Source() Source { + if ast == nil { + return nil + } return ast.source } // FormatType converts a type message into a string representation. +// +// Deprecated: prefer FormatCELType func FormatType(t *exprpb.Type) string { return checker.FormatCheckedType(t) } +// FormatCELType formats a cel.Type value to a string representation. +// +// The type formatting is identical to FormatType. 
+func FormatCELType(t *Type) string { + return checker.FormatCELType(t) +} + // Env encapsulates the context necessary to perform parsing, type checking, or generation of // evaluable programs for different expressions. type Env struct { Container *containers.Container - functions map[string]*functionDecl - declarations []*exprpb.Decl + variables []*decls.VariableDecl + functions map[string]*decls.FunctionDecl macros []parser.Macro - adapter ref.TypeAdapter - provider ref.TypeProvider + adapter types.Adapter + provider types.Provider features map[int]bool appliedFeatures map[int]bool libraries map[string]bool + validators []ASTValidator + costOptions []checker.CostOption // Internal parser representation prsr *parser.Parser @@ -154,8 +186,8 @@ func NewCustomEnv(opts ...EnvOption) (*Env, error) { return nil, err } return (&Env{ - declarations: []*exprpb.Decl{}, - functions: map[string]*functionDecl{}, + variables: []*decls.VariableDecl{}, + functions: map[string]*decls.FunctionDecl{}, macros: []parser.Macro{}, Container: containers.DefaultContainer, adapter: registry, @@ -163,41 +195,63 @@ func NewCustomEnv(opts ...EnvOption) (*Env, error) { features: map[int]bool{}, appliedFeatures: map[int]bool{}, libraries: map[string]bool{}, + validators: []ASTValidator{}, progOpts: []ProgramOption{}, + costOptions: []checker.CostOption{}, }).configure(opts) } // Check performs type-checking on the input Ast and yields a checked Ast and/or set of Issues. +// If any `ASTValidators` are configured on the environment, they will be applied after a valid +// type-check result. If any issues are detected, the validators will provide them on the +// output Issues object. // -// Checking has failed if the returned Issues value and its Issues.Err() value are non-nil. -// Issues should be inspected if they are non-nil, but may not represent a fatal error. +// Either checking or validation has failed if the returned Issues value and its Issues.Err() +// value are non-nil. Issues should be inspected if they are non-nil, but may not represent a +// fatal error. // // It is possible to have both non-nil Ast and Issues values returned from this call: however, // the mere presence of an Ast does not imply that it is valid for use. func (e *Env) Check(ast *Ast) (*Ast, *Issues) { - // Note, errors aren't currently possible on the Ast to ParsedExpr conversion. - pe, _ := AstToParsedExpr(ast) - // Construct the internal checker env, erroring if there is an issue adding the declarations. chk, err := e.initChecker() if err != nil { errs := common.NewErrors(ast.Source()) errs.ReportError(common.NoLocation, err.Error()) - return nil, NewIssues(errs) + return nil, NewIssuesWithSourceInfo(errs, ast.impl.SourceInfo()) } - res, errs := checker.Check(pe, ast.Source(), chk) + checked, errs := checker.Check(ast.impl, ast.Source(), chk) if len(errs.GetErrors()) > 0 { - return nil, NewIssues(errs) + return nil, NewIssuesWithSourceInfo(errs, ast.impl.SourceInfo()) } // Manually create the Ast to ensure that the Ast source information (which may be more // detailed than the information provided by Check), is returned to the caller. - return &Ast{ - source: ast.Source(), - expr: res.GetExpr(), - info: res.GetSourceInfo(), - refMap: res.GetReferenceMap(), - typeMap: res.GetTypeMap()}, nil + ast = &Ast{ + source: ast.Source(), + impl: checked} + + // Avoid creating a validator config if it's not needed. + if len(e.validators) == 0 { + return ast, nil + } + + // Generate a validator configuration from the set of configured validators. 
+ vConfig := newValidatorConfig() + for _, v := range e.validators { + if cv, ok := v.(ASTValidatorConfigurer); ok { + cv.Configure(vConfig) + } + } + // Apply additional validators on the type-checked result. + iss := NewIssuesWithSourceInfo(errs, ast.impl.SourceInfo()) + for _, v := range e.validators { + v.Validate(e, vConfig, checked, iss) + } + if iss.Err() != nil { + return nil, iss + } + return ast, nil } // Compile combines the Parse and Check phases CEL program compilation to produce an Ast and @@ -255,7 +309,7 @@ func (e *Env) Extend(opts ...EnvOption) (*Env, error) { copy(chkOptsCopy, e.chkOpts) // Copy the declarations if needed. - decsCopy := []*exprpb.Decl{} + varsCopy := []*decls.VariableDecl{} if chk != nil { // If the type-checker has already been instantiated, then the e.declarations have been // validated within the chk instance. @@ -263,8 +317,8 @@ func (e *Env) Extend(opts ...EnvOption) (*Env, error) { } else { // If the type-checker has not been instantiated, ensure the unvalidated declarations are // provided to the extended Env instance. - decsCopy = make([]*exprpb.Decl, len(e.declarations)) - copy(decsCopy, e.declarations) + varsCopy = make([]*decls.VariableDecl, len(e.variables)) + copy(varsCopy, e.variables) } // Copy macros and program options @@ -276,8 +330,8 @@ func (e *Env) Extend(opts ...EnvOption) (*Env, error) { // Copy the adapter / provider if they appear to be mutable. adapter := e.adapter provider := e.provider - adapterReg, isAdapterReg := e.adapter.(ref.TypeRegistry) - providerReg, isProviderReg := e.provider.(ref.TypeRegistry) + adapterReg, isAdapterReg := e.adapter.(*types.Registry) + providerReg, isProviderReg := e.provider.(*types.Registry) // In most cases the provider and adapter will be a ref.TypeRegistry; // however, in the rare cases where they are not, they are assumed to // be immutable. Since it is possible to set the TypeProvider separately @@ -308,7 +362,7 @@ func (e *Env) Extend(opts ...EnvOption) (*Env, error) { for k, v := range e.appliedFeatures { appliedFeaturesCopy[k] = v } - funcsCopy := make(map[string]*functionDecl, len(e.functions)) + funcsCopy := make(map[string]*decls.FunctionDecl, len(e.functions)) for k, v := range e.functions { funcsCopy[k] = v } @@ -316,10 +370,14 @@ func (e *Env) Extend(opts ...EnvOption) (*Env, error) { for k, v := range e.libraries { libsCopy[k] = v } + validatorsCopy := make([]ASTValidator, len(e.validators)) + copy(validatorsCopy, e.validators) + costOptsCopy := make([]checker.CostOption, len(e.costOptions)) + copy(costOptsCopy, e.costOptions) ext := &Env{ Container: e.Container, - declarations: decsCopy, + variables: varsCopy, functions: funcsCopy, macros: macsCopy, progOpts: progOptsCopy, @@ -327,9 +385,11 @@ func (e *Env) Extend(opts ...EnvOption) (*Env, error) { features: featuresCopy, appliedFeatures: appliedFeaturesCopy, libraries: libsCopy, + validators: validatorsCopy, provider: provider, chkOpts: chkOptsCopy, prsrOpts: prsrOptsCopy, + costOptions: costOptsCopy, } return ext.configure(opts) } @@ -347,6 +407,25 @@ func (e *Env) HasLibrary(libName string) bool { return exists && configured } +// Libraries returns a list of SingletonLibrary that have been configured in the environment. +func (e *Env) Libraries() []string { + libraries := make([]string, 0, len(e.libraries)) + for libName := range e.libraries { + libraries = append(libraries, libName) + } + return libraries +} + +// HasValidator returns whether a specific ASTValidator has been configured in the environment. 
+func (e *Env) HasValidator(name string) bool { + for _, v := range e.validators { + if v.Name() == name { + return true + } + } + return false +} + // Parse parses the input expression value `txt` to a Ast and/or a set of Issues. // // This form of Parse creates a Source value for the input `txt` and forwards to the @@ -364,16 +443,11 @@ func (e *Env) Parse(txt string) (*Ast, *Issues) { // It is possible to have both non-nil Ast and Issues values returned from this call; however, // the mere presence of an Ast does not imply that it is valid for use. func (e *Env) ParseSource(src Source) (*Ast, *Issues) { - res, errs := e.prsr.Parse(src) + parsed, errs := e.prsr.Parse(src) if len(errs.GetErrors()) > 0 { return nil, &Issues{errs: errs} } - // Manually create the Ast to ensure that the text source information is propagated on - // subsequent calls to Check. - return &Ast{ - source: src, - expr: res.GetExpr(), - info: res.GetSourceInfo()}, nil + return &Ast{source: src, impl: parsed}, nil } // Program generates an evaluable instance of the Ast within the environment (Env). @@ -388,36 +462,64 @@ func (e *Env) Program(ast *Ast, opts ...ProgramOption) (Program, error) { return newProgram(e, ast, optSet) } +// CELTypeAdapter returns the `types.Adapter` configured for the environment. +func (e *Env) CELTypeAdapter() types.Adapter { + return e.adapter +} + +// CELTypeProvider returns the `types.Provider` configured for the environment. +func (e *Env) CELTypeProvider() types.Provider { + return e.provider +} + // TypeAdapter returns the `ref.TypeAdapter` configured for the environment. +// +// Deprecated: use CELTypeAdapter() func (e *Env) TypeAdapter() ref.TypeAdapter { return e.adapter } // TypeProvider returns the `ref.TypeProvider` configured for the environment. +// +// Deprecated: use CELTypeProvider() func (e *Env) TypeProvider() ref.TypeProvider { - return e.provider + if legacyProvider, ok := e.provider.(ref.TypeProvider); ok { + return legacyProvider + } + return &interopLegacyTypeProvider{Provider: e.provider} } -// UnknownVars returns an interpreter.PartialActivation which marks all variables -// declared in the Env as unknown AttributePattern values. +// UnknownVars returns an interpreter.PartialActivation which marks all variables declared in the +// Env as unknown AttributePattern values. // -// Note, the UnknownVars will behave the same as an interpreter.EmptyActivation -// unless the PartialAttributes option is provided as a ProgramOption. +// Note, the UnknownVars will behave the same as an interpreter.EmptyActivation unless the +// PartialAttributes option is provided as a ProgramOption. func (e *Env) UnknownVars() interpreter.PartialActivation { - var unknownPatterns []*interpreter.AttributePattern - for _, d := range e.declarations { - switch d.GetDeclKind().(type) { - case *exprpb.Decl_Ident: - unknownPatterns = append(unknownPatterns, - interpreter.NewAttributePattern(d.GetName())) - } - } - part, _ := PartialVars( - interpreter.EmptyActivation(), - unknownPatterns...) + act := interpreter.EmptyActivation() + part, _ := PartialVars(act, e.computeUnknownVars(act)...) return part } +// PartialVars returns an interpreter.PartialActivation where all variables not in the input variable +// set, but which have been configured in the environment, are marked as unknown. +// +// The `vars` value may either be an interpreter.Activation or any valid input to the +// interpreter.NewActivation call. 
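For reference, the Parse/Check/Program surface reworked in this hunk is exercised end to end by a sketch like the following; the environment, variable name, and expression are illustrative assumptions rather than anything taken from the patch.

package main

import (
	"fmt"
	"log"

	"github.com/google/cel-go/cel"
)

func main() {
	// Declare a single string variable in the environment.
	env, err := cel.NewEnv(cel.Variable("name", cel.StringType))
	if err != nil {
		log.Fatalf("env error: %v", err)
	}

	// Parse and Check separately; Compile combines both steps.
	parsed, iss := env.Parse(`"hello " + name`)
	if iss.Err() != nil {
		log.Fatalf("parse error: %v", iss.Err())
	}
	checked, iss := env.Check(parsed)
	if iss.Err() != nil {
		log.Fatalf("check error: %v", iss.Err())
	}

	// Program produces an evaluable instance of the checked Ast.
	prg, err := env.Program(checked)
	if err != nil {
		log.Fatalf("program error: %v", err)
	}
	out, _, err := prg.Eval(map[string]any{"name": "world"})
	if err != nil {
		log.Fatalf("eval error: %v", err)
	}
	fmt.Println(out) // hello world
}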
+// +// Note, this is equivalent to calling cel.PartialVars and manually configuring the set of unknown +// variables. For more advanced use cases of partial state where portions of an object graph, rather +// than top-level variables, are missing the PartialVars() method may be a more suitable choice. +// +// Note, the PartialVars will behave the same as an interpreter.EmptyActivation unless the +// PartialAttributes option is provided as a ProgramOption. +func (e *Env) PartialVars(vars any) (interpreter.PartialActivation, error) { + act, err := interpreter.NewActivation(vars) + if err != nil { + return nil, err + } + return PartialVars(act, e.computeUnknownVars(act)...) +} + // ResidualAst takes an Ast and its EvalDetails to produce a new Ast which only contains the // attribute references which are unknown. // @@ -441,8 +543,9 @@ func (e *Env) UnknownVars() interpreter.PartialActivation { // TODO: Consider adding an option to generate a Program.Residual to avoid round-tripping to an // Ast format and then Program again. func (e *Env) ResidualAst(a *Ast, details *EvalDetails) (*Ast, error) { - pruned := interpreter.PruneAst(a.Expr(), a.SourceInfo().GetMacroCalls(), details.State()) - expr, err := AstToString(ParsedExprToAst(pruned)) + pruned := interpreter.PruneAst(a.impl.Expr(), a.impl.SourceInfo().MacroCalls(), details.State()) + newAST := &Ast{source: a.Source(), impl: pruned} + expr, err := AstToString(newAST) if err != nil { return nil, err } @@ -463,11 +566,10 @@ func (e *Env) ResidualAst(a *Ast, details *EvalDetails) (*Ast, error) { // EstimateCost estimates the cost of a type checked CEL expression using the length estimates of input data and // extension functions provided by estimator. func (e *Env) EstimateCost(ast *Ast, estimator checker.CostEstimator, opts ...checker.CostOption) (checker.CostEstimate, error) { - checked, err := AstToCheckedExpr(ast) - if err != nil { - return checker.CostEstimate{}, fmt.Errorf("EsimateCost could not inspect Ast: %v", err) - } - return checker.Cost(checked, estimator, opts...) + extendedOpts := make([]checker.CostOption, 0, len(e.costOptions)) + extendedOpts = append(extendedOpts, opts...) + extendedOpts = append(extendedOpts, e.costOptions...) + return checker.Cost(ast.impl, estimator, extendedOpts...) } // configure applies a series of EnvOptions to the current environment. @@ -488,14 +590,6 @@ func (e *Env) configure(opts []EnvOption) (*Env, error) { return nil, err } - // Initialize all of the functions configured within the environment. - for _, fn := range e.functions { - err = fn.init() - if err != nil { - return nil, err - } - } - // Configure the parser. prsrOpts := []parser.Option{} prsrOpts = append(prsrOpts, e.prsrOpts...) @@ -504,6 +598,9 @@ func (e *Env) configure(opts []EnvOption) (*Env, error) { if e.HasFeature(featureEnableMacroCallTracking) { prsrOpts = append(prsrOpts, parser.PopulateMacroCalls(true)) } + if e.HasFeature(featureVariadicLogicalASTs) { + prsrOpts = append(prsrOpts, parser.EnableVariadicOperatorASTs(true)) + } e.prsr, err = parser.NewParser(prsrOpts...) if err != nil { return nil, err @@ -525,8 +622,6 @@ func (e *Env) initChecker() (*checker.Env, error) { chkOpts := []checker.Option{} chkOpts = append(chkOpts, e.chkOpts...) 
chkOpts = append(chkOpts, - checker.HomogeneousAggregateLiterals( - e.HasFeature(featureDisableDynamicAggregateLiterals)), checker.CrossTypeNumericComparisons( e.HasFeature(featureCrossTypeNumericComparisons))) @@ -536,19 +631,17 @@ func (e *Env) initChecker() (*checker.Env, error) { return } // Add the statically configured declarations. - err = ce.Add(e.declarations...) + err = ce.AddIdents(e.variables...) if err != nil { e.setCheckerOrError(nil, err) return } // Add the function declarations which are derived from the FunctionDecl instances. for _, fn := range e.functions { - fnDecl, err := functionDeclToExprDecl(fn) - if err != nil { - e.setCheckerOrError(nil, err) - return + if fn.IsDeclarationDisabled() { + continue } - err = ce.Add(fnDecl) + err = ce.AddFunctions(fn) if err != nil { e.setCheckerOrError(nil, err) return @@ -596,17 +689,43 @@ func (e *Env) maybeApplyFeature(feature int, option EnvOption) (*Env, error) { return e, nil } +// computeUnknownVars determines a set of missing variables based on the input activation and the +// environment's configured declaration set. +func (e *Env) computeUnknownVars(vars interpreter.Activation) []*interpreter.AttributePattern { + var unknownPatterns []*interpreter.AttributePattern + for _, v := range e.variables { + varName := v.Name() + if _, found := vars.ResolveName(varName); found { + continue + } + unknownPatterns = append(unknownPatterns, interpreter.NewAttributePattern(varName)) + } + return unknownPatterns +} + +// Error type which references an expression id, a location within source, and a message. +type Error = common.Error + // Issues defines methods for inspecting the error details of parse and check calls. // // Note: in the future, non-fatal warnings and notices may be inspectable via the Issues struct. type Issues struct { errs *common.Errors + info *celast.SourceInfo } // NewIssues returns an Issues struct from a common.Errors object. func NewIssues(errs *common.Errors) *Issues { + return NewIssuesWithSourceInfo(errs, nil) +} + +// NewIssuesWithSourceInfo returns an Issues struct from a common.Errors object with SourceInfo metatata +// which can be used with the `ReportErrorAtID` method for additional error reports within the context +// information that's inferred from an expression id. +func NewIssuesWithSourceInfo(errs *common.Errors, info *celast.SourceInfo) *Issues { return &Issues{ errs: errs, + info: info, } } @@ -622,9 +741,9 @@ func (i *Issues) Err() error { } // Errors returns the collection of errors encountered in more granular detail. -func (i *Issues) Errors() []common.Error { +func (i *Issues) Errors() []*Error { if i == nil { - return []common.Error{} + return []*Error{} } return i.errs.GetErrors() } @@ -648,6 +767,14 @@ func (i *Issues) String() string { return i.errs.ToDisplayString() } +// ReportErrorAtID reports an error message with an optional set of formatting arguments. +// +// The source metadata for the expression at `id`, if present, is attached to the error report. +// To ensure that source metadata is attached to error reports, use NewIssuesWithSourceInfo. +func (i *Issues) ReportErrorAtID(id int64, message string, args ...any) { + i.errs.ReportErrorAtID(id, i.info.GetStartLocation(id), message, args...) +} + // getStdEnv lazy initializes the CEL standard environment. 
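The new Env.PartialVars helper shown above computes unknown attribute patterns from the declared variables (see computeUnknownVars), and pairs with ResidualAst from the same file. A minimal sketch under assumed variable names and an illustrative expression; the OptTrackState/OptPartialEval program options come from the public cel package rather than this excerpt.

package main

import (
	"fmt"
	"log"

	"github.com/google/cel-go/cel"
)

func main() {
	env, err := cel.NewEnv(
		cel.Variable("user", cel.StringType),
		cel.Variable("resource", cel.StringType),
	)
	if err != nil {
		log.Fatal(err)
	}
	checked, iss := env.Compile(`user == "admin" && resource == "secret"`)
	if iss.Err() != nil {
		log.Fatal(iss.Err())
	}

	// Partial evaluation needs state tracking and partial-eval support on the program.
	prg, err := env.Program(checked, cel.EvalOptions(cel.OptTrackState, cel.OptPartialEval))
	if err != nil {
		log.Fatal(err)
	}

	// Only `user` is supplied; `resource` is marked unknown from the env declarations.
	vars, err := env.PartialVars(map[string]any{"user": "admin"})
	if err != nil {
		log.Fatal(err)
	}
	out, details, err := prg.Eval(vars)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(out) // expected to be an unknown value, since `resource` was not provided

	// Prune the AST down to the references that remained unknown.
	residual, err := env.ResidualAst(checked, details)
	if err != nil {
		log.Fatal(err)
	}
	txt, _ := cel.AstToString(residual)
	fmt.Println(txt) // expected: resource == "secret"
}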
func getStdEnv() (*Env, error) { stdEnvInit.Do(func() { @@ -656,6 +783,97 @@ func getStdEnv() (*Env, error) { return stdEnv, stdEnvErr } +// interopCELTypeProvider layers support for the types.Provider interface on top of a ref.TypeProvider. +type interopCELTypeProvider struct { + ref.TypeProvider +} + +// FindStructType returns a types.Type instance for the given fully-qualified typeName if one exists. +// +// This method proxies to the underyling ref.TypeProvider's FindType method and converts protobuf type +// into a native type representation. If the conversion fails, the type is listed as not found. +func (p *interopCELTypeProvider) FindStructType(typeName string) (*types.Type, bool) { + if et, found := p.FindType(typeName); found { + t, err := types.ExprTypeToType(et) + if err != nil { + return nil, false + } + return t, true + } + return nil, false +} + +// FindStructFieldNames returns an empty set of field for the interop provider. +// +// To inspect the field names, migrate to a `types.Provider` implementation. +func (p *interopCELTypeProvider) FindStructFieldNames(typeName string) ([]string, bool) { + return []string{}, false +} + +// FindStructFieldType returns a types.FieldType instance for the given fully-qualified typeName and field +// name, if one exists. +// +// This method proxies to the underyling ref.TypeProvider's FindFieldType method and converts protobuf type +// into a native type representation. If the conversion fails, the type is listed as not found. +func (p *interopCELTypeProvider) FindStructFieldType(structType, fieldName string) (*types.FieldType, bool) { + if ft, found := p.FindFieldType(structType, fieldName); found { + t, err := types.ExprTypeToType(ft.Type) + if err != nil { + return nil, false + } + return &types.FieldType{ + Type: t, + IsSet: ft.IsSet, + GetFrom: ft.GetFrom, + }, true + } + return nil, false +} + +// interopLegacyTypeProvider layers support for the ref.TypeProvider interface on top of a types.Provider. +type interopLegacyTypeProvider struct { + types.Provider +} + +// FindType retruns the protobuf Type representation for the input type name if one exists. +// +// This method proxies to the underlying types.Provider FindStructType method and converts the types.Type +// value to a protobuf Type representation. +// +// Failure to convert the type will result in the type not being found. +func (p *interopLegacyTypeProvider) FindType(typeName string) (*exprpb.Type, bool) { + if t, found := p.FindStructType(typeName); found { + et, err := types.TypeToExprType(t) + if err != nil { + return nil, false + } + return et, true + } + return nil, false +} + +// FindFieldType returns the protobuf-based FieldType representation for the input type name and field, +// if one exists. +// +// This call proxies to the types.Provider FindStructFieldType method and converts the types.FIeldType +// value to a protobuf-based ref.FieldType representation if found. +// +// Failure to convert the FieldType will result in the field not being found. 
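The interop shims above are what back the deprecated Env.TypeAdapter and Env.TypeProvider accessors once the environment holds a native types.Provider. A small sketch of the two views, assuming a default environment:

package main

import (
	"fmt"
	"log"

	"github.com/google/cel-go/cel"
)

func main() {
	env, err := cel.NewEnv()
	if err != nil {
		log.Fatal(err)
	}

	// New-style accessors return the native types.Provider / types.Adapter views.
	provider := env.CELTypeProvider()
	adapter := env.CELTypeAdapter()

	// The deprecated accessor still works: when the configured provider does not
	// implement the legacy ref.TypeProvider interface, it is wrapped by the
	// interopLegacyTypeProvider shim shown above.
	legacy := env.TypeProvider()

	fmt.Printf("provider=%T adapter=%T legacy=%T\n", provider, adapter, legacy)
}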
+func (p *interopLegacyTypeProvider) FindFieldType(structType, fieldName string) (*ref.FieldType, bool) { + if cft, found := p.FindStructFieldType(structType, fieldName); found { + et, err := types.TypeToExprType(cft.Type) + if err != nil { + return nil, false + } + return &ref.FieldType{ + Type: et, + IsSet: cft.IsSet, + GetFrom: cft.GetFrom, + }, true + } + return nil, false +} + var ( stdEnvInit sync.Once stdEnv *Env diff --git a/vendor/github.com/google/cel-go/cel/folding.go b/vendor/github.com/google/cel-go/cel/folding.go new file mode 100644 index 000000000..d7060896d --- /dev/null +++ b/vendor/github.com/google/cel-go/cel/folding.go @@ -0,0 +1,559 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cel + +import ( + "fmt" + + "github.com/google/cel-go/common/ast" + "github.com/google/cel-go/common/operators" + "github.com/google/cel-go/common/overloads" + "github.com/google/cel-go/common/types" + "github.com/google/cel-go/common/types/ref" + "github.com/google/cel-go/common/types/traits" +) + +// ConstantFoldingOption defines a functional option for configuring constant folding. +type ConstantFoldingOption func(opt *constantFoldingOptimizer) (*constantFoldingOptimizer, error) + +// MaxConstantFoldIterations limits the number of times literals may be folding during optimization. +// +// Defaults to 100 if not set. +func MaxConstantFoldIterations(limit int) ConstantFoldingOption { + return func(opt *constantFoldingOptimizer) (*constantFoldingOptimizer, error) { + opt.maxFoldIterations = limit + return opt, nil + } +} + +// NewConstantFoldingOptimizer creates an optimizer which inlines constant scalar an aggregate +// literal values within function calls and select statements with their evaluated result. +func NewConstantFoldingOptimizer(opts ...ConstantFoldingOption) (ASTOptimizer, error) { + folder := &constantFoldingOptimizer{ + maxFoldIterations: defaultMaxConstantFoldIterations, + } + var err error + for _, o := range opts { + folder, err = o(folder) + if err != nil { + return nil, err + } + } + return folder, nil +} + +type constantFoldingOptimizer struct { + maxFoldIterations int +} + +// Optimize queries the expression graph for scalar and aggregate literal expressions within call and +// select statements and then evaluates them and replaces the call site with the literal result. +// +// Note: only values which can be represented as literals in CEL syntax are supported. +func (opt *constantFoldingOptimizer) Optimize(ctx *OptimizerContext, a *ast.AST) *ast.AST { + root := ast.NavigateAST(a) + + // Walk the list of foldable expression and continue to fold until there are no more folds left. + // All of the fold candidates returned by the constantExprMatcher should succeed unless there's + // a logic bug with the selection of expressions. 
+ foldableExprs := ast.MatchDescendants(root, constantExprMatcher) + foldCount := 0 + for len(foldableExprs) != 0 && foldCount < opt.maxFoldIterations { + for _, fold := range foldableExprs { + // If the expression could be folded because it's a non-strict call, and the + // branches are pruned, continue to the next fold. + if fold.Kind() == ast.CallKind && maybePruneBranches(ctx, fold) { + continue + } + // Otherwise, assume all context is needed to evaluate the expression. + err := tryFold(ctx, a, fold) + if err != nil { + ctx.ReportErrorAtID(fold.ID(), "constant-folding evaluation failed: %v", err.Error()) + return a + } + } + foldCount++ + foldableExprs = ast.MatchDescendants(root, constantExprMatcher) + } + // Once all of the constants have been folded, try to run through the remaining comprehensions + // one last time. In this case, there's no guarantee they'll run, so we only update the + // target comprehension node with the literal value if the evaluation succeeds. + for _, compre := range ast.MatchDescendants(root, ast.KindMatcher(ast.ComprehensionKind)) { + tryFold(ctx, a, compre) + } + + // If the output is a list, map, or struct which contains optional entries, then prune it + // to make sure that the optionals, if resolved, do not surface in the output literal. + pruneOptionalElements(ctx, root) + + // Ensure that all intermediate values in the folded expression can be represented as valid + // CEL literals within the AST structure. Use `PostOrderVisit` rather than `MatchDescendents` + // to avoid extra allocations during this final pass through the AST. + ast.PostOrderVisit(root, ast.NewExprVisitor(func(e ast.Expr) { + if e.Kind() != ast.LiteralKind { + return + } + val := e.AsLiteral() + adapted, err := adaptLiteral(ctx, val) + if err != nil { + ctx.ReportErrorAtID(root.ID(), "constant-folding evaluation failed: %v", err.Error()) + return + } + ctx.UpdateExpr(e, adapted) + })) + + return a +} + +// tryFold attempts to evaluate a sub-expression to a literal. +// +// If the evaluation succeeds, the input expr value will be modified to become a literal, otherwise +// the method will return an error. +func tryFold(ctx *OptimizerContext, a *ast.AST, expr ast.Expr) error { + // Assume all context is needed to evaluate the expression. + subAST := &Ast{ + impl: ast.NewCheckedAST(ast.NewAST(expr, a.SourceInfo()), a.TypeMap(), a.ReferenceMap()), + } + prg, err := ctx.Program(subAST) + if err != nil { + return err + } + out, _, err := prg.Eval(NoVars()) + if err != nil { + return err + } + // Update the fold expression to be a literal. + ctx.UpdateExpr(expr, ctx.NewLiteral(out)) + return nil +} + +// maybePruneBranches inspects the non-strict call expression to determine whether +// a branch can be removed. Evaluation will naturally prune logical and / or calls, +// but conditional will not be pruned cleanly, so this is one small area where the +// constant folding step reimplements a portion of the evaluator. 
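Applying the constant-folding optimizer defined in this file typically goes through the static optimizer driver that ships alongside it in this cel-go release (cel.NewStaticOptimizer, which is outside this excerpt and assumed here); the variable and expression are illustrative.

package main

import (
	"fmt"
	"log"

	"github.com/google/cel-go/cel"
)

func main() {
	env, err := cel.NewEnv(cel.Variable("x", cel.IntType))
	if err != nil {
		log.Fatal(err)
	}
	checked, iss := env.Compile(`[1, 2, 3].size() == 3 && x > 0`)
	if iss.Err() != nil {
		log.Fatal(iss.Err())
	}

	folder, err := cel.NewConstantFoldingOptimizer()
	if err != nil {
		log.Fatal(err)
	}
	// NewStaticOptimizer is assumed from the accompanying optimizer.go in this release.
	opt := cel.NewStaticOptimizer(folder)
	folded, iss := opt.Optimize(env, checked)
	if iss.Err() != nil {
		log.Fatal(iss.Err())
	}
	txt, _ := cel.AstToString(folded)
	fmt.Println(txt) // the constant sub-expression should fold away, leaving roughly `x > 0`
}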
+func maybePruneBranches(ctx *OptimizerContext, expr ast.NavigableExpr) bool { + call := expr.AsCall() + args := call.Args() + switch call.FunctionName() { + case operators.LogicalAnd, operators.LogicalOr: + return maybeShortcircuitLogic(ctx, call.FunctionName(), args, expr) + case operators.Conditional: + cond := args[0] + truthy := args[1] + falsy := args[2] + if cond.Kind() != ast.LiteralKind { + return false + } + if cond.AsLiteral() == types.True { + ctx.UpdateExpr(expr, truthy) + } else { + ctx.UpdateExpr(expr, falsy) + } + return true + case operators.In: + haystack := args[1] + if haystack.Kind() == ast.ListKind && haystack.AsList().Size() == 0 { + ctx.UpdateExpr(expr, ctx.NewLiteral(types.False)) + return true + } + needle := args[0] + if needle.Kind() == ast.LiteralKind && haystack.Kind() == ast.ListKind { + needleValue := needle.AsLiteral() + list := haystack.AsList() + for _, e := range list.Elements() { + if e.Kind() == ast.LiteralKind && e.AsLiteral().Equal(needleValue) == types.True { + ctx.UpdateExpr(expr, ctx.NewLiteral(types.True)) + return true + } + } + } + } + return false +} + +func maybeShortcircuitLogic(ctx *OptimizerContext, function string, args []ast.Expr, expr ast.NavigableExpr) bool { + shortcircuit := types.False + skip := types.True + if function == operators.LogicalOr { + shortcircuit = types.True + skip = types.False + } + newArgs := []ast.Expr{} + for _, arg := range args { + if arg.Kind() != ast.LiteralKind { + newArgs = append(newArgs, arg) + continue + } + if arg.AsLiteral() == skip { + continue + } + if arg.AsLiteral() == shortcircuit { + ctx.UpdateExpr(expr, arg) + return true + } + } + if len(newArgs) == 0 { + newArgs = append(newArgs, args[0]) + ctx.UpdateExpr(expr, newArgs[0]) + return true + } + if len(newArgs) == 1 { + ctx.UpdateExpr(expr, newArgs[0]) + return true + } + ctx.UpdateExpr(expr, ctx.NewCall(function, newArgs...)) + return true +} + +// pruneOptionalElements works from the bottom up to resolve optional elements within +// aggregate literals. +// +// Note, many aggregate literals will be resolved as arguments to functions or select +// statements, so this method exists to handle the case where the literal could not be +// fully resolved or exists outside of a call, select, or comprehension context. +func pruneOptionalElements(ctx *OptimizerContext, root ast.NavigableExpr) { + aggregateLiterals := ast.MatchDescendants(root, aggregateLiteralMatcher) + for _, lit := range aggregateLiterals { + switch lit.Kind() { + case ast.ListKind: + pruneOptionalListElements(ctx, lit) + case ast.MapKind: + pruneOptionalMapEntries(ctx, lit) + case ast.StructKind: + pruneOptionalStructFields(ctx, lit) + } + } +} + +func pruneOptionalListElements(ctx *OptimizerContext, e ast.Expr) { + l := e.AsList() + elems := l.Elements() + optIndices := l.OptionalIndices() + if len(optIndices) == 0 { + return + } + updatedElems := []ast.Expr{} + updatedIndices := []int32{} + newOptIndex := -1 + for _, e := range elems { + newOptIndex++ + if !l.IsOptional(int32(newOptIndex)) { + updatedElems = append(updatedElems, e) + continue + } + if e.Kind() != ast.LiteralKind { + updatedElems = append(updatedElems, e) + updatedIndices = append(updatedIndices, int32(newOptIndex)) + continue + } + optElemVal, ok := e.AsLiteral().(*types.Optional) + if !ok { + updatedElems = append(updatedElems, e) + updatedIndices = append(updatedIndices, int32(newOptIndex)) + continue + } + if !optElemVal.HasValue() { + newOptIndex-- // Skipping causes the list to get smaller. 
+ continue + } + ctx.UpdateExpr(e, ctx.NewLiteral(optElemVal.GetValue())) + updatedElems = append(updatedElems, e) + } + ctx.UpdateExpr(e, ctx.NewList(updatedElems, updatedIndices)) +} + +func pruneOptionalMapEntries(ctx *OptimizerContext, e ast.Expr) { + m := e.AsMap() + entries := m.Entries() + updatedEntries := []ast.EntryExpr{} + modified := false + for _, e := range entries { + entry := e.AsMapEntry() + key := entry.Key() + val := entry.Value() + // If the entry is not optional, or the value-side of the optional hasn't + // been resolved to a literal, then preserve the entry as-is. + if !entry.IsOptional() || val.Kind() != ast.LiteralKind { + updatedEntries = append(updatedEntries, e) + continue + } + optElemVal, ok := val.AsLiteral().(*types.Optional) + if !ok { + updatedEntries = append(updatedEntries, e) + continue + } + // When the key is not a literal, but the value is, then it needs to be + // restored to an optional value. + if key.Kind() != ast.LiteralKind { + undoOptVal, err := adaptLiteral(ctx, optElemVal) + if err != nil { + ctx.ReportErrorAtID(val.ID(), "invalid map value literal %v: %v", optElemVal, err) + } + ctx.UpdateExpr(val, undoOptVal) + updatedEntries = append(updatedEntries, e) + continue + } + modified = true + if !optElemVal.HasValue() { + continue + } + ctx.UpdateExpr(val, ctx.NewLiteral(optElemVal.GetValue())) + updatedEntry := ctx.NewMapEntry(key, val, false) + updatedEntries = append(updatedEntries, updatedEntry) + } + if modified { + ctx.UpdateExpr(e, ctx.NewMap(updatedEntries)) + } +} + +func pruneOptionalStructFields(ctx *OptimizerContext, e ast.Expr) { + s := e.AsStruct() + fields := s.Fields() + updatedFields := []ast.EntryExpr{} + modified := false + for _, f := range fields { + field := f.AsStructField() + val := field.Value() + if !field.IsOptional() || val.Kind() != ast.LiteralKind { + updatedFields = append(updatedFields, f) + continue + } + optElemVal, ok := val.AsLiteral().(*types.Optional) + if !ok { + updatedFields = append(updatedFields, f) + continue + } + modified = true + if !optElemVal.HasValue() { + continue + } + ctx.UpdateExpr(val, ctx.NewLiteral(optElemVal.GetValue())) + updatedField := ctx.NewStructField(field.Name(), val, false) + updatedFields = append(updatedFields, updatedField) + } + if modified { + ctx.UpdateExpr(e, ctx.NewStruct(s.TypeName(), updatedFields)) + } +} + +// adaptLiteral converts a runtime CEL value to its equivalent literal expression. +// +// For strongly typed values, the type-provider will be used to reconstruct the fields +// which are present in the literal and their equivalent initialization values. 
+func adaptLiteral(ctx *OptimizerContext, val ref.Val) (ast.Expr, error) { + switch t := val.Type().(type) { + case *types.Type: + switch t { + case types.BoolType, types.BytesType, types.DoubleType, types.IntType, + types.NullType, types.StringType, types.UintType: + return ctx.NewLiteral(val), nil + case types.DurationType: + return ctx.NewCall( + overloads.TypeConvertDuration, + ctx.NewLiteral(val.ConvertToType(types.StringType)), + ), nil + case types.TimestampType: + return ctx.NewCall( + overloads.TypeConvertTimestamp, + ctx.NewLiteral(val.ConvertToType(types.StringType)), + ), nil + case types.OptionalType: + opt := val.(*types.Optional) + if !opt.HasValue() { + return ctx.NewCall("optional.none"), nil + } + target, err := adaptLiteral(ctx, opt.GetValue()) + if err != nil { + return nil, err + } + return ctx.NewCall("optional.of", target), nil + case types.TypeType: + return ctx.NewIdent(val.(*types.Type).TypeName()), nil + case types.ListType: + l, ok := val.(traits.Lister) + if !ok { + return nil, fmt.Errorf("failed to adapt %v to literal", val) + } + elems := make([]ast.Expr, l.Size().(types.Int)) + idx := 0 + it := l.Iterator() + for it.HasNext() == types.True { + elemVal := it.Next() + elemExpr, err := adaptLiteral(ctx, elemVal) + if err != nil { + return nil, err + } + elems[idx] = elemExpr + idx++ + } + return ctx.NewList(elems, []int32{}), nil + case types.MapType: + m, ok := val.(traits.Mapper) + if !ok { + return nil, fmt.Errorf("failed to adapt %v to literal", val) + } + entries := make([]ast.EntryExpr, m.Size().(types.Int)) + idx := 0 + it := m.Iterator() + for it.HasNext() == types.True { + keyVal := it.Next() + keyExpr, err := adaptLiteral(ctx, keyVal) + if err != nil { + return nil, err + } + valVal := m.Get(keyVal) + valExpr, err := adaptLiteral(ctx, valVal) + if err != nil { + return nil, err + } + entries[idx] = ctx.NewMapEntry(keyExpr, valExpr, false) + idx++ + } + return ctx.NewMap(entries), nil + default: + provider := ctx.CELTypeProvider() + fields, found := provider.FindStructFieldNames(t.TypeName()) + if !found { + return nil, fmt.Errorf("failed to adapt %v to literal", val) + } + tester := val.(traits.FieldTester) + indexer := val.(traits.Indexer) + fieldInits := []ast.EntryExpr{} + for _, f := range fields { + field := types.String(f) + if tester.IsSet(field) != types.True { + continue + } + fieldVal := indexer.Get(field) + fieldExpr, err := adaptLiteral(ctx, fieldVal) + if err != nil { + return nil, err + } + fieldInits = append(fieldInits, ctx.NewStructField(f, fieldExpr, false)) + } + return ctx.NewStruct(t.TypeName(), fieldInits), nil + } + } + return nil, fmt.Errorf("failed to adapt %v to literal", val) +} + +// constantExprMatcher matches calls, select statements, and comprehensions whose arguments +// are all constant scalar or aggregate literal values. +// +// Only comprehensions which are not nested are included as possible constant folds, and only +// if all variables referenced in the comprehension stack exist are only iteration or +// accumulation variables. 
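The optional-pruning pass above (pruneOptionalListElements and friends) is what lets resolved optional values drop out of aggregate literals after folding. A hedged sketch of the observable effect, reusing the assumed NewStaticOptimizer driver and an illustrative list literal:

package main

import (
	"fmt"
	"log"

	"github.com/google/cel-go/cel"
)

func main() {
	env, err := cel.NewEnv(cel.OptionalTypes())
	if err != nil {
		log.Fatal(err)
	}
	// optional.ofNonZeroValue(0) evaluates to optional.none(), so the optional
	// element is expected to be pruned from the folded list literal.
	checked, iss := env.Compile(`[1, ?optional.ofNonZeroValue(0), 3]`)
	if iss.Err() != nil {
		log.Fatal(iss.Err())
	}

	folder, err := cel.NewConstantFoldingOptimizer()
	if err != nil {
		log.Fatal(err)
	}
	folded, iss := cel.NewStaticOptimizer(folder).Optimize(env, checked)
	if iss.Err() != nil {
		log.Fatal(iss.Err())
	}
	txt, _ := cel.AstToString(folded)
	fmt.Println(txt) // expected to reduce to [1, 3]
}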
+func constantExprMatcher(e ast.NavigableExpr) bool { + switch e.Kind() { + case ast.CallKind: + return constantCallMatcher(e) + case ast.SelectKind: + sel := e.AsSelect() // guaranteed to be a navigable value + return constantMatcher(sel.Operand().(ast.NavigableExpr)) + case ast.ComprehensionKind: + if isNestedComprehension(e) { + return false + } + vars := map[string]bool{} + constantExprs := true + visitor := ast.NewExprVisitor(func(e ast.Expr) { + if e.Kind() == ast.ComprehensionKind { + nested := e.AsComprehension() + vars[nested.AccuVar()] = true + vars[nested.IterVar()] = true + } + if e.Kind() == ast.IdentKind && !vars[e.AsIdent()] { + constantExprs = false + } + }) + ast.PreOrderVisit(e, visitor) + return constantExprs + default: + return false + } +} + +// constantCallMatcher identifies strict and non-strict calls which can be folded. +func constantCallMatcher(e ast.NavigableExpr) bool { + call := e.AsCall() + children := e.Children() + fnName := call.FunctionName() + if fnName == operators.LogicalAnd { + for _, child := range children { + if child.Kind() == ast.LiteralKind { + return true + } + } + } + if fnName == operators.LogicalOr { + for _, child := range children { + if child.Kind() == ast.LiteralKind { + return true + } + } + } + if fnName == operators.Conditional { + cond := children[0] + if cond.Kind() == ast.LiteralKind && cond.AsLiteral().Type() == types.BoolType { + return true + } + } + if fnName == operators.In { + haystack := children[1] + if haystack.Kind() == ast.ListKind && haystack.AsList().Size() == 0 { + return true + } + needle := children[0] + if needle.Kind() == ast.LiteralKind && haystack.Kind() == ast.ListKind { + needleValue := needle.AsLiteral() + list := haystack.AsList() + for _, e := range list.Elements() { + if e.Kind() == ast.LiteralKind && e.AsLiteral().Equal(needleValue) == types.True { + return true + } + } + } + } + // convert all other calls with constant arguments + for _, child := range children { + if !constantMatcher(child) { + return false + } + } + return true +} + +func isNestedComprehension(e ast.NavigableExpr) bool { + parent, found := e.Parent() + for found { + if parent.Kind() == ast.ComprehensionKind { + return true + } + parent, found = parent.Parent() + } + return false +} + +func aggregateLiteralMatcher(e ast.NavigableExpr) bool { + return e.Kind() == ast.ListKind || e.Kind() == ast.MapKind || e.Kind() == ast.StructKind +} + +var ( + constantMatcher = ast.ConstantValueMatcher() +) + +const ( + defaultMaxConstantFoldIterations = 100 +) diff --git a/vendor/github.com/google/cel-go/cel/inlining.go b/vendor/github.com/google/cel-go/cel/inlining.go new file mode 100644 index 000000000..8c8335d3b --- /dev/null +++ b/vendor/github.com/google/cel-go/cel/inlining.go @@ -0,0 +1,240 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package cel + +import ( + "github.com/google/cel-go/common/ast" + "github.com/google/cel-go/common/containers" + "github.com/google/cel-go/common/operators" + "github.com/google/cel-go/common/overloads" + "github.com/google/cel-go/common/types" + "github.com/google/cel-go/common/types/traits" +) + +// InlineVariable holds a variable name to be matched and an AST representing +// the expression graph which should be used to replace it. +type InlineVariable struct { + name string + alias string + def *ast.AST +} + +// Name returns the qualified variable or field selection to replace. +func (v *InlineVariable) Name() string { + return v.name +} + +// Alias returns the alias to use when performing cel.bind() calls during inlining. +func (v *InlineVariable) Alias() string { + return v.alias +} + +// Expr returns the inlined expression value. +func (v *InlineVariable) Expr() ast.Expr { + return v.def.Expr() +} + +// Type indicates the inlined expression type. +func (v *InlineVariable) Type() *Type { + return v.def.GetType(v.def.Expr().ID()) +} + +// NewInlineVariable declares a variable name to be replaced by a checked expression. +func NewInlineVariable(name string, definition *Ast) *InlineVariable { + return NewInlineVariableWithAlias(name, name, definition) +} + +// NewInlineVariableWithAlias declares a variable name to be replaced by a checked expression. +// If the variable occurs more than once, the provided alias will be used to replace the expressions +// where the variable name occurs. +func NewInlineVariableWithAlias(name, alias string, definition *Ast) *InlineVariable { + return &InlineVariable{name: name, alias: alias, def: definition.impl} +} + +// NewInliningOptimizer creates and optimizer which replaces variables with expression definitions. +// +// If a variable occurs one time, the variable is replaced by the inline definition. If the +// variable occurs more than once, the variable occurences are replaced by a cel.bind() call. +func NewInliningOptimizer(inlineVars ...*InlineVariable) ASTOptimizer { + return &inliningOptimizer{variables: inlineVars} +} + +type inliningOptimizer struct { + variables []*InlineVariable +} + +func (opt *inliningOptimizer) Optimize(ctx *OptimizerContext, a *ast.AST) *ast.AST { + root := ast.NavigateAST(a) + for _, inlineVar := range opt.variables { + matches := ast.MatchDescendants(root, opt.matchVariable(inlineVar.Name())) + // Skip cases where the variable isn't in the expression graph + if len(matches) == 0 { + continue + } + + // For a single match, do a direct replacement of the expression sub-graph. + if len(matches) == 1 || !isBindable(matches, inlineVar.Expr(), inlineVar.Type()) { + for _, match := range matches { + // Copy the inlined AST expr and source info. + copyExpr := copyASTAndMetadata(ctx, inlineVar.def) + opt.inlineExpr(ctx, match, copyExpr, inlineVar.Type()) + } + continue + } + + // For multiple matches, find the least common ancestor (lca) and insert the + // variable as a cel.bind() macro. + var lca ast.NavigableExpr = root + lcaAncestorCount := 0 + ancestors := map[int64]int{} + for _, match := range matches { + // Update the identifier matches with the provided alias. 
+ parent, found := match, true + for found { + ancestorCount, hasAncestor := ancestors[parent.ID()] + if !hasAncestor { + ancestors[parent.ID()] = 1 + parent, found = parent.Parent() + continue + } + if lcaAncestorCount < ancestorCount || (lcaAncestorCount == ancestorCount && lca.Depth() < parent.Depth()) { + lca = parent + lcaAncestorCount = ancestorCount + } + ancestors[parent.ID()] = ancestorCount + 1 + parent, found = parent.Parent() + } + aliasExpr := ctx.NewIdent(inlineVar.Alias()) + opt.inlineExpr(ctx, match, aliasExpr, inlineVar.Type()) + } + + // Copy the inlined AST expr and source info. + copyExpr := copyASTAndMetadata(ctx, inlineVar.def) + // Update the least common ancestor by inserting a cel.bind() call to the alias. + inlined, bindMacro := ctx.NewBindMacro(lca.ID(), inlineVar.Alias(), copyExpr, lca) + opt.inlineExpr(ctx, lca, inlined, inlineVar.Type()) + ctx.sourceInfo.SetMacroCall(lca.ID(), bindMacro) + } + return a +} + +// copyASTAndMetadata copies the input AST and propagates the macro metadata into the AST being +// optimized. +func copyASTAndMetadata(ctx *OptimizerContext, a *ast.AST) ast.Expr { + copyExpr, copyInfo := ctx.CopyAST(a) + // Add in the macro calls from the inlined AST + for id, call := range copyInfo.MacroCalls() { + ctx.sourceInfo.SetMacroCall(id, call) + } + return copyExpr +} + +// inlineExpr replaces the current expression with the inlined one, unless the location of the inlining +// happens within a presence test, e.g. has(a.b.c) -> inline alpha for a.b.c in which case an attempt is +// made to determine whether the inlined value can be presence or existence tested. +func (opt *inliningOptimizer) inlineExpr(ctx *OptimizerContext, prev ast.NavigableExpr, inlined ast.Expr, inlinedType *Type) { + switch prev.Kind() { + case ast.SelectKind: + sel := prev.AsSelect() + if !sel.IsTestOnly() { + ctx.UpdateExpr(prev, inlined) + return + } + opt.rewritePresenceExpr(ctx, prev, inlined, inlinedType) + default: + ctx.UpdateExpr(prev, inlined) + } +} + +// rewritePresenceExpr converts the inlined expression, when it occurs within a has() macro, to type-safe +// expression appropriate for the inlined type, if possible. +// +// If the rewrite is not possible an error is reported at the inline expression site. +func (opt *inliningOptimizer) rewritePresenceExpr(ctx *OptimizerContext, prev, inlined ast.Expr, inlinedType *Type) { + // If the input inlined expression is not a select expression it won't work with the has() + // macro. Attempt to rewrite the presence test in terms of the typed input, otherwise error. + if inlined.Kind() == ast.SelectKind { + presenceTest, hasMacro := ctx.NewHasMacro(prev.ID(), inlined) + ctx.UpdateExpr(prev, presenceTest) + ctx.sourceInfo.SetMacroCall(prev.ID(), hasMacro) + return + } + + ctx.sourceInfo.ClearMacroCall(prev.ID()) + if inlinedType.IsAssignableType(NullType) { + ctx.UpdateExpr(prev, + ctx.NewCall(operators.NotEquals, + inlined, + ctx.NewLiteral(types.NullValue), + )) + return + } + if inlinedType.HasTrait(traits.SizerType) { + ctx.UpdateExpr(prev, + ctx.NewCall(operators.NotEquals, + ctx.NewMemberCall(overloads.Size, inlined), + ctx.NewLiteral(types.IntZero), + )) + return + } + ctx.ReportErrorAtID(prev.ID(), "unable to inline expression type %v into presence test", inlinedType) +} + +// isBindable indicates whether the inlined type can be used within a cel.bind() if the expression +// being replaced occurs within a presence test. Value types with a size() method or field selection +// support can be bound. 
+// +// In future iterations, support may also be added for indexer types which can be rewritten as an `in` +// expression; however, this would imply a rewrite of the inlined expression that may not be necessary +// in most cases. +func isBindable(matches []ast.NavigableExpr, inlined ast.Expr, inlinedType *Type) bool { + if inlinedType.IsAssignableType(NullType) || + inlinedType.HasTrait(traits.SizerType) || + inlinedType.HasTrait(traits.FieldTesterType) { + return true + } + for _, m := range matches { + if m.Kind() != ast.SelectKind { + continue + } + sel := m.AsSelect() + if sel.IsTestOnly() { + return false + } + } + return true +} + +// matchVariable matches simple identifiers, select expressions, and presence test expressions +// which match the (potentially) qualified variable name provided as input. +// +// Note, this function does not support inlining against select expressions which includes optional +// field selection. This may be a future refinement. +func (opt *inliningOptimizer) matchVariable(varName string) ast.ExprMatcher { + return func(e ast.NavigableExpr) bool { + if e.Kind() == ast.IdentKind && e.AsIdent() == varName { + return true + } + if e.Kind() == ast.SelectKind { + sel := e.AsSelect() + // While the `ToQualifiedName` call could take the select directly, this + // would skip presence tests from possible matches, which we would like + // to include. + qualName, found := containers.ToQualifiedName(sel.Operand()) + return found && qualName+"."+sel.FieldName() == varName + } + return false + } +} diff --git a/vendor/github.com/google/cel-go/cel/io.go b/vendor/github.com/google/cel-go/cel/io.go index 93ded3cf1..3133fb9d7 100644 --- a/vendor/github.com/google/cel-go/cel/io.go +++ b/vendor/github.com/google/cel-go/cel/io.go @@ -22,6 +22,7 @@ import ( "google.golang.org/protobuf/proto" "github.com/google/cel-go/common" + "github.com/google/cel-go/common/ast" "github.com/google/cel-go/common/types" "github.com/google/cel-go/common/types/ref" "github.com/google/cel-go/common/types/traits" @@ -33,7 +34,8 @@ import ( // CheckedExprToAst converts a checked expression proto message to an Ast. func CheckedExprToAst(checkedExpr *exprpb.CheckedExpr) *Ast { - return CheckedExprToAstWithSource(checkedExpr, nil) + checked, _ := CheckedExprToAstWithSource(checkedExpr, nil) + return checked } // CheckedExprToAstWithSource converts a checked expression proto message to an Ast, @@ -44,29 +46,12 @@ func CheckedExprToAst(checkedExpr *exprpb.CheckedExpr) *Ast { // through future calls. // // Prefer CheckedExprToAst if loading expressions from storage. -func CheckedExprToAstWithSource(checkedExpr *exprpb.CheckedExpr, src Source) *Ast { - refMap := checkedExpr.GetReferenceMap() - if refMap == nil { - refMap = map[int64]*exprpb.Reference{} - } - typeMap := checkedExpr.GetTypeMap() - if typeMap == nil { - typeMap = map[int64]*exprpb.Type{} - } - si := checkedExpr.GetSourceInfo() - if si == nil { - si = &exprpb.SourceInfo{} - } - if src == nil { - src = common.NewInfoSource(si) - } - return &Ast{ - expr: checkedExpr.GetExpr(), - info: si, - source: src, - refMap: refMap, - typeMap: typeMap, +func CheckedExprToAstWithSource(checkedExpr *exprpb.CheckedExpr, src Source) (*Ast, error) { + checked, err := ast.ToAST(checkedExpr) + if err != nil { + return nil, err } + return &Ast{source: src, impl: checked}, nil } // AstToCheckedExpr converts an Ast to an protobuf CheckedExpr value. 
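To exercise the inlining optimizer defined above, the same assumed NewStaticOptimizer driver applies; the variable names and expressions below are illustrative. Because `greeting` occurs twice and strings support size(), the optimizer is expected to introduce a cel.bind() around the alias rather than duplicating the definition.

package main

import (
	"fmt"
	"log"

	"github.com/google/cel-go/cel"
)

func main() {
	env, err := cel.NewEnv(
		cel.Variable("name", cel.StringType),
		cel.Variable("greeting", cel.StringType),
	)
	if err != nil {
		log.Fatal(err)
	}

	// The definition that should replace occurrences of `greeting`.
	def, iss := env.Compile(`"hello " + name`)
	if iss.Err() != nil {
		log.Fatal(iss.Err())
	}
	// A target expression referencing `greeting` more than once.
	target, iss := env.Compile(`greeting.size() > 0 && greeting != "hello world"`)
	if iss.Err() != nil {
		log.Fatal(iss.Err())
	}

	inliner := cel.NewInliningOptimizer(cel.NewInlineVariable("greeting", def))
	inlined, iss := cel.NewStaticOptimizer(inliner).Optimize(env, target)
	if iss.Err() != nil {
		log.Fatal(iss.Err())
	}
	txt, _ := cel.AstToString(inlined)
	fmt.Println(txt) // roughly: cel.bind(greeting, "hello " + name, greeting.size() > 0 && ...)
}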
@@ -76,12 +61,7 @@ func AstToCheckedExpr(a *Ast) (*exprpb.CheckedExpr, error) { if !a.IsChecked() { return nil, fmt.Errorf("cannot convert unchecked ast") } - return &exprpb.CheckedExpr{ - Expr: a.Expr(), - SourceInfo: a.SourceInfo(), - ReferenceMap: a.refMap, - TypeMap: a.typeMap, - }, nil + return ast.ToProto(a.impl) } // ParsedExprToAst converts a parsed expression proto message to an Ast. @@ -97,18 +77,12 @@ func ParsedExprToAst(parsedExpr *exprpb.ParsedExpr) *Ast { // // Prefer ParsedExprToAst if loading expressions from storage. func ParsedExprToAstWithSource(parsedExpr *exprpb.ParsedExpr, src Source) *Ast { - si := parsedExpr.GetSourceInfo() - if si == nil { - si = &exprpb.SourceInfo{} - } + info, _ := ast.ProtoToSourceInfo(parsedExpr.GetSourceInfo()) if src == nil { - src = common.NewInfoSource(si) - } - return &Ast{ - expr: parsedExpr.GetExpr(), - info: si, - source: src, + src = common.NewInfoSource(parsedExpr.GetSourceInfo()) } + e, _ := ast.ProtoToExpr(parsedExpr.GetExpr()) + return &Ast{source: src, impl: ast.NewAST(e, info)} } // AstToParsedExpr converts an Ast to an protobuf ParsedExpr value. @@ -124,9 +98,7 @@ func AstToParsedExpr(a *Ast) (*exprpb.ParsedExpr, error) { // Note, the conversion may not be an exact replica of the original expression, but will produce // a string that is semantically equivalent and whose textual representation is stable. func AstToString(a *Ast) (string, error) { - expr := a.Expr() - info := a.SourceInfo() - return parser.Unparse(expr, info) + return parser.Unparse(a.impl.Expr(), a.impl.SourceInfo()) } // RefValueToValue converts between ref.Val and api.expr.Value. @@ -202,7 +174,7 @@ func RefValueToValue(res ref.Val) (*exprpb.Value, error) { } var ( - typeNameToTypeValue = map[string]*types.TypeValue{ + typeNameToTypeValue = map[string]ref.Val{ "bool": types.BoolType, "bytes": types.BytesType, "double": types.DoubleType, @@ -219,7 +191,7 @@ var ( ) // ValueToRefValue converts between exprpb.Value and ref.Val. -func ValueToRefValue(adapter ref.TypeAdapter, v *exprpb.Value) (ref.Val, error) { +func ValueToRefValue(adapter types.Adapter, v *exprpb.Value) (ref.Val, error) { switch v.Kind.(type) { case *exprpb.Value_NullValue: return types.NullValue, nil diff --git a/vendor/github.com/google/cel-go/cel/library.go b/vendor/github.com/google/cel-go/cel/library.go index bcfd44f78..deddc14e5 100644 --- a/vendor/github.com/google/cel-go/cel/library.go +++ b/vendor/github.com/google/cel-go/cel/library.go @@ -15,26 +15,25 @@ package cel import ( + "math" "strconv" "strings" "time" - "github.com/google/cel-go/checker" - "github.com/google/cel-go/common" + "github.com/google/cel-go/common/ast" "github.com/google/cel-go/common/operators" "github.com/google/cel-go/common/overloads" + "github.com/google/cel-go/common/stdlib" "github.com/google/cel-go/common/types" "github.com/google/cel-go/common/types/ref" "github.com/google/cel-go/common/types/traits" "github.com/google/cel-go/interpreter" - "github.com/google/cel-go/interpreter/functions" "github.com/google/cel-go/parser" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" ) const ( optMapMacro = "optMap" + optFlatMapMacro = "optFlatMap" hasValueFunc = "hasValue" optionalNoneFunc = "optional.none" optionalOfFunc = "optional.of" @@ -106,45 +105,214 @@ func (stdLibrary) LibraryName() string { return "cel.lib.std" } -// EnvOptions returns options for the standard CEL function declarations and macros. +// CompileOptions returns options for the standard CEL function declarations and macros. 
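The io.go changes keep the proto round-trip API intact while moving the conversion onto the common/ast package, and CheckedExprToAstWithSource now surfaces conversion errors. A short sketch of storing and restoring a checked expression, with an illustrative variable and expression:

package main

import (
	"fmt"
	"log"

	"github.com/google/cel-go/cel"
)

func main() {
	env, err := cel.NewEnv(cel.Variable("x", cel.IntType))
	if err != nil {
		log.Fatal(err)
	}
	checked, iss := env.Compile(`x + 1`)
	if iss.Err() != nil {
		log.Fatal(iss.Err())
	}

	// Serialize the checked Ast to its protobuf form, e.g. for storage.
	checkedPB, err := cel.AstToCheckedExpr(checked)
	if err != nil {
		log.Fatal(err)
	}

	// Restore it later; CheckedExprToAst is the error-swallowing convenience wrapper
	// around CheckedExprToAstWithSource.
	restored := cel.CheckedExprToAst(checkedPB)
	txt, err := cel.AstToString(restored)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(txt) // x + 1
}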
func (stdLibrary) CompileOptions() []EnvOption { return []EnvOption{ - Declarations(checker.StandardDeclarations()...), + func(e *Env) (*Env, error) { + var err error + for _, fn := range stdlib.Functions() { + existing, found := e.functions[fn.Name()] + if found { + fn, err = existing.Merge(fn) + if err != nil { + return nil, err + } + } + e.functions[fn.Name()] = fn + } + return e, nil + }, + func(e *Env) (*Env, error) { + e.variables = append(e.variables, stdlib.Types()...) + return e, nil + }, Macros(StandardMacros...), } } // ProgramOptions returns function implementations for the standard CEL functions. func (stdLibrary) ProgramOptions() []ProgramOption { - return []ProgramOption{ - Functions(functions.StandardOverloads()...), + return []ProgramOption{} +} + +// OptionalTypes enable support for optional syntax and types in CEL. +// +// The optional value type makes it possible to express whether variables have +// been provided, whether a result has been computed, and in the future whether +// an object field path, map key value, or list index has a value. +// +// # Syntax Changes +// +// OptionalTypes are unlike other CEL extensions because they modify the CEL +// syntax itself, notably through the use of a `?` preceding a field name or +// index value. +// +// ## Field Selection +// +// The optional syntax in field selection is denoted as `obj.?field`. In other +// words, if a field is set, return `optional.of(obj.field)“, else +// `optional.none()`. The optional field selection is viral in the sense that +// after the first optional selection all subsequent selections or indices +// are treated as optional, i.e. the following expressions are equivalent: +// +// obj.?field.subfield +// obj.?field.?subfield +// +// ## Indexing +// +// Similar to field selection, the optional syntax can be used in index +// expressions on maps and lists: +// +// list[?0] +// map[?key] +// +// ## Optional Field Setting +// +// When creating map or message literals, if a field may be optionally set +// based on its presence, then placing a `?` before the field name or key +// will ensure the type on the right-hand side must be optional(T) where T +// is the type of the field or key-value. +// +// The following returns a map with the key expression set only if the +// subfield is present, otherwise an empty map is created: +// +// {?key: obj.?field.subfield} +// +// ## Optional Element Setting +// +// When creating list literals, an element in the list may be optionally added +// when the element expression is preceded by a `?`: +// +// [a, ?b, ?c] // return a list with either [a], [a, b], [a, b, c], or [a, c] +// +// # Optional.Of +// +// Create an optional(T) value of a given value with type T. +// +// optional.of(10) +// +// # Optional.OfNonZeroValue +// +// Create an optional(T) value of a given value with type T if it is not a +// zero-value. A zero-value the default empty value for any given CEL type, +// including empty protobuf message types. If the value is empty, the result +// of this call will be optional.none(). +// +// optional.ofNonZeroValue([1, 2, 3]) // optional(list(int)) +// optional.ofNonZeroValue([]) // optional.none() +// optional.ofNonZeroValue(0) // optional.none() +// optional.ofNonZeroValue("") // optional.none() +// +// # Optional.None +// +// Create an empty optional value. +// +// # HasValue +// +// Determine whether the optional contains a value. 
+// +// optional.of(b'hello').hasValue() // true +// optional.ofNonZeroValue({}).hasValue() // false +// +// # Value +// +// Get the value contained by the optional. If the optional does not have a +// value, the result will be a CEL error. +// +// optional.of(b'hello').value() // b'hello' +// optional.ofNonZeroValue({}).value() // error +// +// # Or +// +// If the value on the left-hand side is optional.none(), the optional value +// on the right hand side is returned. If the value on the left-hand set is +// valued, then it is returned. This operation is short-circuiting and will +// only evaluate as many links in the `or` chain as are needed to return a +// non-empty optional value. +// +// obj.?field.or(m[?key]) +// l[?index].or(obj.?field.subfield).or(obj.?other) +// +// # OrValue +// +// Either return the value contained within the optional on the left-hand side +// or return the alternative value on the right hand side. +// +// m[?key].orValue("none") +// +// # OptMap +// +// Apply a transformation to the optional's underlying value if it is not empty +// and return an optional typed result based on the transformation. The +// transformation expression type must return a type T which is wrapped into +// an optional. +// +// msg.?elements.optMap(e, e.size()).orValue(0) +// +// # OptFlatMap +// +// Introduced in version: 1 +// +// Apply a transformation to the optional's underlying value if it is not empty +// and return the result. The transform expression must return an optional(T) +// rather than type T. This can be useful when dealing with zero values and +// conditionally generating an empty or non-empty result in ways which cannot +// be expressed with `optMap`. +// +// msg.?elements.optFlatMap(e, e[?0]) // return the first element if present. +func OptionalTypes(opts ...OptionalTypesOption) EnvOption { + lib := &optionalLib{version: math.MaxUint32} + for _, opt := range opts { + lib = opt(lib) } + return Lib(lib) +} + +type optionalLib struct { + version uint32 } -type optionalLibrary struct{} +// OptionalTypesOption is a functional interface for configuring the strings library. +type OptionalTypesOption func(*optionalLib) *optionalLib + +// OptionalTypesVersion configures the version of the optional type library. +// +// The version limits which functions are available. Only functions introduced +// below or equal to the given version included in the library. If this option +// is not set, all functions are available. +// +// See the library documentation to determine which version a function was introduced. +// If the documentation does not state which version a function was introduced, it can +// be assumed to be introduced at version 0, when the library was first created. +func OptionalTypesVersion(version uint32) OptionalTypesOption { + return func(lib *optionalLib) *optionalLib { + lib.version = version + return lib + } +} // LibraryName implements the SingletonLibrary interface method. -func (optionalLibrary) LibraryName() string { +func (lib *optionalLib) LibraryName() string { return "cel.lib.optional" } // CompileOptions implements the Library interface method. -func (optionalLibrary) CompileOptions() []EnvOption { +func (lib *optionalLib) CompileOptions() []EnvOption { paramTypeK := TypeParamType("K") paramTypeV := TypeParamType("V") optionalTypeV := OptionalType(paramTypeV) listTypeV := ListType(paramTypeV) mapTypeKV := MapType(paramTypeK, paramTypeV) - return []EnvOption{ + opts := []EnvOption{ // Enable the optional syntax in the parser. 
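Taken together, the documentation above translates into usage like the following sketch; the map variable, key, and fallback string are illustrative. OptionalTypesVersion can be passed to pin the library to an older function set (optFlatMap requires version 1 or the unversioned default).

package main

import (
	"fmt"
	"log"

	"github.com/google/cel-go/cel"
)

func main() {
	env, err := cel.NewEnv(
		cel.OptionalTypes(),
		cel.Variable("m", cel.MapType(cel.StringType, cel.StringType)),
	)
	if err != nil {
		log.Fatal(err)
	}
	checked, iss := env.Compile(`m[?"name"].optMap(v, "hello " + v).orValue("nobody home")`)
	if iss.Err() != nil {
		log.Fatal(iss.Err())
	}
	prg, err := env.Program(checked)
	if err != nil {
		log.Fatal(err)
	}

	out, _, err := prg.Eval(map[string]any{"m": map[string]string{"name": "world"}})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(out) // hello world

	out, _, err = prg.Eval(map[string]any{"m": map[string]string{}})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(out) // nobody home
}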
enableOptionalSyntax(), // Introduce the optional type. Types(types.OptionalType), - // Configure the optMap macro. - Macros(NewReceiverMacro(optMapMacro, 2, optMap)), + // Configure the optMap and optFlatMap macros. + Macros(ReceiverMacro(optMapMacro, 2, optMap)), // Global and member functions for working with optional values. Function(optionalOfFunc, @@ -202,46 +370,73 @@ func (optionalLibrary) CompileOptions() []EnvOption { // Index overloads to accommodate using an optional value as the operand. Function(operators.Index, Overload("optional_list_index_int", []*Type{OptionalType(listTypeV), IntType}, optionalTypeV), - Overload("optional_map_index_optional_value", []*Type{OptionalType(mapTypeKV), paramTypeK}, optionalTypeV)), + Overload("optional_map_index_value", []*Type{OptionalType(mapTypeKV), paramTypeK}, optionalTypeV)), + } + if lib.version >= 1 { + opts = append(opts, Macros(ReceiverMacro(optFlatMapMacro, 2, optFlatMap))) } + return opts } -func optMap(meh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) { +// ProgramOptions implements the Library interface method. +func (lib *optionalLib) ProgramOptions() []ProgramOption { + return []ProgramOption{ + CustomDecorator(decorateOptionalOr), + } +} + +func optMap(meh MacroExprFactory, target ast.Expr, args []ast.Expr) (ast.Expr, *Error) { varIdent := args[0] varName := "" - switch varIdent.GetExprKind().(type) { - case *exprpb.Expr_IdentExpr: - varName = varIdent.GetIdentExpr().GetName() + switch varIdent.Kind() { + case ast.IdentKind: + varName = varIdent.AsIdent() default: - return nil, &common.Error{ - Message: "optMap() variable name must be a simple identifier", - Location: meh.OffsetLocation(varIdent.GetId()), - } + return nil, meh.NewError(varIdent.ID(), "optMap() variable name must be a simple identifier") } mapExpr := args[1] - return meh.GlobalCall( + return meh.NewCall( operators.Conditional, - meh.ReceiverCall(hasValueFunc, target), - meh.GlobalCall(optionalOfFunc, - meh.Fold( - unusedIterVar, + meh.NewMemberCall(hasValueFunc, target), + meh.NewCall(optionalOfFunc, + meh.NewComprehension( meh.NewList(), + unusedIterVar, varName, - meh.ReceiverCall(valueFunc, target), - meh.LiteralBool(false), - meh.Ident(varName), + meh.NewMemberCall(valueFunc, target), + meh.NewLiteral(types.False), + meh.NewIdent(varName), mapExpr, ), ), - meh.GlobalCall(optionalNoneFunc), + meh.NewCall(optionalNoneFunc), ), nil } -// ProgramOptions implements the Library interface method. 
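For reference, a minimal usage sketch of the OptionalTypes and OptionalTypesVersion options introduced above; the declared variable and the compiled expression are illustrative assumptions rather than part of the library:

    package main

    import (
        "fmt"

        "github.com/google/cel-go/cel"
    )

    func main() {
        // OptionalTypesVersion(1) limits the library to functions introduced at or
        // below version 1 (which already includes optFlatMap); omitting the option
        // enables every version.
        env, err := cel.NewEnv(
            cel.Variable("msg", cel.MapType(cel.StringType, cel.StringType)),
            cel.OptionalTypes(cel.OptionalTypesVersion(1)),
        )
        if err != nil {
            panic(err)
        }
        // msg.?field yields optional.of(msg["field"]) when the key is present,
        // otherwise optional.none(); orValue supplies the fallback.
        ast, iss := env.Compile(`msg.?field.orValue("unset")`)
        if iss.Err() != nil {
            panic(iss.Err())
        }
        prg, err := env.Program(ast)
        if err != nil {
            panic(err)
        }
        out, _, err := prg.Eval(map[string]any{"msg": map[string]string{}})
        fmt.Println(out, err) // unset <nil>
    }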
-func (optionalLibrary) ProgramOptions() []ProgramOption { - return []ProgramOption{ - CustomDecorator(decorateOptionalOr), +func optFlatMap(meh MacroExprFactory, target ast.Expr, args []ast.Expr) (ast.Expr, *Error) { + varIdent := args[0] + varName := "" + switch varIdent.Kind() { + case ast.IdentKind: + varName = varIdent.AsIdent() + default: + return nil, meh.NewError(varIdent.ID(), "optFlatMap() variable name must be a simple identifier") } + mapExpr := args[1] + return meh.NewCall( + operators.Conditional, + meh.NewMemberCall(hasValueFunc, target), + meh.NewComprehension( + meh.NewList(), + unusedIterVar, + varName, + meh.NewMemberCall(valueFunc, target), + meh.NewLiteral(types.False), + meh.NewIdent(varName), + mapExpr, + ), + meh.NewCall(optionalNoneFunc), + ), nil } func enableOptionalSyntax() EnvOption { @@ -358,28 +553,16 @@ var ( timeOverloadDeclarations = []EnvOption{ Function(overloads.TimeGetHours, MemberOverload(overloads.DurationToHours, []*Type{DurationType}, IntType, - UnaryBinding(func(dur ref.Val) ref.Val { - d := dur.(types.Duration) - return types.Int(d.Hours()) - }))), + UnaryBinding(types.DurationGetHours))), Function(overloads.TimeGetMinutes, MemberOverload(overloads.DurationToMinutes, []*Type{DurationType}, IntType, - UnaryBinding(func(dur ref.Val) ref.Val { - d := dur.(types.Duration) - return types.Int(d.Minutes()) - }))), + UnaryBinding(types.DurationGetMinutes))), Function(overloads.TimeGetSeconds, MemberOverload(overloads.DurationToSeconds, []*Type{DurationType}, IntType, - UnaryBinding(func(dur ref.Val) ref.Val { - d := dur.(types.Duration) - return types.Int(d.Seconds()) - }))), + UnaryBinding(types.DurationGetSeconds))), Function(overloads.TimeGetMilliseconds, MemberOverload(overloads.DurationToMilliseconds, []*Type{DurationType}, IntType, - UnaryBinding(func(dur ref.Val) ref.Val { - d := dur.(types.Duration) - return types.Int(d.Milliseconds()) - }))), + UnaryBinding(types.DurationGetMilliseconds))), Function(overloads.TimeGetFullYear, MemberOverload(overloads.TimestampToYear, []*Type{TimestampType}, IntType, UnaryBinding(func(ts ref.Val) ref.Val { diff --git a/vendor/github.com/google/cel-go/cel/macro.go b/vendor/github.com/google/cel-go/cel/macro.go index e48c5bf8e..4db1fd57a 100644 --- a/vendor/github.com/google/cel-go/cel/macro.go +++ b/vendor/github.com/google/cel-go/cel/macro.go @@ -15,7 +15,11 @@ package cel import ( + "fmt" + "github.com/google/cel-go/common" + "github.com/google/cel-go/common/ast" + "github.com/google/cel-go/common/types" "github.com/google/cel-go/parser" exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" @@ -27,7 +31,14 @@ import ( // a Macro should be created per arg-count or as a var arg macro. type Macro = parser.Macro -// MacroExpander converts a call and its associated arguments into a new CEL abstract syntax tree. +// MacroFactory defines an expansion function which converts a call and its arguments to a cel.Expr value. +type MacroFactory = parser.MacroExpander + +// MacroExprFactory assists with the creation of Expr values in a manner which is consistent +// the internal semantics and id generation behaviors of the parser and checker libraries. +type MacroExprFactory = parser.ExprHelper + +// MacroExpander converts a call and its associated arguments into a protobuf Expr representation. // // If the MacroExpander determines within the implementation that an expansion is not needed it may return // a nil Expr value to indicate a non-match. 
However, if an expansion is to be performed, but the arguments @@ -37,48 +48,197 @@ type Macro = parser.Macro // and produces as output an Expr ast node. // // Note: when the Macro.IsReceiverStyle() method returns true, the target argument will be nil. -type MacroExpander = parser.MacroExpander +type MacroExpander func(eh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *Error) // MacroExprHelper exposes helper methods for creating new expressions within a CEL abstract syntax tree. -type MacroExprHelper = parser.ExprHelper +// ExprHelper assists with the manipulation of proto-based Expr values in a manner which is +// consistent with the source position and expression id generation code leveraged by both +// the parser and type-checker. +type MacroExprHelper interface { + // Copy the input expression with a brand new set of identifiers. + Copy(*exprpb.Expr) *exprpb.Expr + + // LiteralBool creates an Expr value for a bool literal. + LiteralBool(value bool) *exprpb.Expr + + // LiteralBytes creates an Expr value for a byte literal. + LiteralBytes(value []byte) *exprpb.Expr + + // LiteralDouble creates an Expr value for double literal. + LiteralDouble(value float64) *exprpb.Expr + + // LiteralInt creates an Expr value for an int literal. + LiteralInt(value int64) *exprpb.Expr + + // LiteralString creates am Expr value for a string literal. + LiteralString(value string) *exprpb.Expr + + // LiteralUint creates an Expr value for a uint literal. + LiteralUint(value uint64) *exprpb.Expr + + // NewList creates a CreateList instruction where the list is comprised of the optional set + // of elements provided as arguments. + NewList(elems ...*exprpb.Expr) *exprpb.Expr + + // NewMap creates a CreateStruct instruction for a map where the map is comprised of the + // optional set of key, value entries. + NewMap(entries ...*exprpb.Expr_CreateStruct_Entry) *exprpb.Expr + + // NewMapEntry creates a Map Entry for the key, value pair. + NewMapEntry(key *exprpb.Expr, val *exprpb.Expr, optional bool) *exprpb.Expr_CreateStruct_Entry + + // NewObject creates a CreateStruct instruction for an object with a given type name and + // optional set of field initializers. + NewObject(typeName string, fieldInits ...*exprpb.Expr_CreateStruct_Entry) *exprpb.Expr + + // NewObjectFieldInit creates a new Object field initializer from the field name and value. + NewObjectFieldInit(field string, init *exprpb.Expr, optional bool) *exprpb.Expr_CreateStruct_Entry + + // Fold creates a fold comprehension instruction. + // + // - iterVar is the iteration variable name. + // - iterRange represents the expression that resolves to a list or map where the elements or + // keys (respectively) will be iterated over. + // - accuVar is the accumulation variable name, typically parser.AccumulatorName. + // - accuInit is the initial expression whose value will be set for the accuVar prior to + // folding. + // - condition is the expression to test to determine whether to continue folding. + // - step is the expression to evaluation at the conclusion of a single fold iteration. + // - result is the computation to evaluate at the conclusion of the fold. + // + // The accuVar should not shadow variable names that you would like to reference within the + // environment in the step and condition expressions. Presently, the name __result__ is commonly + // used by built-in macros but this may change in the future. 
+ Fold(iterVar string, + iterRange *exprpb.Expr, + accuVar string, + accuInit *exprpb.Expr, + condition *exprpb.Expr, + step *exprpb.Expr, + result *exprpb.Expr) *exprpb.Expr + + // Ident creates an identifier Expr value. + Ident(name string) *exprpb.Expr + + // AccuIdent returns an accumulator identifier for use with comprehension results. + AccuIdent() *exprpb.Expr + + // GlobalCall creates a function call Expr value for a global (free) function. + GlobalCall(function string, args ...*exprpb.Expr) *exprpb.Expr + + // ReceiverCall creates a function call Expr value for a receiver-style function. + ReceiverCall(function string, target *exprpb.Expr, args ...*exprpb.Expr) *exprpb.Expr + + // PresenceTest creates a Select TestOnly Expr value for modelling has() semantics. + PresenceTest(operand *exprpb.Expr, field string) *exprpb.Expr + + // Select create a field traversal Expr value. + Select(operand *exprpb.Expr, field string) *exprpb.Expr + + // OffsetLocation returns the Location of the expression identifier. + OffsetLocation(exprID int64) common.Location + + // NewError associates an error message with a given expression id. + NewError(exprID int64, message string) *Error +} + +// GlobalMacro creates a Macro for a global function with the specified arg count. +func GlobalMacro(function string, argCount int, factory MacroFactory) Macro { + return parser.NewGlobalMacro(function, argCount, factory) +} + +// ReceiverMacro creates a Macro for a receiver function matching the specified arg count. +func ReceiverMacro(function string, argCount int, factory MacroFactory) Macro { + return parser.NewReceiverMacro(function, argCount, factory) +} + +// GlobalVarArgMacro creates a Macro for a global function with a variable arg count. +func GlobalVarArgMacro(function string, factory MacroFactory) Macro { + return parser.NewGlobalVarArgMacro(function, factory) +} + +// ReceiverVarArgMacro creates a Macro for a receiver function matching a variable arg count. +func ReceiverVarArgMacro(function string, factory MacroFactory) Macro { + return parser.NewReceiverVarArgMacro(function, factory) +} // NewGlobalMacro creates a Macro for a global function with the specified arg count. +// +// Deprecated: use GlobalMacro func NewGlobalMacro(function string, argCount int, expander MacroExpander) Macro { - return parser.NewGlobalMacro(function, argCount, expander) + expand := adaptingExpander{expander} + return parser.NewGlobalMacro(function, argCount, expand.Expander) } // NewReceiverMacro creates a Macro for a receiver function matching the specified arg count. +// +// Deprecated: use ReceiverMacro func NewReceiverMacro(function string, argCount int, expander MacroExpander) Macro { - return parser.NewReceiverMacro(function, argCount, expander) + expand := adaptingExpander{expander} + return parser.NewReceiverMacro(function, argCount, expand.Expander) } // NewGlobalVarArgMacro creates a Macro for a global function with a variable arg count. +// +// Deprecated: use GlobalVarArgMacro func NewGlobalVarArgMacro(function string, expander MacroExpander) Macro { - return parser.NewGlobalVarArgMacro(function, expander) + expand := adaptingExpander{expander} + return parser.NewGlobalVarArgMacro(function, expand.Expander) } // NewReceiverVarArgMacro creates a Macro for a receiver function matching a variable arg count. 
+// +// Deprecated: use ReceiverVarArgMacro func NewReceiverVarArgMacro(function string, expander MacroExpander) Macro { - return parser.NewReceiverVarArgMacro(function, expander) + expand := adaptingExpander{expander} + return parser.NewReceiverVarArgMacro(function, expand.Expander) } // HasMacroExpander expands the input call arguments into a presence test, e.g. has(<operand>.field) -func HasMacroExpander(meh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) { - return parser.MakeHas(meh, target, args) +func HasMacroExpander(meh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *Error) { + ph, err := toParserHelper(meh) + if err != nil { + return nil, err + } + arg, err := adaptToExpr(args[0]) + if err != nil { + return nil, err + } + if arg.Kind() == ast.SelectKind { + s := arg.AsSelect() + return adaptToProto(ph.NewPresenceTest(s.Operand(), s.FieldName())) + } + return nil, ph.NewError(arg.ID(), "invalid argument to has() macro") } // ExistsMacroExpander expands the input call arguments into a comprehension that returns true if any of the // elements in the range match the predicate expressions: // <iterRange>.exists(<iterVar>, <predicate>) -func ExistsMacroExpander(meh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) { - return parser.MakeExists(meh, target, args) +func ExistsMacroExpander(meh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *Error) { + ph, err := toParserHelper(meh) + if err != nil { + return nil, err + } + out, err := parser.MakeExists(ph, mustAdaptToExpr(target), mustAdaptToExprs(args)) + if err != nil { + return nil, err + } + return adaptToProto(out) } // ExistsOneMacroExpander expands the input call arguments into a comprehension that returns true if exactly // one of the elements in the range match the predicate expressions: // <iterRange>.exists_one(<iterVar>, <predicate>) -func ExistsOneMacroExpander(meh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) { - return parser.MakeExistsOne(meh, target, args) +func ExistsOneMacroExpander(meh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *Error) { + ph, err := toParserHelper(meh) + if err != nil { + return nil, err + } + out, err := parser.MakeExistsOne(ph, mustAdaptToExpr(target), mustAdaptToExprs(args)) + if err != nil { + return nil, err + } + return adaptToProto(out) } // MapMacroExpander expands the input call arguments into a comprehension that transforms each element in the @@ -91,15 +251,31 @@ func ExistsOneMacroExpander(meh MacroExprHelper, target *exprpb.Expr, args []*ex // // In the second form only iterVar values which return true when provided to the predicate expression // are transformed.
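As a usage sketch of the factory-based constructors above, a receiver-style macro can now be written directly against ast.Expr values; the macro name `zero` and its expansion are illustrative assumptions:

    package example

    import (
        "github.com/google/cel-go/cel"
        "github.com/google/cel-go/common"
        "github.com/google/cel-go/common/ast"
        "github.com/google/cel-go/common/types"
    )

    // zeroMacro expands `<expr>.zero()` into the int literal 0 at parse time,
    // so no runtime function binding is required.
    func zeroMacro(mef cel.MacroExprFactory, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) {
        return mef.NewLiteral(types.Int(0)), nil
    }

    // newEnvWithZeroMacro registers the macro; with it, the expression
    // `x.zero() == 0` parses to `0 == 0`.
    func newEnvWithZeroMacro() (*cel.Env, error) {
        return cel.NewEnv(
            cel.Variable("x", cel.IntType),
            cel.Macros(cel.ReceiverMacro("zero", 0, zeroMacro)),
        )
    }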
-func MapMacroExpander(meh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) { - return parser.MakeMap(meh, target, args) +func MapMacroExpander(meh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *Error) { + ph, err := toParserHelper(meh) + if err != nil { + return nil, err + } + out, err := parser.MakeMap(ph, mustAdaptToExpr(target), mustAdaptToExprs(args)) + if err != nil { + return nil, err + } + return adaptToProto(out) } // FilterMacroExpander expands the input call arguments into a comprehension which produces a list which contains // only elements which match the provided predicate expression: // <iterRange>.filter(<iterVar>, <predicate>) -func FilterMacroExpander(meh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) { - return parser.MakeFilter(meh, target, args) +func FilterMacroExpander(meh MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *Error) { + ph, err := toParserHelper(meh) + if err != nil { + return nil, err + } + out, err := parser.MakeFilter(ph, mustAdaptToExpr(target), mustAdaptToExprs(args)) + if err != nil { + return nil, err + } + return adaptToProto(out) } var ( @@ -143,3 +319,258 @@ var ( // NoMacros provides an alias to an empty list of macros NoMacros = []Macro{} ) + +type adaptingExpander struct { + legacyExpander MacroExpander +} + +func (adapt *adaptingExpander) Expander(eh parser.ExprHelper, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) { + var legacyTarget *exprpb.Expr = nil + var err *Error = nil + if target != nil { + legacyTarget, err = adaptToProto(target) + if err != nil { + return nil, err + } + } + legacyArgs := make([]*exprpb.Expr, len(args)) + for i, arg := range args { + legacyArgs[i], err = adaptToProto(arg) + if err != nil { + return nil, err + } + } + ah := &adaptingHelper{modernHelper: eh} + legacyExpr, err := adapt.legacyExpander(ah, legacyTarget, legacyArgs) + if err != nil { + return nil, err + } + ex, err := adaptToExpr(legacyExpr) + if err != nil { + return nil, err + } + return ex, nil +} + +func wrapErr(id int64, message string, err error) *common.Error { + return &common.Error{ + Location: common.NoLocation, + Message: fmt.Sprintf("%s: %v", message, err), + ExprID: id, + } +} + +type adaptingHelper struct { + modernHelper parser.ExprHelper +} + +// Copy the input expression with a brand new set of identifiers. +func (ah *adaptingHelper) Copy(e *exprpb.Expr) *exprpb.Expr { + return mustAdaptToProto(ah.modernHelper.Copy(mustAdaptToExpr(e))) +} + +// LiteralBool creates an Expr value for a bool literal. +func (ah *adaptingHelper) LiteralBool(value bool) *exprpb.Expr { + return mustAdaptToProto(ah.modernHelper.NewLiteral(types.Bool(value))) +} + +// LiteralBytes creates an Expr value for a byte literal. +func (ah *adaptingHelper) LiteralBytes(value []byte) *exprpb.Expr { + return mustAdaptToProto(ah.modernHelper.NewLiteral(types.Bytes(value))) +} + +// LiteralDouble creates an Expr value for double literal. +func (ah *adaptingHelper) LiteralDouble(value float64) *exprpb.Expr { + return mustAdaptToProto(ah.modernHelper.NewLiteral(types.Double(value))) +} + +// LiteralInt creates an Expr value for an int literal. +func (ah *adaptingHelper) LiteralInt(value int64) *exprpb.Expr { + return mustAdaptToProto(ah.modernHelper.NewLiteral(types.Int(value))) +} + +// LiteralString creates an Expr value for a string literal.
+func (ah *adaptingHelper) LiteralString(value string) *exprpb.Expr { + return mustAdaptToProto(ah.modernHelper.NewLiteral(types.String(value))) +} + +// LiteralUint creates an Expr value for a uint literal. +func (ah *adaptingHelper) LiteralUint(value uint64) *exprpb.Expr { + return mustAdaptToProto(ah.modernHelper.NewLiteral(types.Uint(value))) +} + +// NewList creates a CreateList instruction where the list is comprised of the optional set +// of elements provided as arguments. +func (ah *adaptingHelper) NewList(elems ...*exprpb.Expr) *exprpb.Expr { + return mustAdaptToProto(ah.modernHelper.NewList(mustAdaptToExprs(elems)...)) +} + +// NewMap creates a CreateStruct instruction for a map where the map is comprised of the +// optional set of key, value entries. +func (ah *adaptingHelper) NewMap(entries ...*exprpb.Expr_CreateStruct_Entry) *exprpb.Expr { + adaptedEntries := make([]ast.EntryExpr, len(entries)) + for i, e := range entries { + adaptedEntries[i] = mustAdaptToEntryExpr(e) + } + return mustAdaptToProto(ah.modernHelper.NewMap(adaptedEntries...)) +} + +// NewMapEntry creates a Map Entry for the key, value pair. +func (ah *adaptingHelper) NewMapEntry(key *exprpb.Expr, val *exprpb.Expr, optional bool) *exprpb.Expr_CreateStruct_Entry { + return mustAdaptToProtoEntry( + ah.modernHelper.NewMapEntry(mustAdaptToExpr(key), mustAdaptToExpr(val), optional)) +} + +// NewObject creates a CreateStruct instruction for an object with a given type name and +// optional set of field initializers. +func (ah *adaptingHelper) NewObject(typeName string, fieldInits ...*exprpb.Expr_CreateStruct_Entry) *exprpb.Expr { + adaptedEntries := make([]ast.EntryExpr, len(fieldInits)) + for i, e := range fieldInits { + adaptedEntries[i] = mustAdaptToEntryExpr(e) + } + return mustAdaptToProto(ah.modernHelper.NewStruct(typeName, adaptedEntries...)) +} + +// NewObjectFieldInit creates a new Object field initializer from the field name and value. +func (ah *adaptingHelper) NewObjectFieldInit(field string, init *exprpb.Expr, optional bool) *exprpb.Expr_CreateStruct_Entry { + return mustAdaptToProtoEntry( + ah.modernHelper.NewStructField(field, mustAdaptToExpr(init), optional)) +} + +// Fold creates a fold comprehension instruction. +// +// - iterVar is the iteration variable name. +// - iterRange represents the expression that resolves to a list or map where the elements or +// keys (respectively) will be iterated over. +// - accuVar is the accumulation variable name, typically parser.AccumulatorName. +// - accuInit is the initial expression whose value will be set for the accuVar prior to +// folding. +// - condition is the expression to test to determine whether to continue folding. +// - step is the expression to evaluation at the conclusion of a single fold iteration. +// - result is the computation to evaluate at the conclusion of the fold. +// +// The accuVar should not shadow variable names that you would like to reference within the +// environment in the step and condition expressions. Presently, the name __result__ is commonly +// used by built-in macros but this may change in the future. 
+func (ah *adaptingHelper) Fold(iterVar string, + iterRange *exprpb.Expr, + accuVar string, + accuInit *exprpb.Expr, + condition *exprpb.Expr, + step *exprpb.Expr, + result *exprpb.Expr) *exprpb.Expr { + return mustAdaptToProto( + ah.modernHelper.NewComprehension( + mustAdaptToExpr(iterRange), + iterVar, + accuVar, + mustAdaptToExpr(accuInit), + mustAdaptToExpr(condition), + mustAdaptToExpr(step), + mustAdaptToExpr(result), + ), + ) +} + +// Ident creates an identifier Expr value. +func (ah *adaptingHelper) Ident(name string) *exprpb.Expr { + return mustAdaptToProto(ah.modernHelper.NewIdent(name)) +} + +// AccuIdent returns an accumulator identifier for use with comprehension results. +func (ah *adaptingHelper) AccuIdent() *exprpb.Expr { + return mustAdaptToProto(ah.modernHelper.NewAccuIdent()) +} + +// GlobalCall creates a function call Expr value for a global (free) function. +func (ah *adaptingHelper) GlobalCall(function string, args ...*exprpb.Expr) *exprpb.Expr { + return mustAdaptToProto(ah.modernHelper.NewCall(function, mustAdaptToExprs(args)...)) +} + +// ReceiverCall creates a function call Expr value for a receiver-style function. +func (ah *adaptingHelper) ReceiverCall(function string, target *exprpb.Expr, args ...*exprpb.Expr) *exprpb.Expr { + return mustAdaptToProto( + ah.modernHelper.NewMemberCall(function, mustAdaptToExpr(target), mustAdaptToExprs(args)...)) +} + +// PresenceTest creates a Select TestOnly Expr value for modelling has() semantics. +func (ah *adaptingHelper) PresenceTest(operand *exprpb.Expr, field string) *exprpb.Expr { + op := mustAdaptToExpr(operand) + return mustAdaptToProto(ah.modernHelper.NewPresenceTest(op, field)) +} + +// Select create a field traversal Expr value. +func (ah *adaptingHelper) Select(operand *exprpb.Expr, field string) *exprpb.Expr { + op := mustAdaptToExpr(operand) + return mustAdaptToProto(ah.modernHelper.NewSelect(op, field)) +} + +// OffsetLocation returns the Location of the expression identifier. +func (ah *adaptingHelper) OffsetLocation(exprID int64) common.Location { + return ah.modernHelper.OffsetLocation(exprID) +} + +// NewError associates an error message with a given expression id. 
+func (ah *adaptingHelper) NewError(exprID int64, message string) *Error { + return ah.modernHelper.NewError(exprID, message) +} + +func mustAdaptToExprs(exprs []*exprpb.Expr) []ast.Expr { + adapted := make([]ast.Expr, len(exprs)) + for i, e := range exprs { + adapted[i] = mustAdaptToExpr(e) + } + return adapted +} + +func mustAdaptToExpr(e *exprpb.Expr) ast.Expr { + out, _ := adaptToExpr(e) + return out +} + +func adaptToExpr(e *exprpb.Expr) (ast.Expr, *Error) { + if e == nil { + return nil, nil + } + out, err := ast.ProtoToExpr(e) + if err != nil { + return nil, wrapErr(e.GetId(), "proto conversion failure", err) + } + return out, nil +} + +func mustAdaptToEntryExpr(e *exprpb.Expr_CreateStruct_Entry) ast.EntryExpr { + out, _ := ast.ProtoToEntryExpr(e) + return out +} + +func mustAdaptToProto(e ast.Expr) *exprpb.Expr { + out, _ := adaptToProto(e) + return out +} + +func adaptToProto(e ast.Expr) (*exprpb.Expr, *Error) { + if e == nil { + return nil, nil + } + out, err := ast.ExprToProto(e) + if err != nil { + return nil, wrapErr(e.ID(), "expr conversion failure", err) + } + return out, nil +} + +func mustAdaptToProtoEntry(e ast.EntryExpr) *exprpb.Expr_CreateStruct_Entry { + out, _ := ast.EntryExprToProto(e) + return out +} + +func toParserHelper(meh MacroExprHelper) (parser.ExprHelper, *Error) { + ah, ok := meh.(*adaptingHelper) + if !ok { + return nil, common.NewError(0, + fmt.Sprintf("unsupported macro helper: %v (%T)", meh, meh), + common.NoLocation) + } + return ah.modernHelper, nil +} diff --git a/vendor/github.com/google/cel-go/cel/optimizer.go b/vendor/github.com/google/cel-go/cel/optimizer.go new file mode 100644 index 000000000..99aeeb815 --- /dev/null +++ b/vendor/github.com/google/cel-go/cel/optimizer.go @@ -0,0 +1,482 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cel + +import ( + "github.com/google/cel-go/common" + "github.com/google/cel-go/common/ast" + "github.com/google/cel-go/common/types" + "github.com/google/cel-go/common/types/ref" +) + +// StaticOptimizer contains a sequence of ASTOptimizer instances which will be applied in order. +// +// The static optimizer normalizes expression ids and type-checking run between optimization +// passes to ensure that the final optimized output is a valid expression with metadata consistent +// with what would have been generated from a parsed and checked expression. +// +// Note: source position information is best-effort and likely wrong, but optimized expressions +// should be suitable for calls to parser.Unparse. +type StaticOptimizer struct { + optimizers []ASTOptimizer +} + +// NewStaticOptimizer creates a StaticOptimizer with a sequence of ASTOptimizer's to be applied +// to a checked expression. +func NewStaticOptimizer(optimizers ...ASTOptimizer) *StaticOptimizer { + return &StaticOptimizer{ + optimizers: optimizers, + } +} + +// Optimize applies a sequence of optimizations to an Ast within a given environment. 
+// +// If issues are encountered, the Issues.Err() return value will be non-nil. +func (opt *StaticOptimizer) Optimize(env *Env, a *Ast) (*Ast, *Issues) { + // Make a copy of the AST to be optimized. + optimized := ast.Copy(a.impl) + ids := newIDGenerator(ast.MaxID(a.impl)) + + // Create the optimizer context, could be pooled in the future. + issues := NewIssues(common.NewErrors(a.Source())) + baseFac := ast.NewExprFactory() + exprFac := &optimizerExprFactory{ + idGenerator: ids, + fac: baseFac, + sourceInfo: optimized.SourceInfo(), + } + ctx := &OptimizerContext{ + optimizerExprFactory: exprFac, + Env: env, + Issues: issues, + } + + // Apply the optimizations sequentially. + for _, o := range opt.optimizers { + optimized = o.Optimize(ctx, optimized) + if issues.Err() != nil { + return nil, issues + } + // Normalize expression id metadata including coordination with macro call metadata. + freshIDGen := newIDGenerator(0) + info := optimized.SourceInfo() + expr := optimized.Expr() + normalizeIDs(freshIDGen.renumberStable, expr, info) + cleanupMacroRefs(expr, info) + + // Recheck the updated expression for any possible type-agreement or validation errors. + parsed := &Ast{ + source: a.Source(), + impl: ast.NewAST(expr, info)} + checked, iss := ctx.Check(parsed) + if iss.Err() != nil { + return nil, iss + } + optimized = checked.impl + } + + // Return the optimized result. + return &Ast{ + source: a.Source(), + impl: optimized, + }, nil +} + +// normalizeIDs ensures that the metadata present with an AST is reset in a manner such +// that the ids within the expression correspond to the ids within macros. +func normalizeIDs(idGen ast.IDGenerator, optimized ast.Expr, info *ast.SourceInfo) { + optimized.RenumberIDs(idGen) + + if len(info.MacroCalls()) == 0 { + return + } + + // First, update the macro call ids themselves. + callIDMap := map[int64]int64{} + for id := range info.MacroCalls() { + callIDMap[id] = idGen(id) + } + // Then update the macro call definitions which refer to these ids, but + // ensure that the updates don't collide and remove macro entries which haven't + // been visited / updated yet. + type macroUpdate struct { + id int64 + call ast.Expr + } + macroUpdates := []macroUpdate{} + for oldID, newID := range callIDMap { + call, found := info.GetMacroCall(oldID) + if !found { + continue + } + call.RenumberIDs(idGen) + macroUpdates = append(macroUpdates, macroUpdate{id: newID, call: call}) + info.ClearMacroCall(oldID) + } + for _, u := range macroUpdates { + info.SetMacroCall(u.id, u.call) + } +} + +func cleanupMacroRefs(expr ast.Expr, info *ast.SourceInfo) { + if len(info.MacroCalls()) == 0 { + return + } + // Sanitize the macro call references once the optimized expression has been computed + // and the ids normalized between the expression and the macros. + exprRefMap := make(map[int64]struct{}) + ast.PostOrderVisit(expr, ast.NewExprVisitor(func(e ast.Expr) { + if e.ID() == 0 { + return + } + exprRefMap[e.ID()] = struct{}{} + })) + // Update the macro call id references to ensure that macro pointers are + // updated consistently across macros. + for _, call := range info.MacroCalls() { + ast.PostOrderVisit(call, ast.NewExprVisitor(func(e ast.Expr) { + if e.ID() == 0 { + return + } + exprRefMap[e.ID()] = struct{}{} + })) + } + for id := range info.MacroCalls() { + if _, found := exprRefMap[id]; !found { + info.ClearMacroCall(id) + } + } +} + +// newIDGenerator ensures that new ids are only created the first time they are encountered. 
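A short sketch of driving the StaticOptimizer entry point above; the identityOptimizer pass is an illustrative assumption that returns its input unchanged, whereas real passes rewrite the AST through the OptimizerContext and the factory methods defined just below:

    package example

    import (
        "github.com/google/cel-go/cel"
        "github.com/google/cel-go/common/ast"
    )

    // identityOptimizer satisfies cel.ASTOptimizer but performs no rewrites.
    type identityOptimizer struct{}

    func (identityOptimizer) Optimize(ctx *cel.OptimizerContext, a *ast.AST) *ast.AST {
        return a
    }

    // optimize type-checks the expression and then runs the (no-op) pass.
    func optimize(env *cel.Env, expr string) (*cel.Ast, error) {
        checked, iss := env.Compile(expr)
        if iss.Err() != nil {
            return nil, iss.Err()
        }
        opt := cel.NewStaticOptimizer(identityOptimizer{})
        optimized, iss := opt.Optimize(env, checked)
        if iss != nil && iss.Err() != nil {
            return nil, iss.Err()
        }
        return optimized, nil
    }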
+func newIDGenerator(seed int64) *idGenerator { + return &idGenerator{ + idMap: make(map[int64]int64), + seed: seed, + } +} + +type idGenerator struct { + idMap map[int64]int64 + seed int64 +} + +func (gen *idGenerator) nextID() int64 { + gen.seed++ + return gen.seed +} + +func (gen *idGenerator) renumberStable(id int64) int64 { + if id == 0 { + return 0 + } + if newID, found := gen.idMap[id]; found { + return newID + } + nextID := gen.nextID() + gen.idMap[id] = nextID + return nextID +} + +// OptimizerContext embeds Env and Issues instances to make it easy to type-check and evaluate +// subexpressions and report any errors encountered along the way. The context also embeds the +// optimizerExprFactory which can be used to generate new sub-expressions with expression ids +// consistent with the expectations of a parsed expression. +type OptimizerContext struct { + *Env + *optimizerExprFactory + *Issues +} + +// ASTOptimizer applies an optimization over an AST and returns the optimized result. +type ASTOptimizer interface { + // Optimize optimizes a type-checked AST within an Environment and accumulates any issues. + Optimize(*OptimizerContext, *ast.AST) *ast.AST +} + +type optimizerExprFactory struct { + *idGenerator + fac ast.ExprFactory + sourceInfo *ast.SourceInfo +} + +// CopyAST creates a renumbered copy of `Expr` and `SourceInfo` values of the input AST, where the +// renumbering uses the same scheme as the core optimizer logic ensuring there are no collisions +// between copies. +// +// Use this method before attempting to merge the expression from AST into another. +func (opt *optimizerExprFactory) CopyAST(a *ast.AST) (ast.Expr, *ast.SourceInfo) { + idGen := newIDGenerator(opt.nextID()) + defer func() { opt.seed = idGen.nextID() }() + copyExpr := opt.fac.CopyExpr(a.Expr()) + copyInfo := ast.CopySourceInfo(a.SourceInfo()) + normalizeIDs(idGen.renumberStable, copyExpr, copyInfo) + return copyExpr, copyInfo +} + +// NewBindMacro creates an AST expression representing the expanded bind() macro, and a macro expression +// representing the unexpanded call signature to be inserted into the source info macro call metadata. +func (opt *optimizerExprFactory) NewBindMacro(macroID int64, varName string, varInit, remaining ast.Expr) (astExpr, macroExpr ast.Expr) { + varID := opt.nextID() + remainingID := opt.nextID() + remaining = opt.fac.CopyExpr(remaining) + remaining.RenumberIDs(func(id int64) int64 { + if id == macroID { + return remainingID + } + return id + }) + if call, exists := opt.sourceInfo.GetMacroCall(macroID); exists { + opt.sourceInfo.SetMacroCall(remainingID, opt.fac.CopyExpr(call)) + } + + astExpr = opt.fac.NewComprehension(macroID, + opt.fac.NewList(opt.nextID(), []ast.Expr{}, []int32{}), + "#unused", + varName, + opt.fac.CopyExpr(varInit), + opt.fac.NewLiteral(opt.nextID(), types.False), + opt.fac.NewIdent(varID, varName), + remaining) + + macroExpr = opt.fac.NewMemberCall(0, "bind", + opt.fac.NewIdent(opt.nextID(), "cel"), + opt.fac.NewIdent(varID, varName), + opt.fac.CopyExpr(varInit), + opt.fac.CopyExpr(remaining)) + opt.sanitizeMacro(macroID, macroExpr) + return +} + +// NewCall creates a global function call invocation expression. +// +// Example: +// +// countByField(list, fieldName) +// - function: countByField +// - args: [list, fieldName] +func (opt *optimizerExprFactory) NewCall(function string, args ...ast.Expr) ast.Expr { + return opt.fac.NewCall(opt.nextID(), function, args...) 
+} + +// NewMemberCall creates a member function call invocation expression where 'target' is the receiver of the call. +// +// Example: +// +// list.countByField(fieldName) +// - function: countByField +// - target: list +// - args: [fieldName] +func (opt *optimizerExprFactory) NewMemberCall(function string, target ast.Expr, args ...ast.Expr) ast.Expr { + return opt.fac.NewMemberCall(opt.nextID(), function, target, args...) +} + +// NewIdent creates a new identifier expression. +// +// Examples: +// +// - simple_var_name +// - qualified.subpackage.var_name +func (opt *optimizerExprFactory) NewIdent(name string) ast.Expr { + return opt.fac.NewIdent(opt.nextID(), name) +} + +// NewLiteral creates a new literal expression value. +// +// The range of valid values for a literal generated during optimization is different than for expressions +// generated via parsing / type-checking, as the ref.Val may be _any_ CEL value so long as the value can +// be converted back to a literal-like form. +func (opt *optimizerExprFactory) NewLiteral(value ref.Val) ast.Expr { + return opt.fac.NewLiteral(opt.nextID(), value) +} + +// NewList creates a list expression with a set of optional indices. +// +// Examples: +// +// [a, b] +// - elems: [a, b] +// - optIndices: [] +// +// [a, ?b, ?c] +// - elems: [a, b, c] +// - optIndices: [1, 2] +func (opt *optimizerExprFactory) NewList(elems []ast.Expr, optIndices []int32) ast.Expr { + return opt.fac.NewList(opt.nextID(), elems, optIndices) +} + +// NewMap creates a map from a set of entry expressions which contain a key and value expression. +func (opt *optimizerExprFactory) NewMap(entries []ast.EntryExpr) ast.Expr { + return opt.fac.NewMap(opt.nextID(), entries) +} + +// NewMapEntry creates a map entry with a key and value expression and a flag to indicate whether the +// entry is optional. +// +// Examples: +// +// {a: b} +// - key: a +// - value: b +// - optional: false +// +// {?a: ?b} +// - key: a +// - value: b +// - optional: true +func (opt *optimizerExprFactory) NewMapEntry(key, value ast.Expr, isOptional bool) ast.EntryExpr { + return opt.fac.NewMapEntry(opt.nextID(), key, value, isOptional) +} + +// NewHasMacro generates a test-only select expression to be included within an AST and an unexpanded +// has() macro call signature to be inserted into the source info macro call metadata. +func (opt *optimizerExprFactory) NewHasMacro(macroID int64, s ast.Expr) (astExpr, macroExpr ast.Expr) { + sel := s.AsSelect() + astExpr = opt.fac.NewPresenceTest(macroID, sel.Operand(), sel.FieldName()) + macroExpr = opt.fac.NewCall(0, "has", + opt.NewSelect(opt.fac.CopyExpr(sel.Operand()), sel.FieldName())) + opt.sanitizeMacro(macroID, macroExpr) + return +} + +// NewSelect creates a select expression where a field value is selected from an operand. +// +// Example: +// +// msg.field_name +// - operand: msg +// - field: field_name +func (opt *optimizerExprFactory) NewSelect(operand ast.Expr, field string) ast.Expr { + return opt.fac.NewSelect(opt.nextID(), operand, field) +} + +// NewStruct creates a new typed struct value with an set of field initializations. +// +// Example: +// +// pkg.TypeName{field: value} +// - typeName: pkg.TypeName +// - fields: [{field: value}] +func (opt *optimizerExprFactory) NewStruct(typeName string, fields []ast.EntryExpr) ast.Expr { + return opt.fac.NewStruct(opt.nextID(), typeName, fields) +} + +// NewStructField creates a struct field initialization. 
+// +// Examples: +// +// {count: 3u} +// - field: count +// - value: 3u +// - optional: false +// +// {?count: x} +// - field: count +// - value: x +// - optional: true +func (opt *optimizerExprFactory) NewStructField(field string, value ast.Expr, isOptional bool) ast.EntryExpr { + return opt.fac.NewStructField(opt.nextID(), field, value, isOptional) +} + +// UpdateExpr updates the target expression with the updated content while preserving macro metadata. +// +// There are four scenarios during the update to consider: +// 1. target is not macro, updated is not macro +// 2. target is macro, updated is not macro +// 3. target is macro, updated is macro +// 4. target is not macro, updated is macro +// +// When the target is a macro already, it may either be updated to a new macro function +// body if the update is also a macro, or it may be removed altogether if the update is +// not a macro. +// +// When the update is a macro, then the target references within other macros must be +// updated to point to the new updated macro. Otherwise, other macros which pointed to +// the target body must be replaced with copies of the updated expression body. +func (opt *optimizerExprFactory) UpdateExpr(target, updated ast.Expr) { + // Update the expression + target.SetKindCase(updated) + + // Early return if there are no macros present, as the source info reflects the + // macro set from the target and updated expressions. + if len(opt.sourceInfo.MacroCalls()) == 0 { + return + } + // Determine whether the target expression was a macro. + _, targetIsMacro := opt.sourceInfo.GetMacroCall(target.ID()) + + // Determine whether the updated expression was a macro. + updatedMacro, updatedIsMacro := opt.sourceInfo.GetMacroCall(updated.ID()) + + if updatedIsMacro { + // If the updated call was a macro, then updated id maps to target id, + // and the updated macro moves into the target id slot. + opt.sourceInfo.ClearMacroCall(updated.ID()) + opt.sourceInfo.SetMacroCall(target.ID(), updatedMacro) + } else if targetIsMacro { + // Otherwise if the target expr was a macro, but is no longer, clear + // the macro reference. + opt.sourceInfo.ClearMacroCall(target.ID()) + } + + // Punch holes in the updated value where macro references exist. + macroExpr := opt.fac.CopyExpr(target) + macroRefVisitor := ast.NewExprVisitor(func(e ast.Expr) { + if _, exists := opt.sourceInfo.GetMacroCall(e.ID()); exists { + e.SetKindCase(nil) + } + }) + ast.PostOrderVisit(macroExpr, macroRefVisitor) + + // Update any references to the expression within a macro + macroVisitor := ast.NewExprVisitor(func(call ast.Expr) { + // Update the target expression to point to the macro expression which + // will be empty if the updated expression was a macro. + if call.ID() == target.ID() { + call.SetKindCase(opt.fac.CopyExpr(macroExpr)) + } + // Update the macro call expression if it refers to the updated expression + // id which has since been remapped to the target id. + if call.ID() == updated.ID() { + // Either ensure the expression is a macro reference or is populated with + // the relevant sub-expression if the updated expr was not a macro. + if updatedIsMacro { + call.SetKindCase(nil) + } else { + call.SetKindCase(opt.fac.CopyExpr(macroExpr)) + } + // Since SetKindCase does not renumber the id, ensure the references to + // the old 'updated' id are mapped to the target id.
+ call.RenumberIDs(func(id int64) int64 { + if id == updated.ID() { + return target.ID() + } + return id + }) + } + }) + for _, call := range opt.sourceInfo.MacroCalls() { + ast.PostOrderVisit(call, macroVisitor) + } +} + +func (opt *optimizerExprFactory) sanitizeMacro(macroID int64, macroExpr ast.Expr) { + macroRefVisitor := ast.NewExprVisitor(func(e ast.Expr) { + if _, exists := opt.sourceInfo.GetMacroCall(e.ID()); exists && e.ID() != macroID { + e.SetKindCase(nil) + } + }) + ast.PostOrderVisit(macroExpr, macroRefVisitor) +} diff --git a/vendor/github.com/google/cel-go/cel/options.go b/vendor/github.com/google/cel-go/cel/options.go index 07f3d6c71..3c53e21af 100644 --- a/vendor/github.com/google/cel-go/cel/options.go +++ b/vendor/github.com/google/cel-go/cel/options.go @@ -23,12 +23,13 @@ import ( "google.golang.org/protobuf/reflect/protoregistry" "google.golang.org/protobuf/types/dynamicpb" - "github.com/google/cel-go/checker/decls" + "github.com/google/cel-go/checker" "github.com/google/cel-go/common/containers" + "github.com/google/cel-go/common/functions" + "github.com/google/cel-go/common/types" "github.com/google/cel-go/common/types/pb" "github.com/google/cel-go/common/types/ref" "github.com/google/cel-go/interpreter" - "github.com/google/cel-go/interpreter/functions" "github.com/google/cel-go/parser" exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" @@ -41,13 +42,6 @@ import ( const ( _ = iota - // Disallow heterogeneous aggregate (list, map) literals. - // Note, it is still possible to have heterogeneous aggregates when - // provided as variables to the expression, as well as via conversion - // of well-known dynamic types, or with unchecked expressions. - // Affects checking. Provides a subset of standard behavior. - featureDisableDynamicAggregateLiterals - // Enable the tracking of function call expressions replaced by macros. featureEnableMacroCallTracking @@ -63,9 +57,10 @@ const ( // is not already in UTC. featureDefaultUTCTimeZone - // Enable the use of optional types in the syntax, type-system, type-checking, - // and runtime. - featureOptionalTypes + // Enable the serialization of logical operator ASTs as variadic calls, thus + // compressing the logic graph to a single call when multiple like-operator + // expressions occur: e.g. a && b && c && d -> call(_&&_, [a, b, c, d]) + featureVariadicLogicalASTs ) // EnvOption is a functional interface for configuring the environment. @@ -82,23 +77,26 @@ func ClearMacros() EnvOption { } } -// CustomTypeAdapter swaps the default ref.TypeAdapter implementation with a custom one. +// CustomTypeAdapter swaps the default types.Adapter implementation with a custom one. // // Note: This option must be specified before the Types and TypeDescs options when used together. -func CustomTypeAdapter(adapter ref.TypeAdapter) EnvOption { +func CustomTypeAdapter(adapter types.Adapter) EnvOption { return func(e *Env) (*Env, error) { e.adapter = adapter return e, nil } } -// CustomTypeProvider swaps the default ref.TypeProvider implementation with a custom one. +// CustomTypeProvider replaces the types.Provider implementation with a custom one. +// +// The `provider` variable type may either be types.Provider or ref.TypeProvider (deprecated) // // Note: This option must be specified before the Types and TypeDescs options when used together. 
-func CustomTypeProvider(provider ref.TypeProvider) EnvOption { +func CustomTypeProvider(provider any) EnvOption { return func(e *Env) (*Env, error) { - e.provider = provider - return e, nil + var err error + e.provider, err = maybeInteropProvider(provider) + return e, err } } @@ -108,8 +106,28 @@ func CustomTypeProvider(provider ref.TypeProvider) EnvOption { // for the environment. The NewEnv call builds on top of the standard CEL declarations. For a // purely custom set of declarations use NewCustomEnv. func Declarations(decls ...*exprpb.Decl) EnvOption { + declOpts := []EnvOption{} + var err error + var opt EnvOption + // Convert the declarations to `EnvOption` values ahead of time. + // Surface any errors in conversion when the options are applied. + for _, d := range decls { + opt, err = ExprDeclToDeclaration(d) + if err != nil { + break + } + declOpts = append(declOpts, opt) + } return func(e *Env) (*Env, error) { - e.declarations = append(e.declarations, decls...) + if err != nil { + return nil, err + } + for _, o := range declOpts { + e, err = o(e) + if err != nil { + return nil, err + } + } return e, nil } } @@ -126,14 +144,25 @@ func EagerlyValidateDeclarations(enabled bool) EnvOption { return features(featureEagerlyValidateDeclarations, enabled) } -// HomogeneousAggregateLiterals option ensures that list and map literal entry types must agree -// during type-checking. +// HomogeneousAggregateLiterals disables mixed type list and map literal values. // // Note, it is still possible to have heterogeneous aggregates when provided as variables to the // expression, as well as via conversion of well-known dynamic types, or with unchecked // expressions. func HomogeneousAggregateLiterals() EnvOption { - return features(featureDisableDynamicAggregateLiterals, true) + return ASTValidators(ValidateHomogeneousAggregateLiterals()) +} + +// variadicLogicalOperatorASTs flatten like-operator chained logical expressions into a single +// variadic call with N-terms. This behavior is useful when serializing to a protocol buffer as +// it will reduce the number of recursive calls needed to deserialize the AST later. +// +// For example, given the following expression the call graph will be rendered accordingly: +// +// expression: a && b && c && (d || e) +// ast: call(_&&_, [a, b, c, call(_||_, [d, e])]) +func variadicLogicalOperatorASTs() EnvOption { + return features(featureVariadicLogicalASTs, true) } // Macros option extends the macro set configured in the environment. @@ -226,7 +255,12 @@ func Abbrevs(qualifiedNames ...string) EnvOption { // Note: This option must be specified after the CustomTypeProvider option when used together. func Types(addTypes ...any) EnvOption { return func(e *Env) (*Env, error) { - reg, isReg := e.provider.(ref.TypeRegistry) + var reg ref.TypeRegistry + var isReg bool + reg, isReg = e.provider.(*types.Registry) + if !isReg { + reg, isReg = e.provider.(ref.TypeRegistry) + } if !isReg { return nil, fmt.Errorf("custom types not supported by provider: %T", e.provider) } @@ -414,6 +448,8 @@ const ( OptTrackCost EvalOption = 1 << iota // OptCheckStringFormat enables compile-time checking of string.format calls for syntax/cardinality. + // + // Deprecated: use ext.StringsValidateFormatCalls() as this option is now a no-op. OptCheckStringFormat EvalOption = 1 << iota ) @@ -436,6 +472,24 @@ func InterruptCheckFrequency(checkFrequency uint) ProgramOption { } } +// CostEstimatorOptions configure type-check time options for estimating expression cost. 
+func CostEstimatorOptions(costOpts ...checker.CostOption) EnvOption { + return func(e *Env) (*Env, error) { + e.costOptions = append(e.costOptions, costOpts...) + return e, nil + } +} + +// CostTrackerOptions configures a set of options for cost-tracking. +// +// Note, CostTrackerOptions is a no-op unless CostTracking is also enabled. +func CostTrackerOptions(costOpts ...interpreter.CostTrackerOption) ProgramOption { + return func(p *prog) (*prog, error) { + p.costOptions = append(p.costOptions, costOpts...) + return p, nil + } +} + // CostTracking enables cost tracking and registers a ActualCostEstimator that can optionally provide a runtime cost estimate for any function calls. func CostTracking(costEstimator interpreter.ActualCostEstimator) ProgramOption { return func(p *prog) (*prog, error) { @@ -457,25 +511,21 @@ func CostLimit(costLimit uint64) ProgramOption { } } -func fieldToCELType(field protoreflect.FieldDescriptor) (*exprpb.Type, error) { +func fieldToCELType(field protoreflect.FieldDescriptor) (*Type, error) { if field.Kind() == protoreflect.MessageKind || field.Kind() == protoreflect.GroupKind { msgName := (string)(field.Message().FullName()) - wellKnownType, found := pb.CheckedWellKnowns[msgName] - if found { - return wellKnownType, nil - } - return decls.NewObjectType(msgName), nil + return ObjectType(msgName), nil } - if primitiveType, found := pb.CheckedPrimitives[field.Kind()]; found { + if primitiveType, found := types.ProtoCELPrimitives[field.Kind()]; found { return primitiveType, nil } if field.Kind() == protoreflect.EnumKind { - return decls.Int, nil + return IntType, nil } return nil, fmt.Errorf("field %s type %s not implemented", field.FullName(), field.Kind().String()) } -func fieldToDecl(field protoreflect.FieldDescriptor) (*exprpb.Decl, error) { +func fieldToVariable(field protoreflect.FieldDescriptor) (EnvOption, error) { name := string(field.Name()) if field.IsMap() { mapKey := field.MapKey() @@ -488,20 +538,20 @@ func fieldToDecl(field protoreflect.FieldDescriptor) (*exprpb.Decl, error) { if err != nil { return nil, err } - return decls.NewVar(name, decls.NewMapType(keyType, valueType)), nil + return Variable(name, MapType(keyType, valueType)), nil } if field.IsList() { elemType, err := fieldToCELType(field) if err != nil { return nil, err } - return decls.NewVar(name, decls.NewListType(elemType)), nil + return Variable(name, ListType(elemType)), nil } celType, err := fieldToCELType(field) if err != nil { return nil, err } - return decls.NewVar(name, celType), nil + return Variable(name, celType), nil } // DeclareContextProto returns an option to extend CEL environment with declarations from the given context proto. @@ -509,23 +559,51 @@ func fieldToDecl(field protoreflect.FieldDescriptor) (*exprpb.Decl, error) { // https://github.com/google/cel-spec/blob/master/doc/langdef.md#evaluation-environment func DeclareContextProto(descriptor protoreflect.MessageDescriptor) EnvOption { return func(e *Env) (*Env, error) { - var decls []*exprpb.Decl fields := descriptor.Fields() for i := 0; i < fields.Len(); i++ { field := fields.Get(i) - decl, err := fieldToDecl(field) + variable, err := fieldToVariable(field) + if err != nil { + return nil, err + } + e, err = variable(e) if err != nil { return nil, err } - decls = append(decls, decl) } - var err error - e, err = Declarations(decls...)(e) + return Types(dynamicpb.NewMessage(descriptor))(e) + } +} + +// ContextProtoVars uses the fields of the input proto.Messages as top-level variables within an Activation. 
+// +// Consider using with `DeclareContextProto` to simplify variable type declarations and publishing when using +// protocol buffers. +func ContextProtoVars(ctx proto.Message) (interpreter.Activation, error) { + if ctx == nil || !ctx.ProtoReflect().IsValid() { + return interpreter.EmptyActivation(), nil + } + reg, err := types.NewRegistry(ctx) + if err != nil { + return nil, err + } + pbRef := ctx.ProtoReflect() + typeName := string(pbRef.Descriptor().FullName()) + fields := pbRef.Descriptor().Fields() + vars := make(map[string]any, fields.Len()) + for i := 0; i < fields.Len(); i++ { + field := fields.Get(i) + sft, found := reg.FindStructFieldType(typeName, field.TextName()) + if !found { + return nil, fmt.Errorf("no such field: %s", field.TextName()) + } + fieldVal, err := sft.GetFrom(ctx) if err != nil { return nil, err } - return Types(dynamicpb.NewMessage(descriptor))(e) + vars[field.TextName()] = fieldVal } + return interpreter.NewActivation(vars) } // EnableMacroCallTracking ensures that call expressions which are replaced by macros @@ -545,13 +623,6 @@ func DefaultUTCTimeZone(enabled bool) EnvOption { return features(featureDefaultUTCTimeZone, enabled) } -// OptionalTypes enable support for optional syntax and types in CEL. The optional value type makes -// it possible to express whether variables have been provided, whether a result has been computed, -// and in the future whether an object field path, map key value, or list index has a value. -func OptionalTypes() EnvOption { - return Lib(optionalLibrary{}) -} - // features sets the given feature flags. See list of Feature constants above. func features(flag int, enabled bool) EnvOption { return func(e *Env) (*Env, error) { @@ -577,3 +648,14 @@ func ParserExpressionSizeLimit(limit int) EnvOption { return e, nil } } + +func maybeInteropProvider(provider any) (types.Provider, error) { + switch p := provider.(type) { + case types.Provider: + return p, nil + case ref.TypeProvider: + return &interopCELTypeProvider{TypeProvider: p}, nil + default: + return nil, fmt.Errorf("unsupported type provider: %T", provider) + } +} diff --git a/vendor/github.com/google/cel-go/cel/program.go b/vendor/github.com/google/cel-go/cel/program.go index a630f5bfa..ece9fbdaf 100644 --- a/vendor/github.com/google/cel-go/cel/program.go +++ b/vendor/github.com/google/cel-go/cel/program.go @@ -22,8 +22,6 @@ import ( "github.com/google/cel-go/common/types" "github.com/google/cel-go/common/types/ref" "github.com/google/cel-go/interpreter" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" ) // Program is an evaluable view of an Ast. @@ -62,6 +60,9 @@ func NoVars() interpreter.Activation { // PartialVars returns a PartialActivation which contains variables and a set of AttributePattern // values that indicate variables or parts of variables whose value are not yet known. // +// This method relies on manually configured sets of missing attribute patterns. For a method which +// infers the missing variables from the input and the configured environment, use Env.PartialVars(). +// // The `vars` value may either be an interpreter.Activation or any valid input to the // interpreter.NewActivation call. func PartialVars(vars any, @@ -104,7 +105,7 @@ func (ed *EvalDetails) State() interpreter.EvalState { // ActualCost returns the tracked cost through the course of execution when `CostTracking` is enabled. // Otherwise, returns nil if the cost was not enabled. 
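For context, a sketch of how the cost-tracking surfaces fit together at runtime; the expression, the cost limit, and the nil estimator (which falls back to the interpreter's built-in cost accounting) are illustrative assumptions:

    package example

    import (
        "fmt"

        "github.com/google/cel-go/cel"
    )

    func evalWithCost(env *cel.Env, expr string) error {
        ast, iss := env.Compile(expr)
        if iss.Err() != nil {
            return iss.Err()
        }
        // CostTracking enables runtime cost accounting; CostLimit aborts the
        // evaluation once the accumulated cost exceeds the configured budget.
        prg, err := env.Program(ast, cel.CostTracking(nil), cel.CostLimit(1000))
        if err != nil {
            return err
        }
        out, det, err := prg.Eval(cel.NoVars())
        if err != nil {
            return err
        }
        if cost := det.ActualCost(); cost != nil {
            fmt.Printf("result=%v actual cost=%d\n", out, *cost)
        }
        return nil
    }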
func (ed *EvalDetails) ActualCost() *uint64 { - if ed.costTracker == nil { + if ed == nil || ed.costTracker == nil { return nil } cost := ed.costTracker.ActualCost() @@ -128,10 +129,14 @@ type prog struct { // Interpretable configured from an Ast and aggregate decorator set based on program options. interpretable interpreter.Interpretable callCostEstimator interpreter.ActualCostEstimator + costOptions []interpreter.CostTrackerOption costLimit *uint64 } func (p *prog) clone() *prog { + costOptsCopy := make([]interpreter.CostTrackerOption, len(p.costOptions)) + copy(costOptsCopy, p.costOptions) + return &prog{ Env: p.Env, evalOpts: p.evalOpts, @@ -146,16 +151,17 @@ func (p *prog) clone() *prog { // ProgramOption values. // // If the program cannot be configured the prog will be nil, with a non-nil error response. -func newProgram(e *Env, ast *Ast, opts []ProgramOption) (Program, error) { +func newProgram(e *Env, a *Ast, opts []ProgramOption) (Program, error) { // Build the dispatcher, interpreter, and default program value. disp := interpreter.NewDispatcher() // Ensure the default attribute factory is set after the adapter and provider are // configured. p := &prog{ - Env: e, - decorators: []interpreter.InterpretableDecorator{}, - dispatcher: disp, + Env: e, + decorators: []interpreter.InterpretableDecorator{}, + dispatcher: disp, + costOptions: []interpreter.CostTrackerOption{}, } // Configure the program via the ProgramOption values. @@ -169,7 +175,7 @@ func newProgram(e *Env, ast *Ast, opts []ProgramOption) (Program, error) { // Add the function bindings created via Function() options. for _, fn := range e.functions { - bindings, err := fn.bindings() + bindings, err := fn.Bindings() if err != nil { return nil, err } @@ -206,43 +212,18 @@ func newProgram(e *Env, ast *Ast, opts []ProgramOption) (Program, error) { if len(p.regexOptimizations) > 0 { decorators = append(decorators, interpreter.CompileRegexConstants(p.regexOptimizations...)) } - // Enable compile-time checking of syntax/cardinality for string.format calls. - if p.evalOpts&OptCheckStringFormat == OptCheckStringFormat { - var isValidType func(id int64, validTypes ...*types.TypeValue) (bool, error) - if ast.IsChecked() { - isValidType = func(id int64, validTypes ...*types.TypeValue) (bool, error) { - t, err := ExprTypeToType(ast.typeMap[id]) - if err != nil { - return false, err - } - if t.kind == DynKind { - return true, nil - } - for _, vt := range validTypes { - k, err := typeValueToKind(vt) - if err != nil { - return false, err - } - if k == t.kind { - return true, nil - } - } - return false, nil - } - } else { - // if the AST isn't type-checked, short-circuit validation - isValidType = func(id int64, validTypes ...*types.TypeValue) (bool, error) { - return true, nil - } - } - decorators = append(decorators, interpreter.InterpolateFormattedString(isValidType)) - } // Enable exhaustive eval, state tracking and cost tracking last since they require a factory. if p.evalOpts&(OptExhaustiveEval|OptTrackState|OptTrackCost) != 0 { factory := func(state interpreter.EvalState, costTracker *interpreter.CostTracker) (Program, error) { costTracker.Estimator = p.callCostEstimator costTracker.Limit = p.costLimit + for _, costOpt := range p.costOptions { + err := costOpt(costTracker) + if err != nil { + return nil, err + } + } // Limit capacity to guarantee a reallocation when calling 'append(decs, ...)' below. This // prevents the underlying memory from being shared between factory function calls causing // undesired mutations. 
@@ -264,32 +245,16 @@ func newProgram(e *Env, ast *Ast, opts []ProgramOption) (Program, error) { decs = append(decs, interpreter.Observe(observers...)) } - return p.clone().initInterpretable(ast, decs) + return p.clone().initInterpretable(a, decs) } return newProgGen(factory) } - return p.initInterpretable(ast, decorators) + return p.initInterpretable(a, decorators) } -func (p *prog) initInterpretable(ast *Ast, decs []interpreter.InterpretableDecorator) (*prog, error) { - // Unchecked programs do not contain type and reference information and may be slower to execute. - if !ast.IsChecked() { - interpretable, err := - p.interpreter.NewUncheckedInterpretable(ast.Expr(), decs...) - if err != nil { - return nil, err - } - p.interpretable = interpretable - return p, nil - } - - // When the AST has been checked it contains metadata that can be used to speed up program execution. - var checked *exprpb.CheckedExpr - checked, err := AstToCheckedExpr(ast) - if err != nil { - return nil, err - } - interpretable, err := p.interpreter.NewInterpretable(checked, decs...) +func (p *prog) initInterpretable(a *Ast, decs []interpreter.InterpretableDecorator) (*prog, error) { + // When the AST has been exprAST it contains metadata that can be used to speed up program execution. + interpretable, err := p.interpreter.NewInterpretable(a.impl, decs...) if err != nil { return nil, err } @@ -371,7 +336,11 @@ type progGen struct { // the test is successful. func newProgGen(factory progFactory) (Program, error) { // Test the factory to make sure that configuration errors are spotted at config - _, err := factory(interpreter.NewEvalState(), &interpreter.CostTracker{}) + tracker, err := interpreter.NewCostTracker(nil) + if err != nil { + return nil, err + } + _, err = factory(interpreter.NewEvalState(), tracker) if err != nil { return nil, err } @@ -384,7 +353,10 @@ func (gen *progGen) Eval(input any) (ref.Val, *EvalDetails, error) { // new EvalState instance for each call to ensure that unique evaluations yield unique stateful // results. state := interpreter.NewEvalState() - costTracker := &interpreter.CostTracker{} + costTracker, err := interpreter.NewCostTracker(nil) + if err != nil { + return nil, nil, err + } det := &EvalDetails{state: state, costTracker: costTracker} // Generate a new instance of the interpretable using the factory configured during the call to @@ -412,7 +384,10 @@ func (gen *progGen) ContextEval(ctx context.Context, input any) (ref.Val, *EvalD // new EvalState instance for each call to ensure that unique evaluations yield unique stateful // results. state := interpreter.NewEvalState() - costTracker := &interpreter.CostTracker{} + costTracker, err := interpreter.NewCostTracker(nil) + if err != nil { + return nil, nil, err + } det := &EvalDetails{state: state, costTracker: costTracker} // Generate a new instance of the interpretable using the factory configured during the call to @@ -498,7 +473,7 @@ type evalActivation struct { // The lazy binding will only be invoked once per evaluation. // // Values which are not represented as ref.Val types on input may be adapted to a ref.Val using -// the ref.TypeAdapter configured in the environment. +// the types.Adapter configured in the environment. 
func (a *evalActivation) ResolveName(name string) (any, bool) { v, found := a.vars[name] if !found { @@ -559,8 +534,6 @@ func (p *evalActivationPool) Put(value any) { } var ( - emptyEvalState = interpreter.NewEvalState() - // activationPool is an internally managed pool of Activation values that wrap map[string]any inputs activationPool = newEvalActivationPool() diff --git a/vendor/github.com/google/cel-go/cel/validator.go b/vendor/github.com/google/cel-go/cel/validator.go new file mode 100644 index 000000000..b50c67452 --- /dev/null +++ b/vendor/github.com/google/cel-go/cel/validator.go @@ -0,0 +1,375 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cel + +import ( + "fmt" + "reflect" + "regexp" + + "github.com/google/cel-go/common/ast" + "github.com/google/cel-go/common/overloads" +) + +const ( + homogeneousValidatorName = "cel.lib.std.validate.types.homogeneous" + + // HomogeneousAggregateLiteralExemptFunctions is the ValidatorConfig key used to configure + // the set of function names which are exempt from homogeneous type checks. The expected type + // is a string list of function names. + // + // As an example, the `.format([args])` call expects the input arguments list to be + // comprised of a variety of types which correspond to the types expected by the format control + // clauses; however, all other uses of a mixed element type list, would be unexpected. + HomogeneousAggregateLiteralExemptFunctions = homogeneousValidatorName + ".exempt" +) + +// ASTValidators configures a set of ASTValidator instances into the target environment. +// +// Validators are applied in the order in which the are specified and are treated as singletons. +// The same ASTValidator with a given name will not be applied more than once. +func ASTValidators(validators ...ASTValidator) EnvOption { + return func(e *Env) (*Env, error) { + for _, v := range validators { + if !e.HasValidator(v.Name()) { + e.validators = append(e.validators, v) + } + } + return e, nil + } +} + +// ASTValidator defines a singleton interface for validating a type-checked Ast against an environment. +// +// Note: the Issues argument is mutable in the sense that it is intended to collect errors which will be +// reported to the caller. +type ASTValidator interface { + // Name returns the name of the validator. Names must be unique. + Name() string + + // Validate validates a given Ast within an Environment and collects a set of potential issues. + // + // The ValidatorConfig is generated from the set of ASTValidatorConfigurer instances prior to + // the invocation of the Validate call. The expectation is that the validator configuration + // is created in sequence and immutable once provided to the Validate call. + // + // See individual validators for more information on their configuration keys and configuration + // properties. + Validate(*Env, ValidatorConfig, *ast.AST, *Issues) +} + +// ValidatorConfig provides an accessor method for querying validator configuration state. 
+type ValidatorConfig interface { + GetOrDefault(name string, value any) any +} + +// MutableValidatorConfig provides mutation methods for querying and updating validator configuration +// settings. +type MutableValidatorConfig interface { + ValidatorConfig + Set(name string, value any) error +} + +// ASTValidatorConfigurer indicates that this object, currently expected to be an ASTValidator, +// participates in validator configuration settings. +// +// This interface may be split from the expectation of being an ASTValidator instance in the future. +type ASTValidatorConfigurer interface { + Configure(MutableValidatorConfig) error +} + +// validatorConfig implements the ValidatorConfig and MutableValidatorConfig interfaces. +type validatorConfig struct { + data map[string]any +} + +// newValidatorConfig initializes the validator config with default values for core CEL validators. +func newValidatorConfig() *validatorConfig { + return &validatorConfig{ + data: map[string]any{ + HomogeneousAggregateLiteralExemptFunctions: []string{}, + }, + } +} + +// GetOrDefault returns the configured value for the name, if present, else the input default value. +// +// Note, the type-agreement between the input default and configured value is not checked on read. +func (config *validatorConfig) GetOrDefault(name string, value any) any { + v, found := config.data[name] + if !found { + return value + } + return v +} + +// Set configures a validator option with the given name and value. +// +// If the value had previously been set, the new value must have the same reflection type as the old one, +// or the call will error. +func (config *validatorConfig) Set(name string, value any) error { + v, found := config.data[name] + if found && reflect.TypeOf(v) != reflect.TypeOf(value) { + return fmt.Errorf("incompatible configuration type for %s, got %T, wanted %T", name, value, v) + } + config.data[name] = value + return nil +} + +// ExtendedValidations collects a set of common AST validations which reduce the likelihood of runtime errors. +// +// - Validate duration and timestamp literals +// - Ensure regex strings are valid +// - Disable mixed type list and map literals +func ExtendedValidations() EnvOption { + return ASTValidators( + ValidateDurationLiterals(), + ValidateTimestampLiterals(), + ValidateRegexLiterals(), + ValidateHomogeneousAggregateLiterals(), + ) +} + +// ValidateDurationLiterals ensures that duration literal arguments are valid immediately after type-check. +func ValidateDurationLiterals() ASTValidator { + return newFormatValidator(overloads.TypeConvertDuration, 0, evalCall) +} + +// ValidateTimestampLiterals ensures that timestamp literal arguments are valid immediately after type-check. +func ValidateTimestampLiterals() ASTValidator { + return newFormatValidator(overloads.TypeConvertTimestamp, 0, evalCall) +} + +// ValidateRegexLiterals ensures that regex patterns are validated after type-check. +func ValidateRegexLiterals() ASTValidator { + return newFormatValidator(overloads.Matches, 0, compileRegex) +} + +// ValidateHomogeneousAggregateLiterals checks that all list and map literals entries have the same types, i.e. +// no mixed list element types or mixed map key or map value types. +// +// Note: the string format call relies on a mixed element type list for ease of use, so this check skips all +// literals which occur within string format calls. 
+func ValidateHomogeneousAggregateLiterals() ASTValidator { + return homogeneousAggregateLiteralValidator{} +} + +// ValidateComprehensionNestingLimit ensures that comprehension nesting does not exceed the specified limit. +// +// This validator can be useful for preventing arbitrarily nested comprehensions which can take high polynomial +// time to complete. +// +// Note, this limit does not apply to comprehensions with an empty iteration range, as these comprehensions have +// no actual looping cost. The cel.bind() utilizes the comprehension structure to perform local variable +// assignments and supplies an empty iteration range, so they won't count against the nesting limit either. +func ValidateComprehensionNestingLimit(limit int) ASTValidator { + return nestingLimitValidator{limit: limit} +} + +type argChecker func(env *Env, call, arg ast.Expr) error + +func newFormatValidator(funcName string, argNum int, check argChecker) formatValidator { + return formatValidator{ + funcName: funcName, + check: check, + argNum: argNum, + } +} + +type formatValidator struct { + funcName string + argNum int + check argChecker +} + +// Name returns the unique name of this function format validator. +func (v formatValidator) Name() string { + return fmt.Sprintf("cel.lib.std.validate.functions.%s", v.funcName) +} + +// Validate searches the AST for uses of a given function name with a constant argument and performs a check +// on whether the argument is a valid literal value. +func (v formatValidator) Validate(e *Env, _ ValidatorConfig, a *ast.AST, iss *Issues) { + root := ast.NavigateAST(a) + funcCalls := ast.MatchDescendants(root, ast.FunctionMatcher(v.funcName)) + for _, call := range funcCalls { + callArgs := call.AsCall().Args() + if len(callArgs) <= v.argNum { + continue + } + litArg := callArgs[v.argNum] + if litArg.Kind() != ast.LiteralKind { + continue + } + if err := v.check(e, call, litArg); err != nil { + iss.ReportErrorAtID(litArg.ID(), "invalid %s argument", v.funcName) + } + } +} + +func evalCall(env *Env, call, arg ast.Expr) error { + ast := &Ast{impl: ast.NewAST(call, ast.NewSourceInfo(nil))} + prg, err := env.Program(ast) + if err != nil { + return err + } + _, _, err = prg.Eval(NoVars()) + return err +} + +func compileRegex(_ *Env, _, arg ast.Expr) error { + pattern := arg.AsLiteral().Value().(string) + _, err := regexp.Compile(pattern) + return err +} + +type homogeneousAggregateLiteralValidator struct{} + +// Name returns the unique name of the homogeneous type validator. +func (homogeneousAggregateLiteralValidator) Name() string { + return homogeneousValidatorName +} + +// Validate validates that all lists and map literals have homogeneous types, i.e. don't contain dyn types. +// +// This validator makes an exception for list and map literals which occur at any level of nesting within +// string format calls. 
+func (v homogeneousAggregateLiteralValidator) Validate(_ *Env, c ValidatorConfig, a *ast.AST, iss *Issues) { + var exemptedFunctions []string + exemptedFunctions = c.GetOrDefault(HomogeneousAggregateLiteralExemptFunctions, exemptedFunctions).([]string) + root := ast.NavigateAST(a) + listExprs := ast.MatchDescendants(root, ast.KindMatcher(ast.ListKind)) + for _, listExpr := range listExprs { + if inExemptFunction(listExpr, exemptedFunctions) { + continue + } + l := listExpr.AsList() + elements := l.Elements() + optIndices := l.OptionalIndices() + var elemType *Type + for i, e := range elements { + et := a.GetType(e.ID()) + if isOptionalIndex(i, optIndices) { + et = et.Parameters()[0] + } + if elemType == nil { + elemType = et + continue + } + if !elemType.IsEquivalentType(et) { + v.typeMismatch(iss, e.ID(), elemType, et) + break + } + } + } + mapExprs := ast.MatchDescendants(root, ast.KindMatcher(ast.MapKind)) + for _, mapExpr := range mapExprs { + if inExemptFunction(mapExpr, exemptedFunctions) { + continue + } + m := mapExpr.AsMap() + entries := m.Entries() + var keyType, valType *Type + for _, e := range entries { + mapEntry := e.AsMapEntry() + key, val := mapEntry.Key(), mapEntry.Value() + kt, vt := a.GetType(key.ID()), a.GetType(val.ID()) + if mapEntry.IsOptional() { + vt = vt.Parameters()[0] + } + if keyType == nil && valType == nil { + keyType, valType = kt, vt + continue + } + if !keyType.IsEquivalentType(kt) { + v.typeMismatch(iss, key.ID(), keyType, kt) + } + if !valType.IsEquivalentType(vt) { + v.typeMismatch(iss, val.ID(), valType, vt) + } + } + } +} + +func inExemptFunction(e ast.NavigableExpr, exemptFunctions []string) bool { + parent, found := e.Parent() + for found { + if parent.Kind() == ast.CallKind { + fnName := parent.AsCall().FunctionName() + for _, exempt := range exemptFunctions { + if exempt == fnName { + return true + } + } + } + parent, found = parent.Parent() + } + return false +} + +func isOptionalIndex(i int, optIndices []int32) bool { + for _, optInd := range optIndices { + if i == int(optInd) { + return true + } + } + return false +} + +func (homogeneousAggregateLiteralValidator) typeMismatch(iss *Issues, id int64, expected, actual *Type) { + iss.ReportErrorAtID(id, "expected type '%s' but found '%s'", FormatCELType(expected), FormatCELType(actual)) +} + +type nestingLimitValidator struct { + limit int +} + +func (v nestingLimitValidator) Name() string { + return "cel.lib.std.validate.comprehension_nesting_limit" +} + +func (v nestingLimitValidator) Validate(e *Env, _ ValidatorConfig, a *ast.AST, iss *Issues) { + root := ast.NavigateAST(a) + comprehensions := ast.MatchDescendants(root, ast.KindMatcher(ast.ComprehensionKind)) + if len(comprehensions) <= v.limit { + return + } + for _, comp := range comprehensions { + count := 0 + e := comp + hasParent := true + for hasParent { + // When the expression is not a comprehension, continue to the next ancestor. + if e.Kind() != ast.ComprehensionKind { + e, hasParent = e.Parent() + continue + } + // When the comprehension has an empty range, continue to the next ancestor + // as this comprehension does not have any associated cost. + iterRange := e.AsComprehension().IterRange() + if iterRange.Kind() == ast.ListKind && iterRange.AsList().Size() == 0 { + e, hasParent = e.Parent() + continue + } + // Otherwise check the nesting limit. 
+ count++ + if count > v.limit { + iss.ReportErrorAtID(comp.ID(), "comprehension exceeds nesting limit") + break + } + e, hasParent = e.Parent() + } + } +} diff --git a/vendor/github.com/google/cel-go/checker/BUILD.bazel b/vendor/github.com/google/cel-go/checker/BUILD.bazel index 1c6ddb7f7..997fa91d1 100644 --- a/vendor/github.com/google/cel-go/checker/BUILD.bazel +++ b/vendor/github.com/google/cel-go/checker/BUILD.bazel @@ -11,9 +11,11 @@ go_library( "cost.go", "env.go", "errors.go", + "format.go", "mapping.go", "options.go", "printer.go", + "scopes.go", "standard.go", "types.go", ], @@ -22,10 +24,13 @@ go_library( deps = [ "//checker/decls:go_default_library", "//common:go_default_library", + "//common/ast:go_default_library", "//common/containers:go_default_library", "//common/debug:go_default_library", + "//common/decls:go_default_library", "//common/operators:go_default_library", "//common/overloads:go_default_library", + "//common/stdlib:go_default_library", "//common/types:go_default_library", "//common/types/pb:go_default_library", "//common/types/ref:go_default_library", @@ -44,6 +49,7 @@ go_test( "checker_test.go", "cost_test.go", "env_test.go", + "format_test.go", ], embed = [ ":go_default_library", @@ -54,7 +60,6 @@ go_test( "//test:go_default_library", "//test/proto2pb:go_default_library", "//test/proto3pb:go_default_library", - "@com_github_antlr_antlr4_runtime_go_antlr_v4//:go_default_library", "@org_golang_google_protobuf//proto:go_default_library", ], ) diff --git a/vendor/github.com/google/cel-go/checker/checker.go b/vendor/github.com/google/cel-go/checker/checker.go index 257cffecf..57fb3ce5e 100644 --- a/vendor/github.com/google/cel-go/checker/checker.go +++ b/vendor/github.com/google/cel-go/checker/checker.go @@ -20,150 +20,104 @@ import ( "fmt" "reflect" - "github.com/google/cel-go/checker/decls" "github.com/google/cel-go/common" + "github.com/google/cel-go/common/ast" "github.com/google/cel-go/common/containers" + "github.com/google/cel-go/common/decls" "github.com/google/cel-go/common/operators" + "github.com/google/cel-go/common/types" "github.com/google/cel-go/common/types/ref" - - "google.golang.org/protobuf/proto" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" ) type checker struct { + *ast.AST + ast.ExprFactory env *Env errors *typeErrors mappings *mapping freeTypeVarCounter int - sourceInfo *exprpb.SourceInfo - types map[int64]*exprpb.Type - references map[int64]*exprpb.Reference } // Check performs type checking, giving a typed AST. -// The input is a ParsedExpr proto and an env which encapsulates -// type binding of variables, declarations of built-in functions, -// descriptions of protocol buffers, and a registry for errors. -// Returns a CheckedExpr proto, which might not be usable if -// there are errors in the error registry. -func Check(parsedExpr *exprpb.ParsedExpr, - source common.Source, - env *Env) (*exprpb.CheckedExpr, *common.Errors) { +// +// The input is a parsed AST and an env which encapsulates type binding of variables, +// declarations of built-in functions, descriptions of protocol buffers, and a registry for +// errors. +// +// Returns a type-checked AST, which might not be usable if there are errors in the error +// registry. 
+func Check(parsed *ast.AST, source common.Source, env *Env) (*ast.AST, *common.Errors) { + errs := common.NewErrors(source) + typeMap := make(map[int64]*types.Type) + refMap := make(map[int64]*ast.ReferenceInfo) c := checker{ + AST: ast.NewCheckedAST(parsed, typeMap, refMap), + ExprFactory: ast.NewExprFactory(), env: env, - errors: &typeErrors{common.NewErrors(source)}, + errors: &typeErrors{errs: errs}, mappings: newMapping(), freeTypeVarCounter: 0, - sourceInfo: parsedExpr.GetSourceInfo(), - types: make(map[int64]*exprpb.Type), - references: make(map[int64]*exprpb.Reference), } - c.check(parsedExpr.GetExpr()) + c.check(c.Expr()) - // Walk over the final type map substituting any type parameters either by their bound value or - // by DYN. - m := make(map[int64]*exprpb.Type) - for k, v := range c.types { - m[k] = substitute(c.mappings, v, true) + // Walk over the final type map substituting any type parameters either by their bound value + // or by DYN. + for id, t := range c.TypeMap() { + c.SetType(id, substitute(c.mappings, t, true)) } - - return &exprpb.CheckedExpr{ - Expr: parsedExpr.GetExpr(), - SourceInfo: parsedExpr.GetSourceInfo(), - TypeMap: m, - ReferenceMap: c.references, - }, c.errors.Errors + return c.AST, errs } -func (c *checker) check(e *exprpb.Expr) { +func (c *checker) check(e ast.Expr) { if e == nil { return } - - switch e.GetExprKind().(type) { - case *exprpb.Expr_ConstExpr: - literal := e.GetConstExpr() - switch literal.GetConstantKind().(type) { - case *exprpb.Constant_BoolValue: - c.checkBoolLiteral(e) - case *exprpb.Constant_BytesValue: - c.checkBytesLiteral(e) - case *exprpb.Constant_DoubleValue: - c.checkDoubleLiteral(e) - case *exprpb.Constant_Int64Value: - c.checkInt64Literal(e) - case *exprpb.Constant_NullValue: - c.checkNullLiteral(e) - case *exprpb.Constant_StringValue: - c.checkStringLiteral(e) - case *exprpb.Constant_Uint64Value: - c.checkUint64Literal(e) + switch e.Kind() { + case ast.LiteralKind: + literal := ref.Val(e.AsLiteral()) + switch literal.Type() { + case types.BoolType, types.BytesType, types.DoubleType, types.IntType, + types.NullType, types.StringType, types.UintType: + c.setType(e, literal.Type().(*types.Type)) + default: + c.errors.unexpectedASTType(e.ID(), c.location(e), "literal", literal.Type().TypeName()) } - case *exprpb.Expr_IdentExpr: + case ast.IdentKind: c.checkIdent(e) - case *exprpb.Expr_SelectExpr: + case ast.SelectKind: c.checkSelect(e) - case *exprpb.Expr_CallExpr: + case ast.CallKind: c.checkCall(e) - case *exprpb.Expr_ListExpr: + case ast.ListKind: c.checkCreateList(e) - case *exprpb.Expr_StructExpr: + case ast.MapKind: + c.checkCreateMap(e) + case ast.StructKind: c.checkCreateStruct(e) - case *exprpb.Expr_ComprehensionExpr: + case ast.ComprehensionKind: c.checkComprehension(e) default: - c.errors.ReportError( - c.location(e), "Unrecognized ast type: %v", reflect.TypeOf(e)) + c.errors.unexpectedASTType(e.ID(), c.location(e), "unspecified", reflect.TypeOf(e).Name()) } } -func (c *checker) checkInt64Literal(e *exprpb.Expr) { - c.setType(e, decls.Int) -} - -func (c *checker) checkUint64Literal(e *exprpb.Expr) { - c.setType(e, decls.Uint) -} - -func (c *checker) checkStringLiteral(e *exprpb.Expr) { - c.setType(e, decls.String) -} - -func (c *checker) checkBytesLiteral(e *exprpb.Expr) { - c.setType(e, decls.Bytes) -} - -func (c *checker) checkDoubleLiteral(e *exprpb.Expr) { - c.setType(e, decls.Double) -} - -func (c *checker) checkBoolLiteral(e *exprpb.Expr) { - c.setType(e, decls.Bool) -} - -func (c *checker) checkNullLiteral(e 
*exprpb.Expr) { - c.setType(e, decls.Null) -} - -func (c *checker) checkIdent(e *exprpb.Expr) { - identExpr := e.GetIdentExpr() +func (c *checker) checkIdent(e ast.Expr) { + identName := e.AsIdent() // Check to see if the identifier is declared. - if ident := c.env.LookupIdent(identExpr.GetName()); ident != nil { - c.setType(e, ident.GetIdent().GetType()) - c.setReference(e, newIdentReference(ident.GetName(), ident.GetIdent().GetValue())) + if ident := c.env.LookupIdent(identName); ident != nil { + c.setType(e, ident.Type()) + c.setReference(e, ast.NewIdentReference(ident.Name(), ident.Value())) // Overwrite the identifier with its fully qualified name. - identExpr.Name = ident.GetName() + e.SetKindCase(c.NewIdent(e.ID(), ident.Name())) return } - c.setType(e, decls.Error) - c.errors.undeclaredReference( - c.location(e), c.env.container.Name(), identExpr.GetName()) + c.setType(e, types.ErrorType) + c.errors.undeclaredReference(e.ID(), c.location(e), c.env.container.Name(), identName) } -func (c *checker) checkSelect(e *exprpb.Expr) { - sel := e.GetSelectExpr() +func (c *checker) checkSelect(e ast.Expr) { + sel := e.AsSelect() // Before traversing down the tree, try to interpret as qualified name. qname, found := containers.ToQualifiedName(e) if found { @@ -174,42 +128,38 @@ func (c *checker) checkSelect(e *exprpb.Expr) { // Rewrite the node to be a variable reference to the resolved fully-qualified // variable name. - c.setType(e, ident.GetIdent().GetType()) - c.setReference(e, newIdentReference(ident.GetName(), ident.GetIdent().GetValue())) - identName := ident.GetName() - e.ExprKind = &exprpb.Expr_IdentExpr{ - IdentExpr: &exprpb.Expr_Ident{ - Name: identName, - }, - } + c.setType(e, ident.Type()) + c.setReference(e, ast.NewIdentReference(ident.Name(), ident.Value())) + e.SetKindCase(c.NewIdent(e.ID(), ident.Name())) return } } - resultType := c.checkSelectField(e, sel.GetOperand(), sel.GetField(), false) - if sel.TestOnly { - resultType = decls.Bool + resultType := c.checkSelectField(e, sel.Operand(), sel.FieldName(), false) + if sel.IsTestOnly() { + resultType = types.BoolType } c.setType(e, substitute(c.mappings, resultType, false)) } -func (c *checker) checkOptSelect(e *exprpb.Expr) { +func (c *checker) checkOptSelect(e ast.Expr) { // Collect metadata related to the opt select call packaged by the parser. - call := e.GetCallExpr() - operand := call.GetArgs()[0] - field := call.GetArgs()[1] + call := e.AsCall() + operand := call.Args()[0] + field := call.Args()[1] fieldName, isString := maybeUnwrapString(field) if !isString { - c.errors.ReportError(c.location(field), "unsupported optional field selection: %v", field) + c.errors.notAnOptionalFieldSelection(field.ID(), c.location(field), field) return } // Perform type-checking using the field selection logic. resultType := c.checkSelectField(e, operand, fieldName, true) c.setType(e, substitute(c.mappings, resultType, false)) + c.setReference(e, ast.NewFunctionReference("select_optional_field")) } -func (c *checker) checkSelectField(e, operand *exprpb.Expr, field string, optional bool) *exprpb.Type { +func (c *checker) checkSelectField(e, operand ast.Expr, field string, optional bool) *types.Type { // Interpret as field selection, first traversing down the operand. 
c.check(operand) operandType := substitute(c.mappings, c.getType(operand), false) @@ -218,74 +168,71 @@ func (c *checker) checkSelectField(e, operand *exprpb.Expr, field string, option targetType, isOpt := maybeUnwrapOptional(operandType) // Assume error type by default as most types do not support field selection. - resultType := decls.Error - switch kindOf(targetType) { - case kindMap: + resultType := types.ErrorType + switch targetType.Kind() { + case types.MapKind: // Maps yield their value type as the selection result type. - mapType := targetType.GetMapType() - resultType = mapType.GetValueType() - case kindObject: + resultType = targetType.Parameters()[1] + case types.StructKind: // Objects yield their field type declaration as the selection result type, but only if // the field is defined. messageType := targetType - if fieldType, found := c.lookupFieldType(c.location(e), messageType.GetMessageType(), field); found { - resultType = fieldType.Type + if fieldType, found := c.lookupFieldType(e.ID(), messageType.TypeName(), field); found { + resultType = fieldType } - case kindTypeParam: + case types.TypeParamKind: // Set the operand type to DYN to prevent assignment to a potentially incorrect type // at a later point in type-checking. The isAssignable call will update the type // substitutions for the type param under the covers. - c.isAssignable(decls.Dyn, targetType) + c.isAssignable(types.DynType, targetType) // Also, set the result type to DYN. - resultType = decls.Dyn + resultType = types.DynType default: // Dynamic / error values are treated as DYN type. Errors are handled this way as well // in order to allow forward progress on the check. if !isDynOrError(targetType) { - c.errors.typeDoesNotSupportFieldSelection(c.location(e), targetType) + c.errors.typeDoesNotSupportFieldSelection(e.ID(), c.location(e), targetType) } - resultType = decls.Dyn + resultType = types.DynType } // If the target type was optional coming in, then the result must be optional going out. if isOpt || optional { - return decls.NewOptionalType(resultType) + return types.NewOptionalType(resultType) } return resultType } -func (c *checker) checkCall(e *exprpb.Expr) { +func (c *checker) checkCall(e ast.Expr) { // Note: similar logic exists within the `interpreter/planner.go`. If making changes here // please consider the impact on planner.go and consolidate implementations or mirror code // as appropriate. - call := e.GetCallExpr() - fnName := call.GetFunction() + call := e.AsCall() + fnName := call.FunctionName() if fnName == operators.OptSelect { c.checkOptSelect(e) return } - args := call.GetArgs() + args := call.Args() // Traverse arguments. for _, arg := range args { c.check(arg) } - target := call.GetTarget() // Regular static call with simple name. - if target == nil { + if !call.IsMemberFunction() { // Check for the existence of the function. fn := c.env.LookupFunction(fnName) if fn == nil { - c.errors.undeclaredReference( - c.location(e), c.env.container.Name(), fnName) - c.setType(e, decls.Error) + c.errors.undeclaredReference(e.ID(), c.location(e), c.env.container.Name(), fnName) + c.setType(e, types.ErrorType) return } // Overwrite the function name with its fully qualified resolved name. - call.Function = fn.GetName() + e.SetKindCase(c.NewCall(e.ID(), fn.Name(), args...)) // Check to see whether the overload resolves. 
- c.resolveOverloadOrError(c.location(e), e, fn, nil, args) + c.resolveOverloadOrError(e, fn, nil, args) return } @@ -294,6 +241,7 @@ func (c *checker) checkCall(e *exprpb.Expr) { // target a.b. // // Check whether the target is a namespaced function name. + target := call.Target() qualifiedPrefix, maybeQualified := containers.ToQualifiedName(target) if maybeQualified { maybeQualifiedName := qualifiedPrefix + "." + fnName @@ -302,34 +250,32 @@ func (c *checker) checkCall(e *exprpb.Expr) { // The function name is namespaced and so preserving the target operand would // be an inaccurate representation of the desired evaluation behavior. // Overwrite with fully-qualified resolved function name sans receiver target. - call.Target = nil - call.Function = fn.GetName() - c.resolveOverloadOrError(c.location(e), e, fn, nil, args) + e.SetKindCase(c.NewCall(e.ID(), fn.Name(), args...)) + c.resolveOverloadOrError(e, fn, nil, args) return } } // Regular instance call. - c.check(call.Target) + c.check(target) fn := c.env.LookupFunction(fnName) // Function found, attempt overload resolution. if fn != nil { - c.resolveOverloadOrError(c.location(e), e, fn, target, args) + c.resolveOverloadOrError(e, fn, target, args) return } // Function name not declared, record error. - c.errors.undeclaredReference(c.location(e), c.env.container.Name(), fnName) + c.setType(e, types.ErrorType) + c.errors.undeclaredReference(e.ID(), c.location(e), c.env.container.Name(), fnName) } func (c *checker) resolveOverloadOrError( - loc common.Location, - e *exprpb.Expr, - fn *exprpb.Decl, target *exprpb.Expr, args []*exprpb.Expr) { + e ast.Expr, fn *decls.FunctionDecl, target ast.Expr, args []ast.Expr) { // Attempt to resolve the overload. - resolution := c.resolveOverload(loc, fn, target, args) + resolution := c.resolveOverload(e, fn, target, args) // No such overload, error noted in the resolveOverload call, type recorded here. if resolution == nil { - c.setType(e, decls.Error) + c.setType(e, types.ErrorType) return } // Overload found. @@ -338,10 +284,9 @@ func (c *checker) resolveOverloadOrError( } func (c *checker) resolveOverload( - loc common.Location, - fn *exprpb.Decl, target *exprpb.Expr, args []*exprpb.Expr) *overloadResolution { + call ast.Expr, fn *decls.FunctionDecl, target ast.Expr, args []ast.Expr) *overloadResolution { - var argTypes []*exprpb.Type + var argTypes []*types.Type if target != nil { argTypes = append(argTypes, c.getType(target)) } @@ -349,234 +294,252 @@ func (c *checker) resolveOverload( argTypes = append(argTypes, c.getType(arg)) } - var resultType *exprpb.Type - var checkedRef *exprpb.Reference - for _, overload := range fn.GetFunction().GetOverloads() { + var resultType *types.Type + var checkedRef *ast.ReferenceInfo + for _, overload := range fn.OverloadDecls() { // Determine whether the overload is currently considered. - if c.env.isOverloadDisabled(overload.GetOverloadId()) { + if c.env.isOverloadDisabled(overload.ID()) { continue } // Ensure the call style for the overload matches. - if (target == nil && overload.GetIsInstanceFunction()) || - (target != nil && !overload.GetIsInstanceFunction()) { + if (target == nil && overload.IsMemberFunction()) || + (target != nil && !overload.IsMemberFunction()) { // not a compatible call style. continue } - overloadType := decls.NewFunctionType(overload.ResultType, overload.Params...) - if len(overload.GetTypeParams()) > 0 { + // Alternative type-checking behavior when the logical operators are compacted into + // variadic AST representations. 
+ if fn.Name() == operators.LogicalAnd || fn.Name() == operators.LogicalOr { + checkedRef = ast.NewFunctionReference(overload.ID()) + for i, argType := range argTypes { + if !c.isAssignable(argType, types.BoolType) { + c.errors.typeMismatch( + args[i].ID(), + c.locationByID(args[i].ID()), + types.BoolType, + argType) + resultType = types.ErrorType + } + } + if isError(resultType) { + return nil + } + return newResolution(checkedRef, types.BoolType) + } + + overloadType := newFunctionType(overload.ResultType(), overload.ArgTypes()...) + typeParams := overload.TypeParams() + if len(typeParams) != 0 { // Instantiate overload's type with fresh type variables. substitutions := newMapping() - for _, typePar := range overload.GetTypeParams() { - substitutions.add(decls.NewTypeParamType(typePar), c.newTypeVar()) + for _, typePar := range typeParams { + substitutions.add(types.NewTypeParamType(typePar), c.newTypeVar()) } overloadType = substitute(substitutions, overloadType, false) } - candidateArgTypes := overloadType.GetFunction().GetArgTypes() + candidateArgTypes := overloadType.Parameters()[1:] if c.isAssignableList(argTypes, candidateArgTypes) { if checkedRef == nil { - checkedRef = newFunctionReference(overload.GetOverloadId()) + checkedRef = ast.NewFunctionReference(overload.ID()) } else { - checkedRef.OverloadId = append(checkedRef.GetOverloadId(), overload.GetOverloadId()) + checkedRef.AddOverload(overload.ID()) } // First matching overload, determines result type. - fnResultType := substitute(c.mappings, overloadType.GetFunction().GetResultType(), false) + fnResultType := substitute(c.mappings, overloadType.Parameters()[0], false) if resultType == nil { resultType = fnResultType - } else if !isDyn(resultType) && !proto.Equal(fnResultType, resultType) { - resultType = decls.Dyn + } else if !isDyn(resultType) && !fnResultType.IsExactType(resultType) { + resultType = types.DynType } } } if resultType == nil { - for i, arg := range argTypes { - argTypes[i] = substitute(c.mappings, arg, true) + for i, argType := range argTypes { + argTypes[i] = substitute(c.mappings, argType, true) } - c.errors.noMatchingOverload(loc, fn.GetName(), argTypes, target != nil) - resultType = decls.Error + c.errors.noMatchingOverload(call.ID(), c.location(call), fn.Name(), argTypes, target != nil) return nil } return newResolution(checkedRef, resultType) } -func (c *checker) checkCreateList(e *exprpb.Expr) { - create := e.GetListExpr() - var elemsType *exprpb.Type - optionalIndices := create.GetOptionalIndices() +func (c *checker) checkCreateList(e ast.Expr) { + create := e.AsList() + var elemsType *types.Type + optionalIndices := create.OptionalIndices() optionals := make(map[int32]bool, len(optionalIndices)) for _, optInd := range optionalIndices { optionals[optInd] = true } - for i, e := range create.GetElements() { + for i, e := range create.Elements() { c.check(e) elemType := c.getType(e) if optionals[int32(i)] { var isOptional bool elemType, isOptional = maybeUnwrapOptional(elemType) if !isOptional && !isDyn(elemType) { - c.errors.typeMismatch(c.location(e), decls.NewOptionalType(elemType), elemType) + c.errors.typeMismatch(e.ID(), c.location(e), types.NewOptionalType(elemType), elemType) } } - elemsType = c.joinTypes(c.location(e), elemsType, elemType) + elemsType = c.joinTypes(e, elemsType, elemType) } if elemsType == nil { // If the list is empty, assign free type var to elem type. 
elemsType = c.newTypeVar() } - c.setType(e, decls.NewListType(elemsType)) -} - -func (c *checker) checkCreateStruct(e *exprpb.Expr) { - str := e.GetStructExpr() - if str.GetMessageName() != "" { - c.checkCreateMessage(e) - } else { - c.checkCreateMap(e) - } + c.setType(e, types.NewListType(elemsType)) } -func (c *checker) checkCreateMap(e *exprpb.Expr) { - mapVal := e.GetStructExpr() - var mapKeyType *exprpb.Type - var mapValueType *exprpb.Type - for _, ent := range mapVal.GetEntries() { - key := ent.GetMapKey() +func (c *checker) checkCreateMap(e ast.Expr) { + mapVal := e.AsMap() + var mapKeyType *types.Type + var mapValueType *types.Type + for _, e := range mapVal.Entries() { + entry := e.AsMapEntry() + key := entry.Key() c.check(key) - mapKeyType = c.joinTypes(c.location(key), mapKeyType, c.getType(key)) + mapKeyType = c.joinTypes(key, mapKeyType, c.getType(key)) - val := ent.GetValue() + val := entry.Value() c.check(val) valType := c.getType(val) - if ent.GetOptionalEntry() { + if entry.IsOptional() { var isOptional bool valType, isOptional = maybeUnwrapOptional(valType) if !isOptional && !isDyn(valType) { - c.errors.typeMismatch(c.location(val), decls.NewOptionalType(valType), valType) + c.errors.typeMismatch(val.ID(), c.location(val), types.NewOptionalType(valType), valType) } } - mapValueType = c.joinTypes(c.location(val), mapValueType, valType) + mapValueType = c.joinTypes(val, mapValueType, valType) } if mapKeyType == nil { // If the map is empty, assign free type variables to typeKey and value type. mapKeyType = c.newTypeVar() mapValueType = c.newTypeVar() } - c.setType(e, decls.NewMapType(mapKeyType, mapValueType)) + c.setType(e, types.NewMapType(mapKeyType, mapValueType)) } -func (c *checker) checkCreateMessage(e *exprpb.Expr) { - msgVal := e.GetStructExpr() +func (c *checker) checkCreateStruct(e ast.Expr) { + msgVal := e.AsStruct() // Determine the type of the message. - messageType := decls.Error - decl := c.env.LookupIdent(msgVal.GetMessageName()) - if decl == nil { + resultType := types.ErrorType + ident := c.env.LookupIdent(msgVal.TypeName()) + if ident == nil { c.errors.undeclaredReference( - c.location(e), c.env.container.Name(), msgVal.GetMessageName()) + e.ID(), c.location(e), c.env.container.Name(), msgVal.TypeName()) + c.setType(e, types.ErrorType) return } // Ensure the type name is fully qualified in the AST. - msgVal.MessageName = decl.GetName() - c.setReference(e, newIdentReference(decl.GetName(), nil)) - ident := decl.GetIdent() - identKind := kindOf(ident.GetType()) - if identKind != kindError { - if identKind != kindType { - c.errors.notAType(c.location(e), ident.GetType()) + typeName := ident.Name() + if msgVal.TypeName() != typeName { + e.SetKindCase(c.NewStruct(e.ID(), typeName, msgVal.Fields())) + msgVal = e.AsStruct() + } + c.setReference(e, ast.NewIdentReference(typeName, nil)) + identKind := ident.Type().Kind() + if identKind != types.ErrorKind { + if identKind != types.TypeKind { + c.errors.notAType(e.ID(), c.location(e), ident.Type().DeclaredTypeName()) } else { - messageType = ident.GetType().GetType() - if kindOf(messageType) != kindObject { - c.errors.notAMessageType(c.location(e), messageType) - messageType = decls.Error + resultType = ident.Type().Parameters()[0] + // Backwards compatibility test between well-known types and message types + // In this context, the type is being instantiated by its protobuf name which + // is not ideal or recommended, but some users expect this to work. 
+ if isWellKnownType(resultType) { + typeName = getWellKnownTypeName(resultType) + } else if resultType.Kind() == types.StructKind { + typeName = resultType.DeclaredTypeName() + } else { + c.errors.notAMessageType(e.ID(), c.location(e), resultType.DeclaredTypeName()) + resultType = types.ErrorType } } } - if isObjectWellKnownType(messageType) { - c.setType(e, getObjectWellKnownType(messageType)) - } else { - c.setType(e, messageType) - } + c.setType(e, resultType) // Check the field initializers. - for _, ent := range msgVal.GetEntries() { - field := ent.GetFieldKey() - value := ent.GetValue() + for _, f := range msgVal.Fields() { + field := f.AsStructField() + fieldName := field.Name() + value := field.Value() c.check(value) - fieldType := decls.Error - ft, found := c.lookupFieldType(c.locationByID(ent.GetId()), messageType.GetMessageType(), field) + fieldType := types.ErrorType + ft, found := c.lookupFieldType(f.ID(), typeName, fieldName) if found { - fieldType = ft.Type + fieldType = ft } valType := c.getType(value) - if ent.GetOptionalEntry() { + if field.IsOptional() { var isOptional bool valType, isOptional = maybeUnwrapOptional(valType) if !isOptional && !isDyn(valType) { - c.errors.typeMismatch(c.location(value), decls.NewOptionalType(valType), valType) + c.errors.typeMismatch(value.ID(), c.location(value), types.NewOptionalType(valType), valType) } } if !c.isAssignable(fieldType, valType) { - c.errors.fieldTypeMismatch(c.locationByID(ent.Id), field, fieldType, valType) + c.errors.fieldTypeMismatch(f.ID(), c.locationByID(f.ID()), fieldName, fieldType, valType) } } } -func (c *checker) checkComprehension(e *exprpb.Expr) { - comp := e.GetComprehensionExpr() - c.check(comp.GetIterRange()) - c.check(comp.GetAccuInit()) - accuType := c.getType(comp.GetAccuInit()) - rangeType := substitute(c.mappings, c.getType(comp.GetIterRange()), false) - var varType *exprpb.Type - - switch kindOf(rangeType) { - case kindList: - varType = rangeType.GetListType().GetElemType() - case kindMap: +func (c *checker) checkComprehension(e ast.Expr) { + comp := e.AsComprehension() + c.check(comp.IterRange()) + c.check(comp.AccuInit()) + accuType := c.getType(comp.AccuInit()) + rangeType := substitute(c.mappings, c.getType(comp.IterRange()), false) + var varType *types.Type + + switch rangeType.Kind() { + case types.ListKind: + varType = rangeType.Parameters()[0] + case types.MapKind: // Ranges over the keys. - varType = rangeType.GetMapType().GetKeyType() - case kindDyn, kindError, kindTypeParam: + varType = rangeType.Parameters()[0] + case types.DynKind, types.ErrorKind, types.TypeParamKind: // Set the range type to DYN to prevent assignment to a potentially incorrect type // at a later point in type-checking. The isAssignable call will update the type // substitutions for the type param under the covers. - c.isAssignable(decls.Dyn, rangeType) + c.isAssignable(types.DynType, rangeType) // Set the range iteration variable to type DYN as well. - varType = decls.Dyn + varType = types.DynType default: - c.errors.notAComprehensionRange(c.location(comp.GetIterRange()), rangeType) - varType = decls.Error + c.errors.notAComprehensionRange(comp.IterRange().ID(), c.location(comp.IterRange()), rangeType) + varType = types.ErrorType } // Create a scope for the comprehension since it has a local accumulation variable. // This scope will contain the accumulation variable used to compute the result. 
c.env = c.env.enterScope() - c.env.Add(decls.NewVar(comp.GetAccuVar(), accuType)) + c.env.AddIdents(decls.NewVariable(comp.AccuVar(), accuType)) // Create a block scope for the loop. c.env = c.env.enterScope() - c.env.Add(decls.NewVar(comp.GetIterVar(), varType)) + c.env.AddIdents(decls.NewVariable(comp.IterVar(), varType)) // Check the variable references in the condition and step. - c.check(comp.GetLoopCondition()) - c.assertType(comp.GetLoopCondition(), decls.Bool) - c.check(comp.GetLoopStep()) - c.assertType(comp.GetLoopStep(), accuType) + c.check(comp.LoopCondition()) + c.assertType(comp.LoopCondition(), types.BoolType) + c.check(comp.LoopStep()) + c.assertType(comp.LoopStep(), accuType) // Exit the loop's block scope before checking the result. c.env = c.env.exitScope() - c.check(comp.GetResult()) + c.check(comp.Result()) // Exit the comprehension scope. c.env = c.env.exitScope() - c.setType(e, substitute(c.mappings, c.getType(comp.GetResult()), false)) + c.setType(e, substitute(c.mappings, c.getType(comp.Result()), false)) } // Checks compatibility of joined types, and returns the most general common type. -func (c *checker) joinTypes(loc common.Location, - previous *exprpb.Type, - current *exprpb.Type) *exprpb.Type { +func (c *checker) joinTypes(e ast.Expr, previous, current *types.Type) *types.Type { if previous == nil { return current } @@ -584,23 +547,23 @@ func (c *checker) joinTypes(loc common.Location, return mostGeneral(previous, current) } if c.dynAggregateLiteralElementTypesEnabled() { - return decls.Dyn + return types.DynType } - c.errors.typeMismatch(loc, previous, current) - return decls.Error + c.errors.typeMismatch(e.ID(), c.location(e), previous, current) + return types.ErrorType } func (c *checker) dynAggregateLiteralElementTypesEnabled() bool { return c.env.aggLitElemType == dynElementType } -func (c *checker) newTypeVar() *exprpb.Type { +func (c *checker) newTypeVar() *types.Type { id := c.freeTypeVarCounter c.freeTypeVarCounter++ - return decls.NewTypeParamType(fmt.Sprintf("_var%d", id)) + return types.NewTypeParamType(fmt.Sprintf("_var%d", id)) } -func (c *checker) isAssignable(t1 *exprpb.Type, t2 *exprpb.Type) bool { +func (c *checker) isAssignable(t1, t2 *types.Type) bool { subs := isAssignable(c.mappings, t1, t2) if subs != nil { c.mappings = subs @@ -610,7 +573,7 @@ func (c *checker) isAssignable(t1 *exprpb.Type, t2 *exprpb.Type) bool { return false } -func (c *checker) isAssignableList(l1 []*exprpb.Type, l2 []*exprpb.Type) bool { +func (c *checker) isAssignableList(l1, l2 []*types.Type) bool { subs := isAssignableList(c.mappings, l1, l2) if subs != nil { c.mappings = subs @@ -620,87 +583,114 @@ func (c *checker) isAssignableList(l1 []*exprpb.Type, l2 []*exprpb.Type) bool { return false } -func (c *checker) lookupFieldType(l common.Location, messageType string, fieldName string) (*ref.FieldType, bool) { - if _, found := c.env.provider.FindType(messageType); !found { - // This should not happen, anyway, report an error. 
- c.errors.unexpectedFailedResolution(l, messageType) - return nil, false - } - - if ft, found := c.env.provider.FindFieldType(messageType, fieldName); found { - return ft, found +func maybeUnwrapString(e ast.Expr) (string, bool) { + switch e.Kind() { + case ast.LiteralKind: + literal := e.AsLiteral() + switch v := literal.(type) { + case types.String: + return string(v), true + } } - - c.errors.undefinedField(l, fieldName) - return nil, false + return "", false } -func (c *checker) setType(e *exprpb.Expr, t *exprpb.Type) { - if old, found := c.types[e.GetId()]; found && !proto.Equal(old, t) { - c.errors.ReportError(c.location(e), - "(Incompatible) Type already exists for expression: %v(%d) old:%v, new:%v", e, e.GetId(), old, t) +func (c *checker) setType(e ast.Expr, t *types.Type) { + if old, found := c.TypeMap()[e.ID()]; found && !old.IsExactType(t) { + c.errors.incompatibleType(e.ID(), c.location(e), e, old, t) return } - c.types[e.GetId()] = t + c.SetType(e.ID(), t) } -func (c *checker) getType(e *exprpb.Expr) *exprpb.Type { - return c.types[e.GetId()] +func (c *checker) getType(e ast.Expr) *types.Type { + return c.TypeMap()[e.ID()] } -func (c *checker) setReference(e *exprpb.Expr, r *exprpb.Reference) { - if old, found := c.references[e.GetId()]; found && !proto.Equal(old, r) { - c.errors.ReportError(c.location(e), - "Reference already exists for expression: %v(%d) old:%v, new:%v", e, e.GetId(), old, r) +func (c *checker) setReference(e ast.Expr, r *ast.ReferenceInfo) { + if old, found := c.ReferenceMap()[e.ID()]; found && !old.Equals(r) { + c.errors.referenceRedefinition(e.ID(), c.location(e), e, old, r) return } - c.references[e.GetId()] = r + c.SetReference(e.ID(), r) } -func (c *checker) assertType(e *exprpb.Expr, t *exprpb.Type) { +func (c *checker) assertType(e ast.Expr, t *types.Type) { if !c.isAssignable(t, c.getType(e)) { - c.errors.typeMismatch(c.location(e), t, c.getType(e)) + c.errors.typeMismatch(e.ID(), c.location(e), t, c.getType(e)) } } type overloadResolution struct { - Reference *exprpb.Reference - Type *exprpb.Type + Type *types.Type + Reference *ast.ReferenceInfo } -func newResolution(checkedRef *exprpb.Reference, t *exprpb.Type) *overloadResolution { +func newResolution(r *ast.ReferenceInfo, t *types.Type) *overloadResolution { return &overloadResolution{ - Reference: checkedRef, + Reference: r, Type: t, } } -func (c *checker) location(e *exprpb.Expr) common.Location { - return c.locationByID(e.GetId()) +func (c *checker) location(e ast.Expr) common.Location { + return c.locationByID(e.ID()) } func (c *checker) locationByID(id int64) common.Location { - positions := c.sourceInfo.GetPositions() - var line = 1 - if offset, found := positions[id]; found { - col := int(offset) - for _, lineOffset := range c.sourceInfo.GetLineOffsets() { - if lineOffset < offset { - line++ - col = int(offset - lineOffset) - } else { - break - } - } - return common.NewLocation(line, col) + return c.SourceInfo().GetStartLocation(id) +} + +func (c *checker) lookupFieldType(exprID int64, structType, fieldName string) (*types.Type, bool) { + if _, found := c.env.provider.FindStructType(structType); !found { + // This should not happen, anyway, report an error. 
+ c.errors.unexpectedFailedResolution(exprID, c.locationByID(exprID), structType) + return nil, false } - return common.NoLocation + + if ft, found := c.env.provider.FindStructFieldType(structType, fieldName); found { + return ft.Type, found + } + + c.errors.undefinedField(exprID, c.locationByID(exprID), fieldName) + return nil, false } -func newIdentReference(name string, value *exprpb.Constant) *exprpb.Reference { - return &exprpb.Reference{Name: name, Value: value} +func isWellKnownType(t *types.Type) bool { + switch t.Kind() { + case types.AnyKind, types.TimestampKind, types.DurationKind, types.DynKind, types.NullTypeKind: + return true + case types.BoolKind, types.BytesKind, types.DoubleKind, types.IntKind, types.StringKind, types.UintKind: + return t.IsAssignableType(types.NullType) + case types.ListKind: + return t.Parameters()[0] == types.DynType + case types.MapKind: + return t.Parameters()[0] == types.StringType && t.Parameters()[1] == types.DynType + } + return false } -func newFunctionReference(overloads ...string) *exprpb.Reference { - return &exprpb.Reference{OverloadId: overloads} +func getWellKnownTypeName(t *types.Type) string { + if name, found := wellKnownTypes[t.Kind()]; found { + return name + } + return "" } + +var ( + wellKnownTypes = map[types.Kind]string{ + types.AnyKind: "google.protobuf.Any", + types.BoolKind: "google.protobuf.BoolValue", + types.BytesKind: "google.protobuf.BytesValue", + types.DoubleKind: "google.protobuf.DoubleValue", + types.DurationKind: "google.protobuf.Duration", + types.DynKind: "google.protobuf.Value", + types.IntKind: "google.protobuf.Int64Value", + types.ListKind: "google.protobuf.ListValue", + types.NullTypeKind: "google.protobuf.NullValue", + types.MapKind: "google.protobuf.Struct", + types.StringKind: "google.protobuf.StringValue", + types.TimestampKind: "google.protobuf.Timestamp", + types.UintKind: "google.protobuf.UInt64Value", + } +) diff --git a/vendor/github.com/google/cel-go/checker/cost.go b/vendor/github.com/google/cel-go/checker/cost.go index ef58df766..3470d0a3f 100644 --- a/vendor/github.com/google/cel-go/checker/cost.go +++ b/vendor/github.com/google/cel-go/checker/cost.go @@ -18,10 +18,10 @@ import ( "math" "github.com/google/cel-go/common" + "github.com/google/cel-go/common/ast" "github.com/google/cel-go/common/overloads" + "github.com/google/cel-go/common/types" "github.com/google/cel-go/parser" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" ) // WARNING: Any changes to cost calculations in this file require a corresponding change in interpreter/runtimecost.go @@ -54,9 +54,9 @@ type AstNode interface { // The first path element is a variable. All subsequent path elements are one of: field name, '@items', '@keys', '@values'. Path() []string // Type returns the deduced type of the AstNode. - Type() *exprpb.Type + Type() *types.Type // Expr returns the expression of the AstNode. - Expr() *exprpb.Expr + Expr() ast.Expr // ComputedSize returns a size estimate of the AstNode derived from information available in the CEL expression. // For constants and inline list and map declarations, the exact size is returned. For concatenated list, strings // and bytes, the size is derived from the size estimates of the operands. 
nil is returned if there is no @@ -66,8 +66,8 @@ type AstNode interface { type astNode struct { path []string - t *exprpb.Type - expr *exprpb.Expr + t *types.Type + expr ast.Expr derivedSize *SizeEstimate } @@ -75,11 +75,11 @@ func (e astNode) Path() []string { return e.path } -func (e astNode) Type() *exprpb.Type { +func (e astNode) Type() *types.Type { return e.t } -func (e astNode) Expr() *exprpb.Expr { +func (e astNode) Expr() ast.Expr { return e.expr } @@ -88,29 +88,27 @@ func (e astNode) ComputedSize() *SizeEstimate { return e.derivedSize } var v uint64 - switch ek := e.expr.GetExprKind().(type) { - case *exprpb.Expr_ConstExpr: - switch ck := ek.ConstExpr.GetConstantKind().(type) { - case *exprpb.Constant_StringValue: + switch e.expr.Kind() { + case ast.LiteralKind: + switch ck := e.expr.AsLiteral().(type) { + case types.String: // converting to runes here is an O(n) operation, but // this is consistent with how size is computed at runtime, // and how the language definition defines string size - v = uint64(len([]rune(ck.StringValue))) - case *exprpb.Constant_BytesValue: - v = uint64(len(ck.BytesValue)) - case *exprpb.Constant_BoolValue, *exprpb.Constant_DoubleValue, *exprpb.Constant_DurationValue, - *exprpb.Constant_Int64Value, *exprpb.Constant_TimestampValue, *exprpb.Constant_Uint64Value, - *exprpb.Constant_NullValue: + v = uint64(len([]rune(ck))) + case types.Bytes: + v = uint64(len(ck)) + case types.Bool, types.Double, types.Duration, + types.Int, types.Timestamp, types.Uint, + types.Null: v = uint64(1) default: return nil } - case *exprpb.Expr_ListExpr: - v = uint64(len(ek.ListExpr.GetElements())) - case *exprpb.Expr_StructExpr: - if ek.StructExpr.GetMessageName() == "" { - v = uint64(len(ek.StructExpr.GetEntries())) - } + case ast.ListKind: + v = uint64(e.expr.AsList().Size()) + case ast.MapKind: + v = uint64(e.expr.AsMap().Size()) default: return nil } @@ -228,7 +226,7 @@ func addUint64NoOverflow(x, y uint64) uint64 { // multiplyUint64NoOverflow multiplies non-negative ints. If the result is exceeds math.MaxUint64, math.MaxUint64 // is returned. func multiplyUint64NoOverflow(x, y uint64) uint64 { - if x > 0 && y > 0 && x > math.MaxUint64/y { + if y != 0 && x > math.MaxUint64/y { return math.MaxUint64 } return x * y @@ -240,7 +238,11 @@ func multiplyByCostFactor(x uint64, y float64) uint64 { if xFloat > 0 && y > 0 && xFloat > math.MaxUint64/y { return math.MaxUint64 } - return uint64(math.Ceil(xFloat * y)) + ceil := math.Ceil(xFloat * y) + if ceil >= doubleTwoTo64 { + return math.MaxUint64 + } + return uint64(ceil) } var ( @@ -258,9 +260,10 @@ type coster struct { // iterRanges tracks the iterRange of each iterVar. iterRanges iterRangeScopes // computedSizes tracks the computed sizes of call results. - computedSizes map[int64]SizeEstimate - checkedExpr *exprpb.CheckedExpr - estimator CostEstimator + computedSizes map[int64]SizeEstimate + checkedAST *ast.AST + estimator CostEstimator + overloadEstimators map[string]FunctionEstimator // presenceTestCost will either be a zero or one based on whether has() macros count against cost computations. presenceTestCost CostEstimate } @@ -268,8 +271,8 @@ type coster struct { // Use a stack of iterVar -> iterRange Expr Ids to handle shadowed variable names. 
type iterRangeScopes map[string][]int64 -func (vs iterRangeScopes) push(varName string, expr *exprpb.Expr) { - vs[varName] = append(vs[varName], expr.GetId()) +func (vs iterRangeScopes) push(varName string, expr ast.Expr) { + vs[varName] = append(vs[varName], expr.ID()) } func (vs iterRangeScopes) pop(varName string) { @@ -289,6 +292,7 @@ func (vs iterRangeScopes) peek(varName string) (int64, bool) { type CostOption func(*coster) error // PresenceTestHasCost determines whether presence testing has a cost of one or zero. +// // Defaults to presence test has a cost of one. func PresenceTestHasCost(hasCost bool) CostOption { return func(c *coster) error { @@ -301,15 +305,30 @@ func PresenceTestHasCost(hasCost bool) CostOption { } } +// FunctionEstimator provides a CallEstimate given the target and arguments for a specific function, overload pair. +type FunctionEstimator func(estimator CostEstimator, target *AstNode, args []AstNode) *CallEstimate + +// OverloadCostEstimate binds a FunctionCoster to a specific function overload ID. +// +// When a OverloadCostEstimate is provided, it will override the cost calculation of the CostEstimator provided to +// the Cost() call. +func OverloadCostEstimate(overloadID string, functionCoster FunctionEstimator) CostOption { + return func(c *coster) error { + c.overloadEstimators[overloadID] = functionCoster + return nil + } +} + // Cost estimates the cost of the parsed and type checked CEL expression. -func Cost(checker *exprpb.CheckedExpr, estimator CostEstimator, opts ...CostOption) (CostEstimate, error) { +func Cost(checked *ast.AST, estimator CostEstimator, opts ...CostOption) (CostEstimate, error) { c := &coster{ - checkedExpr: checker, - estimator: estimator, - exprPath: map[int64][]string{}, - iterRanges: map[string][]int64{}, - computedSizes: map[int64]SizeEstimate{}, - presenceTestCost: CostEstimate{Min: 1, Max: 1}, + checkedAST: checked, + estimator: estimator, + overloadEstimators: map[string]FunctionEstimator{}, + exprPath: map[int64][]string{}, + iterRanges: map[string][]int64{}, + computedSizes: map[int64]SizeEstimate{}, + presenceTestCost: CostEstimate{Min: 1, Max: 1}, } for _, opt := range opts { err := opt(c) @@ -317,28 +336,30 @@ func Cost(checker *exprpb.CheckedExpr, estimator CostEstimator, opts ...CostOpti return CostEstimate{}, err } } - return c.cost(checker.GetExpr()), nil + return c.cost(checked.Expr()), nil } -func (c *coster) cost(e *exprpb.Expr) CostEstimate { +func (c *coster) cost(e ast.Expr) CostEstimate { if e == nil { return CostEstimate{} } var cost CostEstimate - switch e.GetExprKind().(type) { - case *exprpb.Expr_ConstExpr: + switch e.Kind() { + case ast.LiteralKind: cost = constCost - case *exprpb.Expr_IdentExpr: + case ast.IdentKind: cost = c.costIdent(e) - case *exprpb.Expr_SelectExpr: + case ast.SelectKind: cost = c.costSelect(e) - case *exprpb.Expr_CallExpr: + case ast.CallKind: cost = c.costCall(e) - case *exprpb.Expr_ListExpr: + case ast.ListKind: cost = c.costCreateList(e) - case *exprpb.Expr_StructExpr: + case ast.MapKind: + cost = c.costCreateMap(e) + case ast.StructKind: cost = c.costCreateStruct(e) - case *exprpb.Expr_ComprehensionExpr: + case ast.ComprehensionKind: cost = c.costComprehension(e) default: return CostEstimate{} @@ -346,53 +367,51 @@ func (c *coster) cost(e *exprpb.Expr) CostEstimate { return cost } -func (c *coster) costIdent(e *exprpb.Expr) CostEstimate { - identExpr := e.GetIdentExpr() - +func (c *coster) costIdent(e ast.Expr) CostEstimate { + identName := e.AsIdent() // build and track the 
field path - if iterRange, ok := c.iterRanges.peek(identExpr.GetName()); ok { - switch c.checkedExpr.TypeMap[iterRange].GetTypeKind().(type) { - case *exprpb.Type_ListType_: + if iterRange, ok := c.iterRanges.peek(identName); ok { + switch c.checkedAST.GetType(iterRange).Kind() { + case types.ListKind: c.addPath(e, append(c.exprPath[iterRange], "@items")) - case *exprpb.Type_MapType_: + case types.MapKind: c.addPath(e, append(c.exprPath[iterRange], "@keys")) } } else { - c.addPath(e, []string{identExpr.GetName()}) + c.addPath(e, []string{identName}) } return selectAndIdentCost } -func (c *coster) costSelect(e *exprpb.Expr) CostEstimate { - sel := e.GetSelectExpr() +func (c *coster) costSelect(e ast.Expr) CostEstimate { + sel := e.AsSelect() var sum CostEstimate - if sel.GetTestOnly() { + if sel.IsTestOnly() { // recurse, but do not add any cost // this is equivalent to how evalTestOnly increments the runtime cost counter // but does not add any additional cost for the qualifier, except here we do // the reverse (ident adds cost) sum = sum.Add(c.presenceTestCost) - sum = sum.Add(c.cost(sel.GetOperand())) + sum = sum.Add(c.cost(sel.Operand())) return sum } - sum = sum.Add(c.cost(sel.GetOperand())) - targetType := c.getType(sel.GetOperand()) - switch kindOf(targetType) { - case kindMap, kindObject, kindTypeParam: + sum = sum.Add(c.cost(sel.Operand())) + targetType := c.getType(sel.Operand()) + switch targetType.Kind() { + case types.MapKind, types.StructKind, types.TypeParamKind: sum = sum.Add(selectAndIdentCost) } // build and track the field path - c.addPath(e, append(c.getPath(sel.GetOperand()), sel.GetField())) + c.addPath(e, append(c.getPath(sel.Operand()), sel.FieldName())) return sum } -func (c *coster) costCall(e *exprpb.Expr) CostEstimate { - call := e.GetCallExpr() - target := call.GetTarget() - args := call.GetArgs() +func (c *coster) costCall(e ast.Expr) CostEstimate { + call := e.AsCall() + args := call.Args() var sum CostEstimate @@ -403,22 +422,20 @@ func (c *coster) costCall(e *exprpb.Expr) CostEstimate { argTypes[i] = c.newAstNode(arg) } - ref := c.checkedExpr.ReferenceMap[e.GetId()] - if ref == nil || len(ref.GetOverloadId()) == 0 { + overloadIDs := c.checkedAST.GetOverloadIDs(e.ID()) + if len(overloadIDs) == 0 { return CostEstimate{} } var targetType AstNode - if target != nil { - if call.Target != nil { - sum = sum.Add(c.cost(call.GetTarget())) - targetType = c.newAstNode(call.GetTarget()) - } + if call.IsMemberFunction() { + sum = sum.Add(c.cost(call.Target())) + targetType = c.newAstNode(call.Target()) } // Pick a cost estimate range that covers all the overload cost estimation ranges fnCost := CostEstimate{Min: uint64(math.MaxUint64), Max: 0} var resultSize *SizeEstimate - for _, overload := range ref.GetOverloadId() { - overloadCost := c.functionCost(call.GetFunction(), overload, &targetType, argTypes, argCosts) + for _, overload := range overloadIDs { + overloadCost := c.functionCost(call.FunctionName(), overload, &targetType, argTypes, argCosts) fnCost = fnCost.Union(overloadCost.CostEstimate) if overloadCost.ResultSize != nil { if resultSize == nil { @@ -441,62 +458,54 @@ func (c *coster) costCall(e *exprpb.Expr) CostEstimate { } } if resultSize != nil { - c.computedSizes[e.GetId()] = *resultSize + c.computedSizes[e.ID()] = *resultSize } return sum.Add(fnCost) } -func (c *coster) costCreateList(e *exprpb.Expr) CostEstimate { - create := e.GetListExpr() +func (c *coster) costCreateList(e ast.Expr) CostEstimate { + create := e.AsList() var sum CostEstimate - for _, e 
:= range create.GetElements() { + for _, e := range create.Elements() { sum = sum.Add(c.cost(e)) } return sum.Add(createListBaseCost) } -func (c *coster) costCreateStruct(e *exprpb.Expr) CostEstimate { - str := e.GetStructExpr() - if str.MessageName != "" { - return c.costCreateMessage(e) - } - return c.costCreateMap(e) -} - -func (c *coster) costCreateMap(e *exprpb.Expr) CostEstimate { - mapVal := e.GetStructExpr() +func (c *coster) costCreateMap(e ast.Expr) CostEstimate { + mapVal := e.AsMap() var sum CostEstimate - for _, ent := range mapVal.GetEntries() { - key := ent.GetMapKey() - sum = sum.Add(c.cost(key)) - - sum = sum.Add(c.cost(ent.GetValue())) + for _, ent := range mapVal.Entries() { + entry := ent.AsMapEntry() + sum = sum.Add(c.cost(entry.Key())) + sum = sum.Add(c.cost(entry.Value())) } return sum.Add(createMapBaseCost) } -func (c *coster) costCreateMessage(e *exprpb.Expr) CostEstimate { - msgVal := e.GetStructExpr() +func (c *coster) costCreateStruct(e ast.Expr) CostEstimate { + msgVal := e.AsStruct() var sum CostEstimate - for _, ent := range msgVal.GetEntries() { - sum = sum.Add(c.cost(ent.GetValue())) + for _, ent := range msgVal.Fields() { + field := ent.AsStructField() + sum = sum.Add(c.cost(field.Value())) } return sum.Add(createMessageBaseCost) } -func (c *coster) costComprehension(e *exprpb.Expr) CostEstimate { - comp := e.GetComprehensionExpr() +func (c *coster) costComprehension(e ast.Expr) CostEstimate { + comp := e.AsComprehension() var sum CostEstimate - sum = sum.Add(c.cost(comp.GetIterRange())) - sum = sum.Add(c.cost(comp.GetAccuInit())) + sum = sum.Add(c.cost(comp.IterRange())) + sum = sum.Add(c.cost(comp.AccuInit())) // Track the iterRange of each IterVar for field path construction - c.iterRanges.push(comp.GetIterVar(), comp.GetIterRange()) - loopCost := c.cost(comp.GetLoopCondition()) - stepCost := c.cost(comp.GetLoopStep()) - c.iterRanges.pop(comp.GetIterVar()) - sum = sum.Add(c.cost(comp.Result)) - rangeCnt := c.sizeEstimate(c.newAstNode(comp.GetIterRange())) + c.iterRanges.push(comp.IterVar(), comp.IterRange()) + loopCost := c.cost(comp.LoopCondition()) + stepCost := c.cost(comp.LoopStep()) + c.iterRanges.pop(comp.IterVar()) + sum = sum.Add(c.cost(comp.Result())) + rangeCnt := c.sizeEstimate(c.newAstNode(comp.IterRange())) rangeCost := rangeCnt.MultiplyByCost(stepCost.Add(loopCost)) sum = sum.Add(rangeCost) @@ -530,7 +539,14 @@ func (c *coster) functionCost(function, overloadID string, target *AstNode, args } return sum } - + if len(c.overloadEstimators) != 0 { + if estimator, found := c.overloadEstimators[overloadID]; found { + if est := estimator(c.estimator, target, args); est != nil { + callEst := *est + return CallEstimate{CostEstimate: callEst.Add(argCostSum()), ResultSize: est.ResultSize} + } + } + } if est := c.estimator.EstimateCallCost(function, overloadID, target, args); est != nil { callEst := *est return CallEstimate{CostEstimate: callEst.Add(argCostSum()), ResultSize: est.ResultSize} @@ -641,44 +657,46 @@ func (c *coster) functionCost(function, overloadID string, target *AstNode, args return CallEstimate{CostEstimate: CostEstimate{Min: 1, Max: 1}.Add(argCostSum())} } -func (c *coster) getType(e *exprpb.Expr) *exprpb.Type { - return c.checkedExpr.TypeMap[e.GetId()] +func (c *coster) getType(e ast.Expr) *types.Type { + return c.checkedAST.GetType(e.ID()) } -func (c *coster) getPath(e *exprpb.Expr) []string { - return c.exprPath[e.GetId()] +func (c *coster) getPath(e ast.Expr) []string { + return c.exprPath[e.ID()] } -func (c *coster) addPath(e 
*exprpb.Expr, path []string) { - c.exprPath[e.GetId()] = path +func (c *coster) addPath(e ast.Expr, path []string) { + c.exprPath[e.ID()] = path } -func (c *coster) newAstNode(e *exprpb.Expr) *astNode { +func (c *coster) newAstNode(e ast.Expr) *astNode { path := c.getPath(e) if len(path) > 0 && path[0] == parser.AccumulatorName { // only provide paths to root vars; omit accumulator vars path = nil } var derivedSize *SizeEstimate - if size, ok := c.computedSizes[e.GetId()]; ok { + if size, ok := c.computedSizes[e.ID()]; ok { derivedSize = &size } - return &astNode{path: path, t: c.getType(e), expr: e, derivedSize: derivedSize} + return &astNode{ + path: path, + t: c.getType(e), + expr: e, + derivedSize: derivedSize} } // isScalar returns true if the given type is known to be of a constant size at // compile time. isScalar will return false for strings (they are variable-width) // in addition to protobuf.Any and protobuf.Value (their size is not knowable at compile time). -func isScalar(t *exprpb.Type) bool { - switch kindOf(t) { - case kindPrimitive: - if t.GetPrimitive() != exprpb.Type_STRING && t.GetPrimitive() != exprpb.Type_BYTES { - return true - } - case kindWellKnown: - if t.GetWellKnown() == exprpb.Type_DURATION || t.GetWellKnown() == exprpb.Type_TIMESTAMP { - return true - } +func isScalar(t *types.Type) bool { + switch t.Kind() { + case types.BoolKind, types.DoubleKind, types.DurationKind, types.IntKind, types.TimestampKind, types.UintKind: + return true } return false } + +var ( + doubleTwoTo64 = math.Ldexp(1.0, 64) +) diff --git a/vendor/github.com/google/cel-go/checker/decls/BUILD.bazel b/vendor/github.com/google/cel-go/checker/decls/BUILD.bazel index 9384be450..a6b0be292 100644 --- a/vendor/github.com/google/cel-go/checker/decls/BUILD.bazel +++ b/vendor/github.com/google/cel-go/checker/decls/BUILD.bazel @@ -9,7 +9,6 @@ go_library( name = "go_default_library", srcs = [ "decls.go", - "scopes.go", ], importpath = "github.com/google/cel-go/checker/decls", deps = [ diff --git a/vendor/github.com/google/cel-go/checker/env.go b/vendor/github.com/google/cel-go/checker/env.go index be89d2d68..70682b17c 100644 --- a/vendor/github.com/google/cel-go/checker/env.go +++ b/vendor/github.com/google/cel-go/checker/env.go @@ -18,17 +18,11 @@ import ( "fmt" "strings" - "google.golang.org/protobuf/proto" - - "github.com/google/cel-go/checker/decls" "github.com/google/cel-go/common/containers" + "github.com/google/cel-go/common/decls" "github.com/google/cel-go/common/overloads" "github.com/google/cel-go/common/types" - "github.com/google/cel-go/common/types/pb" - "github.com/google/cel-go/common/types/ref" "github.com/google/cel-go/parser" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" ) type aggregateLiteralElementType int @@ -76,15 +70,15 @@ var ( // which can be used to assist with type-checking. type Env struct { container *containers.Container - provider ref.TypeProvider - declarations *decls.Scopes + provider types.Provider + declarations *Scopes aggLitElemType aggregateLiteralElementType filteredOverloadIDs map[string]struct{} } // NewEnv returns a new *Env with the given parameters. 
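The per-overload costing hooks added to checker/cost.go above (FunctionEstimator, OverloadCostEstimate) can be exercised roughly as follows. This is a minimal sketch, not part of the patch: the overload ID "my_fn_string" and its fixed cost are made-up placeholders, and it assumes CostEstimator's two methods are EstimateSize and EstimateCallCost as exported by the checker package (only EstimateCallCost is visible in this diff).

package costexample

import (
	"github.com/google/cel-go/checker"
	"github.com/google/cel-go/common/ast"
)

// noopEstimator defers entirely to the default cost model by returning nil
// for both size and call estimates.
type noopEstimator struct{}

func (noopEstimator) EstimateSize(element checker.AstNode) *checker.SizeEstimate { return nil }

func (noopEstimator) EstimateCallCost(function, overloadID string, target *checker.AstNode, args []checker.AstNode) *checker.CallEstimate {
	return nil
}

// estimateWithOverride charges a flat cost of 10 for the hypothetical
// "my_fn_string" overload and lets every other overload fall through to the
// standard estimation logic.
func estimateWithOverride(checked *ast.AST) (checker.CostEstimate, error) {
	fixedCost := func(_ checker.CostEstimator, _ *checker.AstNode, _ []checker.AstNode) *checker.CallEstimate {
		return &checker.CallEstimate{CostEstimate: checker.CostEstimate{Min: 10, Max: 10}}
	}
	return checker.Cost(checked, noopEstimator{},
		checker.OverloadCostEstimate("my_fn_string", fixedCost))
}

As in the functionCost lookup order shown earlier in this hunk, an OverloadCostEstimate registered for an overload ID takes precedence over the estimator's EstimateCallCost for that overload.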
-func NewEnv(container *containers.Container, provider ref.TypeProvider, opts ...Option) (*Env, error) { - declarations := decls.NewScopes() +func NewEnv(container *containers.Container, provider types.Provider, opts ...Option) (*Env, error) { + declarations := newScopes() declarations.Push() envOptions := &options{} @@ -113,24 +107,31 @@ func NewEnv(container *containers.Container, provider ref.TypeProvider, opts ... }, nil } -// Add adds new Decl protos to the Env. -// Returns an error for identifier redeclarations. -func (e *Env) Add(decls ...*exprpb.Decl) error { +// AddIdents configures the checker with a list of variable declarations. +// +// If there are overlapping declarations, the method will error. +func (e *Env) AddIdents(declarations ...*decls.VariableDecl) error { errMsgs := make([]errorMsg, 0) - for _, decl := range decls { - switch decl.DeclKind.(type) { - case *exprpb.Decl_Ident: - errMsgs = append(errMsgs, e.addIdent(sanitizeIdent(decl))) - case *exprpb.Decl_Function: - errMsgs = append(errMsgs, e.setFunction(sanitizeFunction(decl))...) - } + for _, d := range declarations { + errMsgs = append(errMsgs, e.addIdent(d)) + } + return formatError(errMsgs) +} + +// AddFunctions configures the checker with a list of function declarations. +// +// If there are overlapping declarations, the method will error. +func (e *Env) AddFunctions(declarations ...*decls.FunctionDecl) error { + errMsgs := make([]errorMsg, 0) + for _, d := range declarations { + errMsgs = append(errMsgs, e.setFunction(d)...) } return formatError(errMsgs) } // LookupIdent returns a Decl proto for typeName as an identifier in the Env. // Returns nil if no such identifier is found in the Env. -func (e *Env) LookupIdent(name string) *exprpb.Decl { +func (e *Env) LookupIdent(name string) *decls.VariableDecl { for _, candidate := range e.container.ResolveCandidateNames(name) { if ident := e.declarations.FindIdent(candidate); ident != nil { return ident @@ -139,8 +140,8 @@ func (e *Env) LookupIdent(name string) *exprpb.Decl { // Next try to import the name as a reference to a message type. If found, // the declaration is added to the outest (global) scope of the // environment, so next time we can access it faster. - if t, found := e.provider.FindType(candidate); found { - decl := decls.NewVar(candidate, t) + if t, found := e.provider.FindStructType(candidate); found { + decl := decls.NewVariable(candidate, t) e.declarations.AddIdent(decl) return decl } @@ -148,11 +149,7 @@ func (e *Env) LookupIdent(name string) *exprpb.Decl { // Next try to import this as an enum value by splitting the name in a type prefix and // the enum inside. if enumValue := e.provider.EnumValue(candidate); enumValue.Type() != types.ErrType { - decl := decls.NewIdent(candidate, - decls.Int, - &exprpb.Constant{ - ConstantKind: &exprpb.Constant_Int64Value{ - Int64Value: int64(enumValue.(types.Int))}}) + decl := decls.NewConstant(candidate, types.IntType, enumValue) e.declarations.AddIdent(decl) return decl } @@ -162,7 +159,7 @@ func (e *Env) LookupIdent(name string) *exprpb.Decl { // LookupFunction returns a Decl proto for typeName as a function in env. // Returns nil if no such function is found in env. 
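With Add (proto Decl based) replaced by AddIdents and AddFunctions above, declarations are now registered as github.com/google/cel-go/common/decls values. A small sketch under those assumptions, taking an already-constructed *checker.Env; the names "request_id" and "max_retries" are placeholders, and AddFunctions is the parallel variadic call for *decls.FunctionDecl values.

package checkerexample

import (
	"github.com/google/cel-go/checker"
	"github.com/google/cel-go/common/decls"
	"github.com/google/cel-go/common/types"
)

// declareIdents registers a string variable and an int constant using the new
// variadic AddIdents signature; overlapping declarations come back as an error.
func declareIdents(env *checker.Env) error {
	return env.AddIdents(
		decls.NewVariable("request_id", types.StringType),
		decls.NewConstant("max_retries", types.IntType, types.Int(3)),
	)
}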
-func (e *Env) LookupFunction(name string) *exprpb.Decl { +func (e *Env) LookupFunction(name string) *decls.FunctionDecl { for _, candidate := range e.container.ResolveCandidateNames(name) { if fn := e.declarations.FindFunction(candidate); fn != nil { return fn @@ -171,88 +168,46 @@ func (e *Env) LookupFunction(name string) *exprpb.Decl { return nil } -// addOverload adds overload to function declaration f. -// Returns one or more errorMsg values if the overload overlaps with an existing overload or macro. -func (e *Env) addOverload(f *exprpb.Decl, overload *exprpb.Decl_FunctionDecl_Overload) []errorMsg { - errMsgs := make([]errorMsg, 0) - function := f.GetFunction() - emptyMappings := newMapping() - overloadFunction := decls.NewFunctionType(overload.GetResultType(), - overload.GetParams()...) - overloadErased := substitute(emptyMappings, overloadFunction, true) - for _, existing := range function.GetOverloads() { - existingFunction := decls.NewFunctionType(existing.GetResultType(), existing.GetParams()...) - existingErased := substitute(emptyMappings, existingFunction, true) - overlap := isAssignable(emptyMappings, overloadErased, existingErased) != nil || - isAssignable(emptyMappings, existingErased, overloadErased) != nil - if overlap && - overload.GetIsInstanceFunction() == existing.GetIsInstanceFunction() { - errMsgs = append(errMsgs, - overlappingOverloadError(f.Name, - overload.GetOverloadId(), overloadFunction, - existing.GetOverloadId(), existingFunction)) - } - } - - for _, macro := range parser.AllMacros { - if macro.Function() == f.Name && - macro.IsReceiverStyle() == overload.GetIsInstanceFunction() && - macro.ArgCount() == len(overload.GetParams()) { - errMsgs = append(errMsgs, overlappingMacroError(f.Name, macro.ArgCount())) - } - } - if len(errMsgs) > 0 { - return errMsgs - } - function.Overloads = append(function.GetOverloads(), overload) - return errMsgs -} - // setFunction adds the function Decl to the Env. // Adds a function decl if one doesn't already exist, then adds all overloads from the Decl. // If overload overlaps with an existing overload, adds to the errors in the Env instead. -func (e *Env) setFunction(decl *exprpb.Decl) []errorMsg { - errorMsgs := make([]errorMsg, 0) - overloads := decl.GetFunction().GetOverloads() - current := e.declarations.FindFunction(decl.Name) - if current == nil { - //Add the function declaration without overloads and check the overloads below. 
- current = decls.NewFunction(decl.Name) - } else { - existingOverloads := map[string]*exprpb.Decl_FunctionDecl_Overload{} - for _, overload := range current.GetFunction().GetOverloads() { - existingOverloads[overload.GetOverloadId()] = overload +func (e *Env) setFunction(fn *decls.FunctionDecl) []errorMsg { + errMsgs := make([]errorMsg, 0) + current := e.declarations.FindFunction(fn.Name()) + if current != nil { + var err error + current, err = current.Merge(fn) + if err != nil { + return append(errMsgs, errorMsg(err.Error())) } - newOverloads := []*exprpb.Decl_FunctionDecl_Overload{} - for _, overload := range overloads { - existing, found := existingOverloads[overload.GetOverloadId()] - if !found || !overloadsEqual(existing, overload) { - newOverloads = append(newOverloads, overload) + } else { + current = fn + } + for _, overload := range current.OverloadDecls() { + for _, macro := range parser.AllMacros { + if macro.Function() == current.Name() && + macro.IsReceiverStyle() == overload.IsMemberFunction() && + macro.ArgCount() == len(overload.ArgTypes()) { + errMsgs = append(errMsgs, overlappingMacroError(current.Name(), macro.ArgCount())) } } - overloads = newOverloads - if len(newOverloads) == 0 { - return errorMsgs + if len(errMsgs) > 0 { + return errMsgs } - // Copy on write since we don't know where this original definition came from. - current = proto.Clone(current).(*exprpb.Decl) } e.declarations.SetFunction(current) - for _, overload := range overloads { - errorMsgs = append(errorMsgs, e.addOverload(current, overload)...) - } - return errorMsgs + return errMsgs } // addIdent adds the Decl to the declarations in the Env. // Returns a non-empty errorMsg if the identifier is already declared in the scope. -func (e *Env) addIdent(decl *exprpb.Decl) errorMsg { - current := e.declarations.FindIdentInScope(decl.Name) +func (e *Env) addIdent(decl *decls.VariableDecl) errorMsg { + current := e.declarations.FindIdentInScope(decl.Name()) if current != nil { - if proto.Equal(current, decl) { + if current.DeclarationIsEquivalent(decl) { return "" } - return overlappingIdentifierError(decl.Name) + return overlappingIdentifierError(decl.Name()) } e.declarations.AddIdent(decl) return "" @@ -264,111 +219,9 @@ func (e *Env) isOverloadDisabled(overloadID string) bool { return found } -// overloadsEqual returns whether two overloads have identical signatures. -// -// type parameter names are ignored as they may be specified in any order and have no bearing on overload -// equivalence -func overloadsEqual(o1, o2 *exprpb.Decl_FunctionDecl_Overload) bool { - return o1.GetOverloadId() == o2.GetOverloadId() && - o1.GetIsInstanceFunction() == o2.GetIsInstanceFunction() && - paramsEqual(o1.GetParams(), o2.GetParams()) && - proto.Equal(o1.GetResultType(), o2.GetResultType()) -} - -// paramsEqual returns whether two lists have equal length and all types are equal -func paramsEqual(p1, p2 []*exprpb.Type) bool { - if len(p1) != len(p2) { - return false - } - for i, a := range p1 { - b := p2[i] - if !proto.Equal(a, b) { - return false - } - } - return true -} - -// sanitizeFunction replaces well-known types referenced by message name with their equivalent -// CEL built-in type instances. -func sanitizeFunction(decl *exprpb.Decl) *exprpb.Decl { - fn := decl.GetFunction() - // Determine whether the declaration requires replacements from proto-based message type - // references to well-known CEL type references. 
- var needsSanitizing bool - for _, o := range fn.GetOverloads() { - if isObjectWellKnownType(o.GetResultType()) { - needsSanitizing = true - break - } - for _, p := range o.GetParams() { - if isObjectWellKnownType(p) { - needsSanitizing = true - break - } - } - } - - // Early return if the declaration requires no modification. - if !needsSanitizing { - return decl - } - - // Sanitize all of the overloads if any overload requires an update to its type references. - overloads := make([]*exprpb.Decl_FunctionDecl_Overload, len(fn.GetOverloads())) - for i, o := range fn.GetOverloads() { - rt := o.GetResultType() - if isObjectWellKnownType(rt) { - rt = getObjectWellKnownType(rt) - } - params := make([]*exprpb.Type, len(o.GetParams())) - copy(params, o.GetParams()) - for j, p := range params { - if isObjectWellKnownType(p) { - params[j] = getObjectWellKnownType(p) - } - } - // If sanitized, replace the overload definition. - if o.IsInstanceFunction { - overloads[i] = - decls.NewInstanceOverload(o.GetOverloadId(), params, rt) - } else { - overloads[i] = - decls.NewOverload(o.GetOverloadId(), params, rt) - } - } - return decls.NewFunction(decl.GetName(), overloads...) -} - -// sanitizeIdent replaces the identifier's well-known types referenced by message name with -// references to CEL built-in type instances. -func sanitizeIdent(decl *exprpb.Decl) *exprpb.Decl { - id := decl.GetIdent() - t := id.GetType() - if !isObjectWellKnownType(t) { - return decl - } - return decls.NewIdent(decl.GetName(), getObjectWellKnownType(t), id.GetValue()) -} - -// isObjectWellKnownType returns true if the input type is an OBJECT type with a message name -// that corresponds the message name of a built-in CEL type. -func isObjectWellKnownType(t *exprpb.Type) bool { - if kindOf(t) != kindObject { - return false - } - _, found := pb.CheckedWellKnowns[t.GetMessageType()] - return found -} - -// getObjectWellKnownType returns the built-in CEL type declaration for input type's message name. -func getObjectWellKnownType(t *exprpb.Type) *exprpb.Type { - return pb.CheckedWellKnowns[t.GetMessageType()] -} - // validatedDeclarations returns a reference to the validated variable and function declaration scope stack. // must be copied before use. 
-func (e *Env) validatedDeclarations() *decls.Scopes { +func (e *Env) validatedDeclarations() *Scopes { return e.declarations } @@ -402,19 +255,6 @@ func overlappingIdentifierError(name string) errorMsg { return errorMsg(fmt.Sprintf("overlapping identifier for name '%s'", name)) } -func overlappingOverloadError(name string, - overloadID1 string, f1 *exprpb.Type, - overloadID2 string, f2 *exprpb.Type) errorMsg { - return errorMsg(fmt.Sprintf( - "overlapping overload for name '%s' (type '%s' with overloadId: '%s' "+ - "cannot be distinguished from '%s' with overloadId: '%s')", - name, - FormatCheckedType(f1), - overloadID1, - FormatCheckedType(f2), - overloadID2)) -} - func overlappingMacroError(name string, argCount int) errorMsg { return errorMsg(fmt.Sprintf( "overlapping macro for name '%s' with %d args", name, argCount)) diff --git a/vendor/github.com/google/cel-go/checker/errors.go b/vendor/github.com/google/cel-go/checker/errors.go index 0014f9abe..8b3bf0b8b 100644 --- a/vendor/github.com/google/cel-go/checker/errors.go +++ b/vendor/github.com/google/cel-go/checker/errors.go @@ -16,81 +16,73 @@ package checker import ( "github.com/google/cel-go/common" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" + "github.com/google/cel-go/common/ast" + "github.com/google/cel-go/common/types" ) // typeErrors is a specialization of Errors. type typeErrors struct { - *common.Errors + errs *common.Errors +} + +func (e *typeErrors) fieldTypeMismatch(id int64, l common.Location, name string, field, value *types.Type) { + e.errs.ReportErrorAtID(id, l, "expected type of field '%s' is '%s' but provided type is '%s'", + name, FormatCELType(field), FormatCELType(value)) +} + +func (e *typeErrors) incompatibleType(id int64, l common.Location, ex ast.Expr, prev, next *types.Type) { + e.errs.ReportErrorAtID(id, l, + "incompatible type already exists for expression: %v(%d) old:%v, new:%v", ex, ex.ID(), prev, next) +} + +func (e *typeErrors) noMatchingOverload(id int64, l common.Location, name string, args []*types.Type, isInstance bool) { + signature := formatFunctionDeclType(nil, args, isInstance) + e.errs.ReportErrorAtID(id, l, "found no matching overload for '%s' applied to '%s'", name, signature) } -func (e *typeErrors) undeclaredReference(l common.Location, container string, name string) { - e.ReportError(l, "undeclared reference to '%s' (in container '%s')", name, container) +func (e *typeErrors) notAComprehensionRange(id int64, l common.Location, t *types.Type) { + e.errs.ReportErrorAtID(id, l, "expression of type '%s' cannot be range of a comprehension (must be list, map, or dynamic)", + FormatCELType(t)) } -func (e *typeErrors) typeDoesNotSupportFieldSelection(l common.Location, t *exprpb.Type) { - e.ReportError(l, "type '%s' does not support field selection", t) +func (e *typeErrors) notAnOptionalFieldSelection(id int64, l common.Location, field ast.Expr) { + e.errs.ReportErrorAtID(id, l, "unsupported optional field selection: %v", field) } -func (e *typeErrors) undefinedField(l common.Location, field string) { - e.ReportError(l, "undefined field '%s'", field) +func (e *typeErrors) notAType(id int64, l common.Location, typeName string) { + e.errs.ReportErrorAtID(id, l, "'%s' is not a type", typeName) } -func (e *typeErrors) noMatchingOverload(l common.Location, name string, args []*exprpb.Type, isInstance bool) { - signature := formatFunction(nil, args, isInstance) - e.ReportError(l, "found no matching overload for '%s' applied to '%s'", name, signature) +func (e *typeErrors) 
notAMessageType(id int64, l common.Location, typeName string) { + e.errs.ReportErrorAtID(id, l, "'%s' is not a message type", typeName) } -func (e *typeErrors) notAType(l common.Location, t *exprpb.Type) { - e.ReportError(l, "'%s(%v)' is not a type", FormatCheckedType(t), t) +func (e *typeErrors) referenceRedefinition(id int64, l common.Location, ex ast.Expr, prev, next *ast.ReferenceInfo) { + e.errs.ReportErrorAtID(id, l, + "reference already exists for expression: %v(%d) old:%v, new:%v", ex, ex.ID(), prev, next) } -func (e *typeErrors) notAMessageType(l common.Location, t *exprpb.Type) { - e.ReportError(l, "'%s' is not a message type", FormatCheckedType(t)) +func (e *typeErrors) typeDoesNotSupportFieldSelection(id int64, l common.Location, t *types.Type) { + e.errs.ReportErrorAtID(id, l, "type '%s' does not support field selection", FormatCELType(t)) } -func (e *typeErrors) fieldTypeMismatch(l common.Location, name string, field *exprpb.Type, value *exprpb.Type) { - e.ReportError(l, "expected type of field '%s' is '%s' but provided type is '%s'", - name, FormatCheckedType(field), FormatCheckedType(value)) +func (e *typeErrors) typeMismatch(id int64, l common.Location, expected, actual *types.Type) { + e.errs.ReportErrorAtID(id, l, "expected type '%s' but found '%s'", + FormatCELType(expected), FormatCELType(actual)) } -func (e *typeErrors) unexpectedFailedResolution(l common.Location, typeName string) { - e.ReportError(l, "[internal] unexpected failed resolution of '%s'", typeName) +func (e *typeErrors) undefinedField(id int64, l common.Location, field string) { + e.errs.ReportErrorAtID(id, l, "undefined field '%s'", field) } -func (e *typeErrors) notAComprehensionRange(l common.Location, t *exprpb.Type) { - e.ReportError(l, "expression of type '%s' cannot be range of a comprehension (must be list, map, or dynamic)", - FormatCheckedType(t)) +func (e *typeErrors) undeclaredReference(id int64, l common.Location, container string, name string) { + e.errs.ReportErrorAtID(id, l, "undeclared reference to '%s' (in container '%s')", name, container) } -func (e *typeErrors) typeMismatch(l common.Location, expected *exprpb.Type, actual *exprpb.Type) { - e.ReportError(l, "expected type '%s' but found '%s'", - FormatCheckedType(expected), FormatCheckedType(actual)) +func (e *typeErrors) unexpectedFailedResolution(id int64, l common.Location, typeName string) { + e.errs.ReportErrorAtID(id, l, "unexpected failed resolution of '%s'", typeName) } -func formatFunction(resultType *exprpb.Type, argTypes []*exprpb.Type, isInstance bool) string { - result := "" - if isInstance { - target := argTypes[0] - argTypes = argTypes[1:] - - result += FormatCheckedType(target) - result += "." 
- } - - result += "(" - for i, arg := range argTypes { - if i > 0 { - result += ", " - } - result += FormatCheckedType(arg) - } - result += ")" - if resultType != nil { - result += " -> " - result += FormatCheckedType(resultType) - } - - return result +func (e *typeErrors) unexpectedASTType(id int64, l common.Location, kind, typeName string) { + e.errs.ReportErrorAtID(id, l, "unexpected %s type: %v", kind, typeName) } diff --git a/vendor/github.com/google/cel-go/checker/format.go b/vendor/github.com/google/cel-go/checker/format.go new file mode 100644 index 000000000..95842905e --- /dev/null +++ b/vendor/github.com/google/cel-go/checker/format.go @@ -0,0 +1,216 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package checker + +import ( + "fmt" + "strings" + + chkdecls "github.com/google/cel-go/checker/decls" + "github.com/google/cel-go/common/types" + + exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" +) + +const ( + kindUnknown = iota + 1 + kindError + kindFunction + kindDyn + kindPrimitive + kindWellKnown + kindWrapper + kindNull + kindAbstract + kindType + kindList + kindMap + kindObject + kindTypeParam +) + +// FormatCheckedType converts a type message into a string representation. +func FormatCheckedType(t *exprpb.Type) string { + switch kindOf(t) { + case kindDyn: + return "dyn" + case kindFunction: + return formatFunctionExprType(t.GetFunction().GetResultType(), + t.GetFunction().GetArgTypes(), + false) + case kindList: + return fmt.Sprintf("list(%s)", FormatCheckedType(t.GetListType().GetElemType())) + case kindObject: + return t.GetMessageType() + case kindMap: + return fmt.Sprintf("map(%s, %s)", + FormatCheckedType(t.GetMapType().GetKeyType()), + FormatCheckedType(t.GetMapType().GetValueType())) + case kindNull: + return "null" + case kindPrimitive: + switch t.GetPrimitive() { + case exprpb.Type_UINT64: + return "uint" + case exprpb.Type_INT64: + return "int" + } + return strings.Trim(strings.ToLower(t.GetPrimitive().String()), " ") + case kindType: + if t.GetType() == nil || t.GetType().GetTypeKind() == nil { + return "type" + } + return fmt.Sprintf("type(%s)", FormatCheckedType(t.GetType())) + case kindWellKnown: + switch t.GetWellKnown() { + case exprpb.Type_ANY: + return "any" + case exprpb.Type_DURATION: + return "duration" + case exprpb.Type_TIMESTAMP: + return "timestamp" + } + case kindWrapper: + return fmt.Sprintf("wrapper(%s)", + FormatCheckedType(chkdecls.NewPrimitiveType(t.GetWrapper()))) + case kindError: + return "!error!" + case kindTypeParam: + return t.GetTypeParam() + case kindAbstract: + at := t.GetAbstractType() + params := at.GetParameterTypes() + paramStrs := make([]string, len(params)) + for i, p := range params { + paramStrs[i] = FormatCheckedType(p) + } + return fmt.Sprintf("%s(%s)", at.GetName(), strings.Join(paramStrs, ", ")) + } + return t.String() +} + +type formatter func(any) string + +// FormatCELType formats a types.Type value to a string representation. 
+// +// The type formatting is identical to FormatCheckedType. +func FormatCELType(t any) string { + dt := t.(*types.Type) + switch dt.Kind() { + case types.AnyKind: + return "any" + case types.DurationKind: + return "duration" + case types.ErrorKind: + return "!error!" + case types.NullTypeKind: + return "null" + case types.TimestampKind: + return "timestamp" + case types.TypeParamKind: + return dt.TypeName() + case types.OpaqueKind: + if dt.TypeName() == "function" { + // There is no explicit function type in the new types representation, so information like + // whether the function is a member function is absent. + return formatFunctionDeclType(dt.Parameters()[0], dt.Parameters()[1:], false) + } + case types.UnspecifiedKind: + return "" + } + if len(dt.Parameters()) == 0 { + return dt.DeclaredTypeName() + } + paramTypeNames := make([]string, 0, len(dt.Parameters())) + for _, p := range dt.Parameters() { + paramTypeNames = append(paramTypeNames, FormatCELType(p)) + } + return fmt.Sprintf("%s(%s)", dt.TypeName(), strings.Join(paramTypeNames, ", ")) +} + +func formatExprType(t any) string { + if t == nil { + return "" + } + return FormatCheckedType(t.(*exprpb.Type)) +} + +func formatFunctionExprType(resultType *exprpb.Type, argTypes []*exprpb.Type, isInstance bool) string { + return formatFunctionInternal[*exprpb.Type](resultType, argTypes, isInstance, formatExprType) +} + +func formatFunctionDeclType(resultType *types.Type, argTypes []*types.Type, isInstance bool) string { + return formatFunctionInternal[*types.Type](resultType, argTypes, isInstance, FormatCELType) +} + +func formatFunctionInternal[T any](resultType T, argTypes []T, isInstance bool, format formatter) string { + result := "" + if isInstance { + target := argTypes[0] + argTypes = argTypes[1:] + result += format(target) + result += "." + } + result += "(" + for i, arg := range argTypes { + if i > 0 { + result += ", " + } + result += format(arg) + } + result += ")" + rt := format(resultType) + if rt != "" { + result += " -> " + result += rt + } + return result +} + +// kindOf returns the kind of the type as defined in the checked.proto. 
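FormatCELType mirrors FormatCheckedType but operates on *types.Type values. A tiny sketch, with the expected output inferred from the formatting rules in the new format.go above (the helper function name is made up):

package formatexample

import (
	"fmt"

	"github.com/google/cel-go/checker"
	"github.com/google/cel-go/common/types"
)

// printTypeNames prints the formatted names of a couple of *types.Type values.
func printTypeNames() {
	fmt.Println(checker.FormatCELType(types.StringType))                                  // string
	fmt.Println(checker.FormatCELType(types.NewMapType(types.StringType, types.IntType))) // map(string, int)
}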
+func kindOf(t *exprpb.Type) int { + if t == nil || t.TypeKind == nil { + return kindUnknown + } + switch t.GetTypeKind().(type) { + case *exprpb.Type_Error: + return kindError + case *exprpb.Type_Function: + return kindFunction + case *exprpb.Type_Dyn: + return kindDyn + case *exprpb.Type_Primitive: + return kindPrimitive + case *exprpb.Type_WellKnown: + return kindWellKnown + case *exprpb.Type_Wrapper: + return kindWrapper + case *exprpb.Type_Null: + return kindNull + case *exprpb.Type_Type: + return kindType + case *exprpb.Type_ListType_: + return kindList + case *exprpb.Type_MapType_: + return kindMap + case *exprpb.Type_MessageType: + return kindObject + case *exprpb.Type_TypeParam: + return kindTypeParam + case *exprpb.Type_AbstractType_: + return kindAbstract + } + return kindUnknown +} diff --git a/vendor/github.com/google/cel-go/checker/mapping.go b/vendor/github.com/google/cel-go/checker/mapping.go index fbc55a28d..8163a908a 100644 --- a/vendor/github.com/google/cel-go/checker/mapping.go +++ b/vendor/github.com/google/cel-go/checker/mapping.go @@ -15,25 +15,25 @@ package checker import ( - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" + "github.com/google/cel-go/common/types" ) type mapping struct { - mapping map[string]*exprpb.Type + mapping map[string]*types.Type } func newMapping() *mapping { return &mapping{ - mapping: make(map[string]*exprpb.Type), + mapping: make(map[string]*types.Type), } } -func (m *mapping) add(from *exprpb.Type, to *exprpb.Type) { - m.mapping[typeKey(from)] = to +func (m *mapping) add(from, to *types.Type) { + m.mapping[FormatCELType(from)] = to } -func (m *mapping) find(from *exprpb.Type) (*exprpb.Type, bool) { - if r, found := m.mapping[typeKey(from)]; found { +func (m *mapping) find(from *types.Type) (*types.Type, bool) { + if r, found := m.mapping[FormatCELType(from)]; found { return r, found } return nil, false diff --git a/vendor/github.com/google/cel-go/checker/options.go b/vendor/github.com/google/cel-go/checker/options.go index cded00a66..0560c3813 100644 --- a/vendor/github.com/google/cel-go/checker/options.go +++ b/vendor/github.com/google/cel-go/checker/options.go @@ -14,12 +14,10 @@ package checker -import "github.com/google/cel-go/checker/decls" - type options struct { crossTypeNumericComparisons bool homogeneousAggregateLiterals bool - validatedDeclarations *decls.Scopes + validatedDeclarations *Scopes } // Option is a functional option for configuring the type-checker @@ -34,15 +32,6 @@ func CrossTypeNumericComparisons(enabled bool) Option { } } -// HomogeneousAggregateLiterals toggles support for constructing lists and maps whose elements all -// have the same type. -func HomogeneousAggregateLiterals(enabled bool) Option { - return func(opts *options) error { - opts.homogeneousAggregateLiterals = enabled - return nil - } -} - // ValidatedDeclarations provides a references to validated declarations which will be copied // into new checker instances. 
func ValidatedDeclarations(env *Env) Option { diff --git a/vendor/github.com/google/cel-go/checker/printer.go b/vendor/github.com/google/cel-go/checker/printer.go index 0cecc5210..7a3984f02 100644 --- a/vendor/github.com/google/cel-go/checker/printer.go +++ b/vendor/github.com/google/cel-go/checker/printer.go @@ -15,39 +15,42 @@ package checker import ( - "github.com/google/cel-go/common/debug" + "sort" - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" + "github.com/google/cel-go/common/ast" + "github.com/google/cel-go/common/debug" ) type semanticAdorner struct { - checks *exprpb.CheckedExpr + checked *ast.AST } var _ debug.Adorner = &semanticAdorner{} func (a *semanticAdorner) GetMetadata(elem any) string { result := "" - e, isExpr := elem.(*exprpb.Expr) + e, isExpr := elem.(ast.Expr) if !isExpr { return result } - t := a.checks.TypeMap[e.GetId()] + t := a.checked.TypeMap()[e.ID()] if t != nil { result += "~" - result += FormatCheckedType(t) + result += FormatCELType(t) } - switch e.GetExprKind().(type) { - case *exprpb.Expr_IdentExpr, - *exprpb.Expr_CallExpr, - *exprpb.Expr_StructExpr, - *exprpb.Expr_SelectExpr: - if ref, found := a.checks.ReferenceMap[e.GetId()]; found { - if len(ref.GetOverloadId()) == 0 { + switch e.Kind() { + case ast.IdentKind, + ast.CallKind, + ast.ListKind, + ast.StructKind, + ast.SelectKind: + if ref, found := a.checked.ReferenceMap()[e.ID()]; found { + if len(ref.OverloadIDs) == 0 { result += "^" + ref.Name } else { - for i, overload := range ref.GetOverloadId() { + sort.Strings(ref.OverloadIDs) + for i, overload := range ref.OverloadIDs { if i == 0 { result += "^" } else { @@ -65,7 +68,7 @@ func (a *semanticAdorner) GetMetadata(elem any) string { // Print returns a string representation of the Expr message, // annotated with types from the CheckedExpr. The Expr must // be a sub-expression embedded in the CheckedExpr. -func Print(e *exprpb.Expr, checks *exprpb.CheckedExpr) string { - a := &semanticAdorner{checks: checks} +func Print(e ast.Expr, checked *ast.AST) string { + a := &semanticAdorner{checked: checked} return debug.ToAdornedDebugString(e, a) } diff --git a/vendor/github.com/google/cel-go/checker/decls/scopes.go b/vendor/github.com/google/cel-go/checker/scopes.go similarity index 81% rename from vendor/github.com/google/cel-go/checker/decls/scopes.go rename to vendor/github.com/google/cel-go/checker/scopes.go index 608bca3e5..8bb73ddb6 100644 --- a/vendor/github.com/google/cel-go/checker/decls/scopes.go +++ b/vendor/github.com/google/cel-go/checker/scopes.go @@ -12,9 +12,11 @@ // See the License for the specific language governing permissions and // limitations under the License. -package decls +package checker -import exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" +import ( + "github.com/google/cel-go/common/decls" +) // Scopes represents nested Decl sets where the Scopes value contains a Groups containing all // identifiers in scope and an optional parent representing outer scopes. @@ -25,9 +27,9 @@ type Scopes struct { scopes *Group } -// NewScopes creates a new, empty Scopes. +// newScopes creates a new, empty Scopes. // Some operations can't be safely performed until a Group is added with Push. -func NewScopes() *Scopes { +func newScopes() *Scopes { return &Scopes{ scopes: newGroup(), } @@ -35,7 +37,7 @@ func NewScopes() *Scopes { // Copy creates a copy of the current Scopes values, including a copy of its parent if non-nil. 
func (s *Scopes) Copy() *Scopes { - cpy := NewScopes() + cpy := newScopes() if s == nil { return cpy } @@ -66,14 +68,14 @@ func (s *Scopes) Pop() *Scopes { // AddIdent adds the ident Decl in the current scope. // Note: If the name collides with an existing identifier in the scope, the Decl is overwritten. -func (s *Scopes) AddIdent(decl *exprpb.Decl) { - s.scopes.idents[decl.Name] = decl +func (s *Scopes) AddIdent(decl *decls.VariableDecl) { + s.scopes.idents[decl.Name()] = decl } // FindIdent finds the first ident Decl with a matching name in Scopes, or nil if one cannot be // found. // Note: The search is performed from innermost to outermost. -func (s *Scopes) FindIdent(name string) *exprpb.Decl { +func (s *Scopes) FindIdent(name string) *decls.VariableDecl { if ident, found := s.scopes.idents[name]; found { return ident } @@ -86,7 +88,7 @@ func (s *Scopes) FindIdent(name string) *exprpb.Decl { // FindIdentInScope finds the first ident Decl with a matching name in the current Scopes value, or // nil if one does not exist. // Note: The search is only performed on the current scope and does not search outer scopes. -func (s *Scopes) FindIdentInScope(name string) *exprpb.Decl { +func (s *Scopes) FindIdentInScope(name string) *decls.VariableDecl { if ident, found := s.scopes.idents[name]; found { return ident } @@ -95,14 +97,14 @@ func (s *Scopes) FindIdentInScope(name string) *exprpb.Decl { // SetFunction adds the function Decl to the current scope. // Note: Any previous entry for a function in the current scope with the same name is overwritten. -func (s *Scopes) SetFunction(fn *exprpb.Decl) { - s.scopes.functions[fn.Name] = fn +func (s *Scopes) SetFunction(fn *decls.FunctionDecl) { + s.scopes.functions[fn.Name()] = fn } // FindFunction finds the first function Decl with a matching name in Scopes. // The search is performed from innermost to outermost. // Returns nil if no such function in Scopes. -func (s *Scopes) FindFunction(name string) *exprpb.Decl { +func (s *Scopes) FindFunction(name string) *decls.FunctionDecl { if fn, found := s.scopes.functions[name]; found { return fn } @@ -116,16 +118,16 @@ func (s *Scopes) FindFunction(name string) *exprpb.Decl { // Contains separate namespaces for identifier and function Decls. // (Should be named "Scope" perhaps?) type Group struct { - idents map[string]*exprpb.Decl - functions map[string]*exprpb.Decl + idents map[string]*decls.VariableDecl + functions map[string]*decls.FunctionDecl } // copy creates a new Group instance with a shallow copy of the variables and functions. // If callers need to mutate the exprpb.Decl definitions for a Function, they should copy-on-write. func (g *Group) copy() *Group { cpy := &Group{ - idents: make(map[string]*exprpb.Decl, len(g.idents)), - functions: make(map[string]*exprpb.Decl, len(g.functions)), + idents: make(map[string]*decls.VariableDecl, len(g.idents)), + functions: make(map[string]*decls.FunctionDecl, len(g.functions)), } for n, id := range g.idents { cpy.idents[n] = id @@ -139,7 +141,7 @@ func (g *Group) copy() *Group { // newGroup creates a new Group with empty maps for identifiers and functions. 
func newGroup() *Group { return &Group{ - idents: make(map[string]*exprpb.Decl), - functions: make(map[string]*exprpb.Decl), + idents: make(map[string]*decls.VariableDecl), + functions: make(map[string]*decls.FunctionDecl), } } diff --git a/vendor/github.com/google/cel-go/checker/standard.go b/vendor/github.com/google/cel-go/checker/standard.go index e64337ba4..11b35b80e 100644 --- a/vendor/github.com/google/cel-go/checker/standard.go +++ b/vendor/github.com/google/cel-go/checker/standard.go @@ -15,480 +15,21 @@ package checker import ( - "github.com/google/cel-go/checker/decls" - "github.com/google/cel-go/common/operators" - "github.com/google/cel-go/common/overloads" + "github.com/google/cel-go/common/stdlib" exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" ) -var ( - standardDeclarations []*exprpb.Decl -) - -func init() { - // Some shortcuts we use when building declarations. - paramA := decls.NewTypeParamType("A") - typeParamAList := []string{"A"} - listOfA := decls.NewListType(paramA) - paramB := decls.NewTypeParamType("B") - typeParamABList := []string{"A", "B"} - mapOfAB := decls.NewMapType(paramA, paramB) - - var idents []*exprpb.Decl - for _, t := range []*exprpb.Type{ - decls.Int, decls.Uint, decls.Bool, - decls.Double, decls.Bytes, decls.String} { - idents = append(idents, - decls.NewVar(FormatCheckedType(t), decls.NewTypeType(t))) - } - idents = append(idents, - decls.NewVar("list", decls.NewTypeType(listOfA)), - decls.NewVar("map", decls.NewTypeType(mapOfAB)), - decls.NewVar("null_type", decls.NewTypeType(decls.Null)), - decls.NewVar("type", decls.NewTypeType(decls.NewTypeType(nil)))) - - standardDeclarations = append(standardDeclarations, idents...) - standardDeclarations = append(standardDeclarations, []*exprpb.Decl{ - // Booleans - decls.NewFunction(operators.Conditional, - decls.NewParameterizedOverload(overloads.Conditional, - []*exprpb.Type{decls.Bool, paramA, paramA}, paramA, - typeParamAList)), - - decls.NewFunction(operators.LogicalAnd, - decls.NewOverload(overloads.LogicalAnd, - []*exprpb.Type{decls.Bool, decls.Bool}, decls.Bool)), - - decls.NewFunction(operators.LogicalOr, - decls.NewOverload(overloads.LogicalOr, - []*exprpb.Type{decls.Bool, decls.Bool}, decls.Bool)), - - decls.NewFunction(operators.LogicalNot, - decls.NewOverload(overloads.LogicalNot, - []*exprpb.Type{decls.Bool}, decls.Bool)), - - decls.NewFunction(operators.NotStrictlyFalse, - decls.NewOverload(overloads.NotStrictlyFalse, - []*exprpb.Type{decls.Bool}, decls.Bool)), - - decls.NewFunction(operators.Equals, - decls.NewParameterizedOverload(overloads.Equals, - []*exprpb.Type{paramA, paramA}, decls.Bool, - typeParamAList)), - - decls.NewFunction(operators.NotEquals, - decls.NewParameterizedOverload(overloads.NotEquals, - []*exprpb.Type{paramA, paramA}, decls.Bool, - typeParamAList)), - - // Algebra. 
- - decls.NewFunction(operators.Subtract, - decls.NewOverload(overloads.SubtractInt64, - []*exprpb.Type{decls.Int, decls.Int}, decls.Int), - decls.NewOverload(overloads.SubtractUint64, - []*exprpb.Type{decls.Uint, decls.Uint}, decls.Uint), - decls.NewOverload(overloads.SubtractDouble, - []*exprpb.Type{decls.Double, decls.Double}, decls.Double), - decls.NewOverload(overloads.SubtractTimestampTimestamp, - []*exprpb.Type{decls.Timestamp, decls.Timestamp}, decls.Duration), - decls.NewOverload(overloads.SubtractTimestampDuration, - []*exprpb.Type{decls.Timestamp, decls.Duration}, decls.Timestamp), - decls.NewOverload(overloads.SubtractDurationDuration, - []*exprpb.Type{decls.Duration, decls.Duration}, decls.Duration)), - - decls.NewFunction(operators.Multiply, - decls.NewOverload(overloads.MultiplyInt64, - []*exprpb.Type{decls.Int, decls.Int}, decls.Int), - decls.NewOverload(overloads.MultiplyUint64, - []*exprpb.Type{decls.Uint, decls.Uint}, decls.Uint), - decls.NewOverload(overloads.MultiplyDouble, - []*exprpb.Type{decls.Double, decls.Double}, decls.Double)), - - decls.NewFunction(operators.Divide, - decls.NewOverload(overloads.DivideInt64, - []*exprpb.Type{decls.Int, decls.Int}, decls.Int), - decls.NewOverload(overloads.DivideUint64, - []*exprpb.Type{decls.Uint, decls.Uint}, decls.Uint), - decls.NewOverload(overloads.DivideDouble, - []*exprpb.Type{decls.Double, decls.Double}, decls.Double)), - - decls.NewFunction(operators.Modulo, - decls.NewOverload(overloads.ModuloInt64, - []*exprpb.Type{decls.Int, decls.Int}, decls.Int), - decls.NewOverload(overloads.ModuloUint64, - []*exprpb.Type{decls.Uint, decls.Uint}, decls.Uint)), - - decls.NewFunction(operators.Add, - decls.NewOverload(overloads.AddInt64, - []*exprpb.Type{decls.Int, decls.Int}, decls.Int), - decls.NewOverload(overloads.AddUint64, - []*exprpb.Type{decls.Uint, decls.Uint}, decls.Uint), - decls.NewOverload(overloads.AddDouble, - []*exprpb.Type{decls.Double, decls.Double}, decls.Double), - decls.NewOverload(overloads.AddString, - []*exprpb.Type{decls.String, decls.String}, decls.String), - decls.NewOverload(overloads.AddBytes, - []*exprpb.Type{decls.Bytes, decls.Bytes}, decls.Bytes), - decls.NewParameterizedOverload(overloads.AddList, - []*exprpb.Type{listOfA, listOfA}, listOfA, - typeParamAList), - decls.NewOverload(overloads.AddTimestampDuration, - []*exprpb.Type{decls.Timestamp, decls.Duration}, decls.Timestamp), - decls.NewOverload(overloads.AddDurationTimestamp, - []*exprpb.Type{decls.Duration, decls.Timestamp}, decls.Timestamp), - decls.NewOverload(overloads.AddDurationDuration, - []*exprpb.Type{decls.Duration, decls.Duration}, decls.Duration)), - - decls.NewFunction(operators.Negate, - decls.NewOverload(overloads.NegateInt64, - []*exprpb.Type{decls.Int}, decls.Int), - decls.NewOverload(overloads.NegateDouble, - []*exprpb.Type{decls.Double}, decls.Double)), - - // Index. - - decls.NewFunction(operators.Index, - decls.NewParameterizedOverload(overloads.IndexList, - []*exprpb.Type{listOfA, decls.Int}, paramA, - typeParamAList), - decls.NewParameterizedOverload(overloads.IndexMap, - []*exprpb.Type{mapOfAB, paramA}, paramB, - typeParamABList)), - - // Collections. 
- - decls.NewFunction(overloads.Size, - decls.NewInstanceOverload(overloads.SizeStringInst, - []*exprpb.Type{decls.String}, decls.Int), - decls.NewInstanceOverload(overloads.SizeBytesInst, - []*exprpb.Type{decls.Bytes}, decls.Int), - decls.NewParameterizedInstanceOverload(overloads.SizeListInst, - []*exprpb.Type{listOfA}, decls.Int, typeParamAList), - decls.NewParameterizedInstanceOverload(overloads.SizeMapInst, - []*exprpb.Type{mapOfAB}, decls.Int, typeParamABList), - decls.NewOverload(overloads.SizeString, - []*exprpb.Type{decls.String}, decls.Int), - decls.NewOverload(overloads.SizeBytes, - []*exprpb.Type{decls.Bytes}, decls.Int), - decls.NewParameterizedOverload(overloads.SizeList, - []*exprpb.Type{listOfA}, decls.Int, typeParamAList), - decls.NewParameterizedOverload(overloads.SizeMap, - []*exprpb.Type{mapOfAB}, decls.Int, typeParamABList)), - - decls.NewFunction(operators.In, - decls.NewParameterizedOverload(overloads.InList, - []*exprpb.Type{paramA, listOfA}, decls.Bool, - typeParamAList), - decls.NewParameterizedOverload(overloads.InMap, - []*exprpb.Type{paramA, mapOfAB}, decls.Bool, - typeParamABList)), - - // Deprecated 'in()' function. - - decls.NewFunction(overloads.DeprecatedIn, - decls.NewParameterizedOverload(overloads.InList, - []*exprpb.Type{paramA, listOfA}, decls.Bool, - typeParamAList), - decls.NewParameterizedOverload(overloads.InMap, - []*exprpb.Type{paramA, mapOfAB}, decls.Bool, - typeParamABList)), - - // Conversions to type. - - decls.NewFunction(overloads.TypeConvertType, - decls.NewParameterizedOverload(overloads.TypeConvertType, - []*exprpb.Type{paramA}, decls.NewTypeType(paramA), typeParamAList)), - - // Conversions to int. - - decls.NewFunction(overloads.TypeConvertInt, - decls.NewOverload(overloads.IntToInt, []*exprpb.Type{decls.Int}, decls.Int), - decls.NewOverload(overloads.UintToInt, []*exprpb.Type{decls.Uint}, decls.Int), - decls.NewOverload(overloads.DoubleToInt, []*exprpb.Type{decls.Double}, decls.Int), - decls.NewOverload(overloads.StringToInt, []*exprpb.Type{decls.String}, decls.Int), - decls.NewOverload(overloads.TimestampToInt, []*exprpb.Type{decls.Timestamp}, decls.Int), - decls.NewOverload(overloads.DurationToInt, []*exprpb.Type{decls.Duration}, decls.Int)), - - // Conversions to uint. - - decls.NewFunction(overloads.TypeConvertUint, - decls.NewOverload(overloads.UintToUint, []*exprpb.Type{decls.Uint}, decls.Uint), - decls.NewOverload(overloads.IntToUint, []*exprpb.Type{decls.Int}, decls.Uint), - decls.NewOverload(overloads.DoubleToUint, []*exprpb.Type{decls.Double}, decls.Uint), - decls.NewOverload(overloads.StringToUint, []*exprpb.Type{decls.String}, decls.Uint)), - - // Conversions to double. - - decls.NewFunction(overloads.TypeConvertDouble, - decls.NewOverload(overloads.DoubleToDouble, []*exprpb.Type{decls.Double}, decls.Double), - decls.NewOverload(overloads.IntToDouble, []*exprpb.Type{decls.Int}, decls.Double), - decls.NewOverload(overloads.UintToDouble, []*exprpb.Type{decls.Uint}, decls.Double), - decls.NewOverload(overloads.StringToDouble, []*exprpb.Type{decls.String}, decls.Double)), - - // Conversions to bool. - - decls.NewFunction(overloads.TypeConvertBool, - decls.NewOverload(overloads.BoolToBool, []*exprpb.Type{decls.Bool}, decls.Bool), - decls.NewOverload(overloads.StringToBool, []*exprpb.Type{decls.String}, decls.Bool)), - - // Conversions to string. 
- - decls.NewFunction(overloads.TypeConvertString, - decls.NewOverload(overloads.StringToString, []*exprpb.Type{decls.String}, decls.String), - decls.NewOverload(overloads.BoolToString, []*exprpb.Type{decls.Bool}, decls.String), - decls.NewOverload(overloads.IntToString, []*exprpb.Type{decls.Int}, decls.String), - decls.NewOverload(overloads.UintToString, []*exprpb.Type{decls.Uint}, decls.String), - decls.NewOverload(overloads.DoubleToString, []*exprpb.Type{decls.Double}, decls.String), - decls.NewOverload(overloads.BytesToString, []*exprpb.Type{decls.Bytes}, decls.String), - decls.NewOverload(overloads.TimestampToString, []*exprpb.Type{decls.Timestamp}, decls.String), - decls.NewOverload(overloads.DurationToString, []*exprpb.Type{decls.Duration}, decls.String)), - - // Conversions to bytes. - - decls.NewFunction(overloads.TypeConvertBytes, - decls.NewOverload(overloads.BytesToBytes, []*exprpb.Type{decls.Bytes}, decls.Bytes), - decls.NewOverload(overloads.StringToBytes, []*exprpb.Type{decls.String}, decls.Bytes)), - - // Conversions to timestamps. - - decls.NewFunction(overloads.TypeConvertTimestamp, - decls.NewOverload(overloads.TimestampToTimestamp, - []*exprpb.Type{decls.Timestamp}, decls.Timestamp), - decls.NewOverload(overloads.StringToTimestamp, - []*exprpb.Type{decls.String}, decls.Timestamp), - decls.NewOverload(overloads.IntToTimestamp, - []*exprpb.Type{decls.Int}, decls.Timestamp)), - - // Conversions to durations. - - decls.NewFunction(overloads.TypeConvertDuration, - decls.NewOverload(overloads.DurationToDuration, - []*exprpb.Type{decls.Duration}, decls.Duration), - decls.NewOverload(overloads.StringToDuration, - []*exprpb.Type{decls.String}, decls.Duration), - decls.NewOverload(overloads.IntToDuration, - []*exprpb.Type{decls.Int}, decls.Duration)), - - // Conversions to Dyn. - - decls.NewFunction(overloads.TypeConvertDyn, - decls.NewParameterizedOverload(overloads.ToDyn, - []*exprpb.Type{paramA}, decls.Dyn, - typeParamAList)), - - // String functions. - - decls.NewFunction(overloads.Contains, - decls.NewInstanceOverload(overloads.ContainsString, - []*exprpb.Type{decls.String, decls.String}, decls.Bool)), - decls.NewFunction(overloads.EndsWith, - decls.NewInstanceOverload(overloads.EndsWithString, - []*exprpb.Type{decls.String, decls.String}, decls.Bool)), - decls.NewFunction(overloads.Matches, - decls.NewOverload(overloads.Matches, - []*exprpb.Type{decls.String, decls.String}, decls.Bool), - decls.NewInstanceOverload(overloads.MatchesString, - []*exprpb.Type{decls.String, decls.String}, decls.Bool)), - decls.NewFunction(overloads.StartsWith, - decls.NewInstanceOverload(overloads.StartsWithString, - []*exprpb.Type{decls.String, decls.String}, decls.Bool)), - - // Date/time functions. 
- - decls.NewFunction(overloads.TimeGetFullYear, - decls.NewInstanceOverload(overloads.TimestampToYear, - []*exprpb.Type{decls.Timestamp}, decls.Int), - decls.NewInstanceOverload(overloads.TimestampToYearWithTz, - []*exprpb.Type{decls.Timestamp, decls.String}, decls.Int)), - - decls.NewFunction(overloads.TimeGetMonth, - decls.NewInstanceOverload(overloads.TimestampToMonth, - []*exprpb.Type{decls.Timestamp}, decls.Int), - decls.NewInstanceOverload(overloads.TimestampToMonthWithTz, - []*exprpb.Type{decls.Timestamp, decls.String}, decls.Int)), - - decls.NewFunction(overloads.TimeGetDayOfYear, - decls.NewInstanceOverload(overloads.TimestampToDayOfYear, - []*exprpb.Type{decls.Timestamp}, decls.Int), - decls.NewInstanceOverload(overloads.TimestampToDayOfYearWithTz, - []*exprpb.Type{decls.Timestamp, decls.String}, decls.Int)), - - decls.NewFunction(overloads.TimeGetDayOfMonth, - decls.NewInstanceOverload(overloads.TimestampToDayOfMonthZeroBased, - []*exprpb.Type{decls.Timestamp}, decls.Int), - decls.NewInstanceOverload(overloads.TimestampToDayOfMonthZeroBasedWithTz, - []*exprpb.Type{decls.Timestamp, decls.String}, decls.Int)), - - decls.NewFunction(overloads.TimeGetDate, - decls.NewInstanceOverload(overloads.TimestampToDayOfMonthOneBased, - []*exprpb.Type{decls.Timestamp}, decls.Int), - decls.NewInstanceOverload(overloads.TimestampToDayOfMonthOneBasedWithTz, - []*exprpb.Type{decls.Timestamp, decls.String}, decls.Int)), - - decls.NewFunction(overloads.TimeGetDayOfWeek, - decls.NewInstanceOverload(overloads.TimestampToDayOfWeek, - []*exprpb.Type{decls.Timestamp}, decls.Int), - decls.NewInstanceOverload(overloads.TimestampToDayOfWeekWithTz, - []*exprpb.Type{decls.Timestamp, decls.String}, decls.Int)), - - decls.NewFunction(overloads.TimeGetHours, - decls.NewInstanceOverload(overloads.TimestampToHours, - []*exprpb.Type{decls.Timestamp}, decls.Int), - decls.NewInstanceOverload(overloads.TimestampToHoursWithTz, - []*exprpb.Type{decls.Timestamp, decls.String}, decls.Int), - decls.NewInstanceOverload(overloads.DurationToHours, - []*exprpb.Type{decls.Duration}, decls.Int)), - - decls.NewFunction(overloads.TimeGetMinutes, - decls.NewInstanceOverload(overloads.TimestampToMinutes, - []*exprpb.Type{decls.Timestamp}, decls.Int), - decls.NewInstanceOverload(overloads.TimestampToMinutesWithTz, - []*exprpb.Type{decls.Timestamp, decls.String}, decls.Int), - decls.NewInstanceOverload(overloads.DurationToMinutes, - []*exprpb.Type{decls.Duration}, decls.Int)), - - decls.NewFunction(overloads.TimeGetSeconds, - decls.NewInstanceOverload(overloads.TimestampToSeconds, - []*exprpb.Type{decls.Timestamp}, decls.Int), - decls.NewInstanceOverload(overloads.TimestampToSecondsWithTz, - []*exprpb.Type{decls.Timestamp, decls.String}, decls.Int), - decls.NewInstanceOverload(overloads.DurationToSeconds, - []*exprpb.Type{decls.Duration}, decls.Int)), - - decls.NewFunction(overloads.TimeGetMilliseconds, - decls.NewInstanceOverload(overloads.TimestampToMilliseconds, - []*exprpb.Type{decls.Timestamp}, decls.Int), - decls.NewInstanceOverload(overloads.TimestampToMillisecondsWithTz, - []*exprpb.Type{decls.Timestamp, decls.String}, decls.Int), - decls.NewInstanceOverload(overloads.DurationToMilliseconds, - []*exprpb.Type{decls.Duration}, decls.Int)), - - // Relations. 
- decls.NewFunction(operators.Less, - decls.NewOverload(overloads.LessBool, - []*exprpb.Type{decls.Bool, decls.Bool}, decls.Bool), - decls.NewOverload(overloads.LessInt64, - []*exprpb.Type{decls.Int, decls.Int}, decls.Bool), - decls.NewOverload(overloads.LessInt64Double, - []*exprpb.Type{decls.Int, decls.Double}, decls.Bool), - decls.NewOverload(overloads.LessInt64Uint64, - []*exprpb.Type{decls.Int, decls.Uint}, decls.Bool), - decls.NewOverload(overloads.LessUint64, - []*exprpb.Type{decls.Uint, decls.Uint}, decls.Bool), - decls.NewOverload(overloads.LessUint64Double, - []*exprpb.Type{decls.Uint, decls.Double}, decls.Bool), - decls.NewOverload(overloads.LessUint64Int64, - []*exprpb.Type{decls.Uint, decls.Int}, decls.Bool), - decls.NewOverload(overloads.LessDouble, - []*exprpb.Type{decls.Double, decls.Double}, decls.Bool), - decls.NewOverload(overloads.LessDoubleInt64, - []*exprpb.Type{decls.Double, decls.Int}, decls.Bool), - decls.NewOverload(overloads.LessDoubleUint64, - []*exprpb.Type{decls.Double, decls.Uint}, decls.Bool), - decls.NewOverload(overloads.LessString, - []*exprpb.Type{decls.String, decls.String}, decls.Bool), - decls.NewOverload(overloads.LessBytes, - []*exprpb.Type{decls.Bytes, decls.Bytes}, decls.Bool), - decls.NewOverload(overloads.LessTimestamp, - []*exprpb.Type{decls.Timestamp, decls.Timestamp}, decls.Bool), - decls.NewOverload(overloads.LessDuration, - []*exprpb.Type{decls.Duration, decls.Duration}, decls.Bool)), - - decls.NewFunction(operators.LessEquals, - decls.NewOverload(overloads.LessEqualsBool, - []*exprpb.Type{decls.Bool, decls.Bool}, decls.Bool), - decls.NewOverload(overloads.LessEqualsInt64, - []*exprpb.Type{decls.Int, decls.Int}, decls.Bool), - decls.NewOverload(overloads.LessEqualsInt64Double, - []*exprpb.Type{decls.Int, decls.Double}, decls.Bool), - decls.NewOverload(overloads.LessEqualsInt64Uint64, - []*exprpb.Type{decls.Int, decls.Uint}, decls.Bool), - decls.NewOverload(overloads.LessEqualsUint64, - []*exprpb.Type{decls.Uint, decls.Uint}, decls.Bool), - decls.NewOverload(overloads.LessEqualsUint64Double, - []*exprpb.Type{decls.Uint, decls.Double}, decls.Bool), - decls.NewOverload(overloads.LessEqualsUint64Int64, - []*exprpb.Type{decls.Uint, decls.Int}, decls.Bool), - decls.NewOverload(overloads.LessEqualsDouble, - []*exprpb.Type{decls.Double, decls.Double}, decls.Bool), - decls.NewOverload(overloads.LessEqualsDoubleInt64, - []*exprpb.Type{decls.Double, decls.Int}, decls.Bool), - decls.NewOverload(overloads.LessEqualsDoubleUint64, - []*exprpb.Type{decls.Double, decls.Uint}, decls.Bool), - decls.NewOverload(overloads.LessEqualsString, - []*exprpb.Type{decls.String, decls.String}, decls.Bool), - decls.NewOverload(overloads.LessEqualsBytes, - []*exprpb.Type{decls.Bytes, decls.Bytes}, decls.Bool), - decls.NewOverload(overloads.LessEqualsTimestamp, - []*exprpb.Type{decls.Timestamp, decls.Timestamp}, decls.Bool), - decls.NewOverload(overloads.LessEqualsDuration, - []*exprpb.Type{decls.Duration, decls.Duration}, decls.Bool)), - - decls.NewFunction(operators.Greater, - decls.NewOverload(overloads.GreaterBool, - []*exprpb.Type{decls.Bool, decls.Bool}, decls.Bool), - decls.NewOverload(overloads.GreaterInt64, - []*exprpb.Type{decls.Int, decls.Int}, decls.Bool), - decls.NewOverload(overloads.GreaterInt64Double, - []*exprpb.Type{decls.Int, decls.Double}, decls.Bool), - decls.NewOverload(overloads.GreaterInt64Uint64, - []*exprpb.Type{decls.Int, decls.Uint}, decls.Bool), - decls.NewOverload(overloads.GreaterUint64, - []*exprpb.Type{decls.Uint, decls.Uint}, decls.Bool), 
- decls.NewOverload(overloads.GreaterUint64Double, - []*exprpb.Type{decls.Uint, decls.Double}, decls.Bool), - decls.NewOverload(overloads.GreaterUint64Int64, - []*exprpb.Type{decls.Uint, decls.Int}, decls.Bool), - decls.NewOverload(overloads.GreaterDouble, - []*exprpb.Type{decls.Double, decls.Double}, decls.Bool), - decls.NewOverload(overloads.GreaterDoubleInt64, - []*exprpb.Type{decls.Double, decls.Int}, decls.Bool), - decls.NewOverload(overloads.GreaterDoubleUint64, - []*exprpb.Type{decls.Double, decls.Uint}, decls.Bool), - decls.NewOverload(overloads.GreaterString, - []*exprpb.Type{decls.String, decls.String}, decls.Bool), - decls.NewOverload(overloads.GreaterBytes, - []*exprpb.Type{decls.Bytes, decls.Bytes}, decls.Bool), - decls.NewOverload(overloads.GreaterTimestamp, - []*exprpb.Type{decls.Timestamp, decls.Timestamp}, decls.Bool), - decls.NewOverload(overloads.GreaterDuration, - []*exprpb.Type{decls.Duration, decls.Duration}, decls.Bool)), - - decls.NewFunction(operators.GreaterEquals, - decls.NewOverload(overloads.GreaterEqualsBool, - []*exprpb.Type{decls.Bool, decls.Bool}, decls.Bool), - decls.NewOverload(overloads.GreaterEqualsInt64, - []*exprpb.Type{decls.Int, decls.Int}, decls.Bool), - decls.NewOverload(overloads.GreaterEqualsInt64Double, - []*exprpb.Type{decls.Int, decls.Double}, decls.Bool), - decls.NewOverload(overloads.GreaterEqualsInt64Uint64, - []*exprpb.Type{decls.Int, decls.Uint}, decls.Bool), - decls.NewOverload(overloads.GreaterEqualsUint64, - []*exprpb.Type{decls.Uint, decls.Uint}, decls.Bool), - decls.NewOverload(overloads.GreaterEqualsUint64Double, - []*exprpb.Type{decls.Uint, decls.Double}, decls.Bool), - decls.NewOverload(overloads.GreaterEqualsUint64Int64, - []*exprpb.Type{decls.Uint, decls.Int}, decls.Bool), - decls.NewOverload(overloads.GreaterEqualsDouble, - []*exprpb.Type{decls.Double, decls.Double}, decls.Bool), - decls.NewOverload(overloads.GreaterEqualsDoubleInt64, - []*exprpb.Type{decls.Double, decls.Int}, decls.Bool), - decls.NewOverload(overloads.GreaterEqualsDoubleUint64, - []*exprpb.Type{decls.Double, decls.Uint}, decls.Bool), - decls.NewOverload(overloads.GreaterEqualsString, - []*exprpb.Type{decls.String, decls.String}, decls.Bool), - decls.NewOverload(overloads.GreaterEqualsBytes, - []*exprpb.Type{decls.Bytes, decls.Bytes}, decls.Bool), - decls.NewOverload(overloads.GreaterEqualsTimestamp, - []*exprpb.Type{decls.Timestamp, decls.Timestamp}, decls.Bool), - decls.NewOverload(overloads.GreaterEqualsDuration, - []*exprpb.Type{decls.Duration, decls.Duration}, decls.Bool)), - }...) +// StandardFunctions returns the Decls for all functions in the evaluator. +// +// Deprecated: prefer stdlib.FunctionExprDecls() +func StandardFunctions() []*exprpb.Decl { + return stdlib.FunctionExprDecls() } -// StandardDeclarations returns the Decls for all functions and constants in the evaluator. -func StandardDeclarations() []*exprpb.Decl { - return standardDeclarations +// StandardTypes returns the set of type identifiers for standard library types. 
+// +// Deprecated: prefer stdlib.TypeExprDecls() +func StandardTypes() []*exprpb.Decl { + return stdlib.TypeExprDecls() } diff --git a/vendor/github.com/google/cel-go/checker/types.go b/vendor/github.com/google/cel-go/checker/types.go index 28d21c9d9..e2373d1b7 100644 --- a/vendor/github.com/google/cel-go/checker/types.go +++ b/vendor/github.com/google/cel-go/checker/types.go @@ -15,154 +15,54 @@ package checker import ( - "fmt" - "strings" - - "github.com/google/cel-go/checker/decls" - - "google.golang.org/protobuf/proto" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" + "github.com/google/cel-go/common/types" ) -const ( - kindUnknown = iota + 1 - kindError - kindFunction - kindDyn - kindPrimitive - kindWellKnown - kindWrapper - kindNull - kindAbstract - kindType - kindList - kindMap - kindObject - kindTypeParam -) - -// FormatCheckedType converts a type message into a string representation. -func FormatCheckedType(t *exprpb.Type) string { - switch kindOf(t) { - case kindDyn: - return "dyn" - case kindFunction: - return formatFunction(t.GetFunction().GetResultType(), - t.GetFunction().GetArgTypes(), - false) - case kindList: - return fmt.Sprintf("list(%s)", FormatCheckedType(t.GetListType().GetElemType())) - case kindObject: - return t.GetMessageType() - case kindMap: - return fmt.Sprintf("map(%s, %s)", - FormatCheckedType(t.GetMapType().GetKeyType()), - FormatCheckedType(t.GetMapType().GetValueType())) - case kindNull: - return "null" - case kindPrimitive: - switch t.GetPrimitive() { - case exprpb.Type_UINT64: - return "uint" - case exprpb.Type_INT64: - return "int" - } - return strings.Trim(strings.ToLower(t.GetPrimitive().String()), " ") - case kindType: - if t.GetType() == nil { - return "type" - } - return fmt.Sprintf("type(%s)", FormatCheckedType(t.GetType())) - case kindWellKnown: - switch t.GetWellKnown() { - case exprpb.Type_ANY: - return "any" - case exprpb.Type_DURATION: - return "duration" - case exprpb.Type_TIMESTAMP: - return "timestamp" - } - case kindWrapper: - return fmt.Sprintf("wrapper(%s)", - FormatCheckedType(decls.NewPrimitiveType(t.GetWrapper()))) - case kindError: - return "!error!" - case kindTypeParam: - return t.GetTypeParam() - case kindAbstract: - at := t.GetAbstractType() - params := at.GetParameterTypes() - paramStrs := make([]string, len(params)) - for i, p := range params { - paramStrs[i] = FormatCheckedType(p) - } - return fmt.Sprintf("%s(%s)", at.GetName(), strings.Join(paramStrs, ", ")) - } - return t.String() -} - // isDyn returns true if the input t is either type DYN or a well-known ANY message. -func isDyn(t *exprpb.Type) bool { +func isDyn(t *types.Type) bool { // Note: object type values that are well-known and map to a DYN value in practice // are sanitized prior to being added to the environment. - switch kindOf(t) { - case kindDyn: + switch t.Kind() { + case types.DynKind, types.AnyKind: return true - case kindWellKnown: - return t.GetWellKnown() == exprpb.Type_ANY default: return false } } // isDynOrError returns true if the input is either an Error, DYN, or well-known ANY message. 
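The StandardFunctions and StandardTypes shims above simply delegate to the common/stdlib package, so existing callers keep working while new code can call stdlib directly. A small sketch of that equivalence, assuming the import paths shown elsewhere in this patch:

package main

import (
	"fmt"

	"github.com/google/cel-go/checker"
	"github.com/google/cel-go/common/stdlib"
)

func main() {
	// Deprecated shim retained for compatibility; delegates to common/stdlib.
	viaChecker := checker.StandardFunctions()

	// Preferred entry point after this change.
	viaStdlib := stdlib.FunctionExprDecls()

	// Both paths are expected to yield the same proto-based declaration list.
	fmt.Println(len(viaChecker) == len(viaStdlib))

	// Type identifiers (int, uint, string, ...) are exposed the same way.
	fmt.Println(len(checker.StandardTypes()) == len(stdlib.TypeExprDecls()))
}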
-func isDynOrError(t *exprpb.Type) bool { +func isDynOrError(t *types.Type) bool { return isError(t) || isDyn(t) } -func isError(t *exprpb.Type) bool { - return kindOf(t) == kindError +func isError(t *types.Type) bool { + return t.Kind() == types.ErrorKind } -func isOptional(t *exprpb.Type) bool { - if kindOf(t) == kindAbstract { - at := t.GetAbstractType() - return at.GetName() == "optional" +func isOptional(t *types.Type) bool { + if t.Kind() == types.OpaqueKind { + return t.TypeName() == "optional" } return false } -func maybeUnwrapOptional(t *exprpb.Type) (*exprpb.Type, bool) { +func maybeUnwrapOptional(t *types.Type) (*types.Type, bool) { if isOptional(t) { - at := t.GetAbstractType() - return at.GetParameterTypes()[0], true + return t.Parameters()[0], true } return t, false } -func maybeUnwrapString(e *exprpb.Expr) (string, bool) { - switch e.GetExprKind().(type) { - case *exprpb.Expr_ConstExpr: - literal := e.GetConstExpr() - switch literal.GetConstantKind().(type) { - case *exprpb.Constant_StringValue: - return literal.GetStringValue(), true - } - } - return "", false -} - // isEqualOrLessSpecific checks whether one type is equal or less specific than the other one. // A type is less specific if it matches the other type using the DYN type. -func isEqualOrLessSpecific(t1 *exprpb.Type, t2 *exprpb.Type) bool { - kind1, kind2 := kindOf(t1), kindOf(t2) +func isEqualOrLessSpecific(t1, t2 *types.Type) bool { + kind1, kind2 := t1.Kind(), t2.Kind() // The first type is less specific. - if isDyn(t1) || kind1 == kindTypeParam { + if isDyn(t1) || kind1 == types.TypeParamKind { return true } // The first type is not less specific. - if isDyn(t2) || kind2 == kindTypeParam { + if isDyn(t2) || kind2 == types.TypeParamKind { return false } // Types must be of the same kind to be equal. @@ -173,38 +73,34 @@ func isEqualOrLessSpecific(t1 *exprpb.Type, t2 *exprpb.Type) bool { // With limited exceptions for ANY and JSON values, the types must agree and be equivalent in // order to return true. switch kind1 { - case kindAbstract: - a1 := t1.GetAbstractType() - a2 := t2.GetAbstractType() - if a1.GetName() != a2.GetName() || - len(a1.GetParameterTypes()) != len(a2.GetParameterTypes()) { + case types.OpaqueKind: + if t1.TypeName() != t2.TypeName() || + len(t1.Parameters()) != len(t2.Parameters()) { return false } - for i, p1 := range a1.GetParameterTypes() { - if !isEqualOrLessSpecific(p1, a2.GetParameterTypes()[i]) { + for i, p1 := range t1.Parameters() { + if !isEqualOrLessSpecific(p1, t2.Parameters()[i]) { return false } } return true - case kindList: - return isEqualOrLessSpecific(t1.GetListType().GetElemType(), t2.GetListType().GetElemType()) - case kindMap: - m1 := t1.GetMapType() - m2 := t2.GetMapType() - return isEqualOrLessSpecific(m1.GetKeyType(), m2.GetKeyType()) && - isEqualOrLessSpecific(m1.GetValueType(), m2.GetValueType()) - case kindType: + case types.ListKind: + return isEqualOrLessSpecific(t1.Parameters()[0], t2.Parameters()[0]) + case types.MapKind: + return isEqualOrLessSpecific(t1.Parameters()[0], t2.Parameters()[0]) && + isEqualOrLessSpecific(t1.Parameters()[1], t2.Parameters()[1]) + case types.TypeKind: return true default: - return proto.Equal(t1, t2) + return t1.IsExactType(t2) } } // / internalIsAssignable returns true if t1 is assignable to t2. -func internalIsAssignable(m *mapping, t1 *exprpb.Type, t2 *exprpb.Type) bool { +func internalIsAssignable(m *mapping, t1, t2 *types.Type) bool { // Process type parameters. 
- kind1, kind2 := kindOf(t1), kindOf(t2) - if kind2 == kindTypeParam { + kind1, kind2 := t1.Kind(), t2.Kind() + if kind2 == types.TypeParamKind { // If t2 is a valid type substitution for t1, return true. valid, t2HasSub := isValidTypeSubstitution(m, t1, t2) if valid { @@ -217,7 +113,7 @@ func internalIsAssignable(m *mapping, t1 *exprpb.Type, t2 *exprpb.Type) bool { } // Otherwise, fall through to check whether t1 is a possible substitution for t2. } - if kind1 == kindTypeParam { + if kind1 == types.TypeParamKind { // Return whether t1 is a valid substitution for t2. If not, do no additional checks as the // possible type substitutions have been searched in both directions. valid, _ := isValidTypeSubstitution(m, t2, t1) @@ -228,40 +124,25 @@ func internalIsAssignable(m *mapping, t1 *exprpb.Type, t2 *exprpb.Type) bool { if isDynOrError(t1) || isDynOrError(t2) { return true } - - // Test for when the types do not need to agree, but are more specific than dyn. - switch kind1 { - case kindNull: + // Preserve the nullness checks of the legacy type-checker. + if kind1 == types.NullTypeKind { return internalIsAssignableNull(t2) - case kindPrimitive: - return internalIsAssignablePrimitive(t1.GetPrimitive(), t2) - case kindWrapper: - return internalIsAssignable(m, decls.NewPrimitiveType(t1.GetWrapper()), t2) - default: - if kind1 != kind2 { - return false - } + } + if kind2 == types.NullTypeKind { + return internalIsAssignableNull(t1) } - // Test for when the types must agree. + // Test for when the types do not need to agree, but are more specific than dyn. switch kind1 { - // ERROR, TYPE_PARAM, and DYN handled above. - case kindAbstract: - return internalIsAssignableAbstractType(m, t1.GetAbstractType(), t2.GetAbstractType()) - case kindFunction: - return internalIsAssignableFunction(m, t1.GetFunction(), t2.GetFunction()) - case kindList: - return internalIsAssignable(m, t1.GetListType().GetElemType(), t2.GetListType().GetElemType()) - case kindMap: - return internalIsAssignableMap(m, t1.GetMapType(), t2.GetMapType()) - case kindObject: - return t1.GetMessageType() == t2.GetMessageType() - case kindType: - // A type is a type is a type, any additional parameterization of the - // type cannot affect method resolution or assignability. - return true - case kindWellKnown: - return t1.GetWellKnown() == t2.GetWellKnown() + case types.BoolKind, types.BytesKind, types.DoubleKind, types.IntKind, types.StringKind, types.UintKind, + types.AnyKind, types.DurationKind, types.TimestampKind, + types.StructKind: + return t1.IsAssignableType(t2) + case types.TypeKind: + return kind2 == types.TypeKind + case types.OpaqueKind, types.ListKind, types.MapKind: + return t1.Kind() == t2.Kind() && t1.TypeName() == t2.TypeName() && + internalIsAssignableList(m, t1.Parameters(), t2.Parameters()) default: return false } @@ -274,16 +155,16 @@ func internalIsAssignable(m *mapping, t1 *exprpb.Type, t2 *exprpb.Type) bool { // - t2 has a type substitution (t2sub) equal to t1 // - t2 has a type substitution (t2sub) assignable to t1 // - t2 does not occur within t1. -func isValidTypeSubstitution(m *mapping, t1, t2 *exprpb.Type) (valid, hasSub bool) { +func isValidTypeSubstitution(m *mapping, t1, t2 *types.Type) (valid, hasSub bool) { // Early return if the t1 and t2 are the same instance. 
- kind1, kind2 := kindOf(t1), kindOf(t2) - if kind1 == kind2 && (t1 == t2 || proto.Equal(t1, t2)) { + kind1, kind2 := t1.Kind(), t2.Kind() + if kind1 == kind2 && t1.IsExactType(t2) { return true, true } if t2Sub, found := m.find(t2); found { // Early return if t1 and t2Sub are the same instance as otherwise the mapping // might mark a type as being a subtitution for itself. - if kind1 == kindOf(t2Sub) && (t1 == t2Sub || proto.Equal(t1, t2Sub)) { + if kind1 == t2Sub.Kind() && t1.IsExactType(t2Sub) { return true, true } // If the types are compatible, pick the more general type and return true @@ -305,28 +186,10 @@ func isValidTypeSubstitution(m *mapping, t1, t2 *exprpb.Type) (valid, hasSub boo return false, false } -// internalIsAssignableAbstractType returns true if the abstract type names agree and all type -// parameters are assignable. -func internalIsAssignableAbstractType(m *mapping, a1 *exprpb.Type_AbstractType, a2 *exprpb.Type_AbstractType) bool { - return a1.GetName() == a2.GetName() && - internalIsAssignableList(m, a1.GetParameterTypes(), a2.GetParameterTypes()) -} - -// internalIsAssignableFunction returns true if the function return type and arg types are -// assignable. -func internalIsAssignableFunction(m *mapping, f1 *exprpb.Type_FunctionType, f2 *exprpb.Type_FunctionType) bool { - f1ArgTypes := flattenFunctionTypes(f1) - f2ArgTypes := flattenFunctionTypes(f2) - if internalIsAssignableList(m, f1ArgTypes, f2ArgTypes) { - return true - } - return false -} - // internalIsAssignableList returns true if the element types at each index in the list are // assignable from l1[i] to l2[i]. The list lengths must also agree for the lists to be // assignable. -func internalIsAssignableList(m *mapping, l1 []*exprpb.Type, l2 []*exprpb.Type) bool { +func internalIsAssignableList(m *mapping, l1, l2 []*types.Type) bool { if len(l1) != len(l2) { return false } @@ -338,41 +201,22 @@ func internalIsAssignableList(m *mapping, l1 []*exprpb.Type, l2 []*exprpb.Type) return true } -// internalIsAssignableMap returns true if map m1 may be assigned to map m2. -func internalIsAssignableMap(m *mapping, m1 *exprpb.Type_MapType, m2 *exprpb.Type_MapType) bool { - if internalIsAssignableList(m, - []*exprpb.Type{m1.GetKeyType(), m1.GetValueType()}, - []*exprpb.Type{m2.GetKeyType(), m2.GetValueType()}) { - return true - } - return false -} - // internalIsAssignableNull returns true if the type is nullable. -func internalIsAssignableNull(t *exprpb.Type) bool { - switch kindOf(t) { - case kindAbstract, kindObject, kindNull, kindWellKnown, kindWrapper: - return true - default: - return false - } +func internalIsAssignableNull(t *types.Type) bool { + return isLegacyNullable(t) || t.IsAssignableType(types.NullType) } -// internalIsAssignablePrimitive returns true if the target type is the same or if it is a wrapper -// for the primitive type. -func internalIsAssignablePrimitive(p exprpb.Type_PrimitiveType, target *exprpb.Type) bool { - switch kindOf(target) { - case kindPrimitive: - return p == target.GetPrimitive() - case kindWrapper: - return p == target.GetWrapper() - default: - return false +// isLegacyNullable preserves the null-ness compatibility of the original type-checker implementation. +func isLegacyNullable(t *types.Type) bool { + switch t.Kind() { + case types.OpaqueKind, types.StructKind, types.AnyKind, types.DurationKind, types.TimestampKind: + return true } + return false } // isAssignable returns an updated type substitution mapping if t1 is assignable to t2. 
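The assignability and substitution checks above now read type structure through Kind(), TypeName(), Parameters(), and IsExactType() on *types.Type rather than through the proto getters. An illustrative sketch of those accessors; the expected results follow the definitions in this patch but are not authoritative:

package main

import (
	"fmt"

	"github.com/google/cel-go/common/types"
)

func main() {
	listOfString := types.NewListType(types.StringType)
	mapStringDyn := types.NewMapType(types.StringType, types.DynType)

	// Kind and type name drive the checker's same-kind comparisons.
	fmt.Println(listOfString.Kind() == types.ListKind) // true
	fmt.Println(mapStringDyn.TypeName())               // map

	// Parameters() replaces GetListType()/GetMapType() field access:
	// the list element type is Parameters()[0]; map key/value are [0] and [1].
	fmt.Println(listOfString.Parameters()[0].TypeName()) // string
	fmt.Println(mapStringDyn.Parameters()[1].TypeName()) // dyn

	// IsExactType() replaces proto.Equal on the old proto-based types.
	fmt.Println(listOfString.IsExactType(types.NewListType(types.StringType))) // true
	fmt.Println(listOfString.IsExactType(types.NewListType(types.DynType)))    // false
}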
-func isAssignable(m *mapping, t1 *exprpb.Type, t2 *exprpb.Type) *mapping { +func isAssignable(m *mapping, t1, t2 *types.Type) *mapping { mCopy := m.copy() if internalIsAssignable(mCopy, t1, t2) { return mCopy @@ -381,7 +225,7 @@ func isAssignable(m *mapping, t1 *exprpb.Type, t2 *exprpb.Type) *mapping { } // isAssignableList returns an updated type substitution mapping if l1 is assignable to l2. -func isAssignableList(m *mapping, l1 []*exprpb.Type, l2 []*exprpb.Type) *mapping { +func isAssignableList(m *mapping, l1, l2 []*types.Type) *mapping { mCopy := m.copy() if internalIsAssignableList(mCopy, l1, l2) { return mCopy @@ -389,44 +233,8 @@ func isAssignableList(m *mapping, l1 []*exprpb.Type, l2 []*exprpb.Type) *mapping return nil } -// kindOf returns the kind of the type as defined in the checked.proto. -func kindOf(t *exprpb.Type) int { - if t == nil || t.TypeKind == nil { - return kindUnknown - } - switch t.GetTypeKind().(type) { - case *exprpb.Type_Error: - return kindError - case *exprpb.Type_Function: - return kindFunction - case *exprpb.Type_Dyn: - return kindDyn - case *exprpb.Type_Primitive: - return kindPrimitive - case *exprpb.Type_WellKnown: - return kindWellKnown - case *exprpb.Type_Wrapper: - return kindWrapper - case *exprpb.Type_Null: - return kindNull - case *exprpb.Type_Type: - return kindType - case *exprpb.Type_ListType_: - return kindList - case *exprpb.Type_MapType_: - return kindMap - case *exprpb.Type_MessageType: - return kindObject - case *exprpb.Type_TypeParam: - return kindTypeParam - case *exprpb.Type_AbstractType_: - return kindAbstract - } - return kindUnknown -} - // mostGeneral returns the more general of two types which are known to unify. -func mostGeneral(t1 *exprpb.Type, t2 *exprpb.Type) *exprpb.Type { +func mostGeneral(t1, t2 *types.Type) *types.Type { if isEqualOrLessSpecific(t1, t2) { return t1 } @@ -436,32 +244,25 @@ func mostGeneral(t1 *exprpb.Type, t2 *exprpb.Type) *exprpb.Type { // notReferencedIn checks whether the type doesn't appear directly or transitively within the other // type. This is a standard requirement for type unification, commonly referred to as the "occurs // check". -func notReferencedIn(m *mapping, t *exprpb.Type, withinType *exprpb.Type) bool { - if proto.Equal(t, withinType) { +func notReferencedIn(m *mapping, t, withinType *types.Type) bool { + if t.IsExactType(withinType) { return false } - withinKind := kindOf(withinType) + withinKind := withinType.Kind() switch withinKind { - case kindTypeParam: + case types.TypeParamKind: wtSub, found := m.find(withinType) if !found { return true } return notReferencedIn(m, t, wtSub) - case kindAbstract: - for _, pt := range withinType.GetAbstractType().GetParameterTypes() { + case types.OpaqueKind, types.ListKind, types.MapKind: + for _, pt := range withinType.Parameters() { if !notReferencedIn(m, t, pt) { return false } } return true - case kindList: - return notReferencedIn(m, t, withinType.GetListType().GetElemType()) - case kindMap: - mt := withinType.GetMapType() - return notReferencedIn(m, t, mt.GetKeyType()) && notReferencedIn(m, t, mt.GetValueType()) - case kindWrapper: - return notReferencedIn(m, t, decls.NewPrimitiveType(withinType.GetWrapper())) default: return true } @@ -469,39 +270,25 @@ func notReferencedIn(m *mapping, t *exprpb.Type, withinType *exprpb.Type) bool { // substitute replaces all direct and indirect occurrences of bound type parameters. Unbound type // parameters are replaced by DYN if typeParamToDyn is true. 
-func substitute(m *mapping, t *exprpb.Type, typeParamToDyn bool) *exprpb.Type { +func substitute(m *mapping, t *types.Type, typeParamToDyn bool) *types.Type { if tSub, found := m.find(t); found { return substitute(m, tSub, typeParamToDyn) } - kind := kindOf(t) - if typeParamToDyn && kind == kindTypeParam { - return decls.Dyn + kind := t.Kind() + if typeParamToDyn && kind == types.TypeParamKind { + return types.DynType } switch kind { - case kindAbstract: - at := t.GetAbstractType() - params := make([]*exprpb.Type, len(at.GetParameterTypes())) - for i, p := range at.GetParameterTypes() { - params[i] = substitute(m, p, typeParamToDyn) - } - return decls.NewAbstractType(at.GetName(), params...) - case kindFunction: - fn := t.GetFunction() - rt := substitute(m, fn.ResultType, typeParamToDyn) - args := make([]*exprpb.Type, len(fn.GetArgTypes())) - for i, a := range fn.ArgTypes { - args[i] = substitute(m, a, typeParamToDyn) - } - return decls.NewFunctionType(rt, args...) - case kindList: - return decls.NewListType(substitute(m, t.GetListType().GetElemType(), typeParamToDyn)) - case kindMap: - mt := t.GetMapType() - return decls.NewMapType(substitute(m, mt.GetKeyType(), typeParamToDyn), - substitute(m, mt.GetValueType(), typeParamToDyn)) - case kindType: - if t.GetType() != nil { - return decls.NewTypeType(substitute(m, t.GetType(), typeParamToDyn)) + case types.OpaqueKind: + return types.NewOpaqueType(t.TypeName(), substituteParams(m, t.Parameters(), typeParamToDyn)...) + case types.ListKind: + return types.NewListType(substitute(m, t.Parameters()[0], typeParamToDyn)) + case types.MapKind: + return types.NewMapType(substitute(m, t.Parameters()[0], typeParamToDyn), + substitute(m, t.Parameters()[1], typeParamToDyn)) + case types.TypeKind: + if len(t.Parameters()) > 0 { + return types.NewTypeTypeWithParam(substitute(m, t.Parameters()[0], typeParamToDyn)) } return t default: @@ -509,21 +296,14 @@ func substitute(m *mapping, t *exprpb.Type, typeParamToDyn bool) *exprpb.Type { } } -func typeKey(t *exprpb.Type) string { - return FormatCheckedType(t) +func substituteParams(m *mapping, typeParams []*types.Type, typeParamToDyn bool) []*types.Type { + subParams := make([]*types.Type, len(typeParams)) + for i, tp := range typeParams { + subParams[i] = substitute(m, tp, typeParamToDyn) + } + return subParams } -// flattenFunctionTypes takes a function with arg types T1, T2, ..., TN and result type TR -// and returns a slice containing {T1, T2, ..., TN, TR}. -func flattenFunctionTypes(f *exprpb.Type_FunctionType) []*exprpb.Type { - argTypes := f.GetArgTypes() - if len(argTypes) == 0 { - return []*exprpb.Type{f.GetResultType()} - } - flattend := make([]*exprpb.Type, len(argTypes)+1, len(argTypes)+1) - for i, at := range argTypes { - flattend[i] = at - } - flattend[len(argTypes)] = f.GetResultType() - return flattend +func newFunctionType(resultType *types.Type, argTypes ...*types.Type) *types.Type { + return types.NewOpaqueType("function", append([]*types.Type{resultType}, argTypes...)...) 
} diff --git a/vendor/github.com/google/cel-go/common/ast/BUILD.bazel b/vendor/github.com/google/cel-go/common/ast/BUILD.bazel new file mode 100644 index 000000000..c92a0f179 --- /dev/null +++ b/vendor/github.com/google/cel-go/common/ast/BUILD.bazel @@ -0,0 +1,61 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") + +package( + default_visibility = [ + "//cel:__subpackages__", + "//checker:__subpackages__", + "//common:__subpackages__", + "//ext:__subpackages__", + "//interpreter:__subpackages__", + "//parser:__subpackages__", + ], + licenses = ["notice"], # Apache 2.0 +) + +go_library( + name = "go_default_library", + srcs = [ + "ast.go", + "conversion.go", + "expr.go", + "factory.go", + "navigable.go", + ], + importpath = "github.com/google/cel-go/common/ast", + deps = [ + "//common:go_default_library", + "//common/types:go_default_library", + "//common/types/ref:go_default_library", + "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library", + "@org_golang_google_protobuf//types/known/structpb:go_default_library", + ], +) + +go_test( + name = "go_default_test", + srcs = [ + "ast_test.go", + "conversion_test.go", + "expr_test.go", + "navigable_test.go", + ], + embed = [ + ":go_default_library", + ], + deps = [ + "//checker:go_default_library", + "//checker/decls:go_default_library", + "//common:go_default_library", + "//common/containers:go_default_library", + "//common/decls:go_default_library", + "//common/overloads:go_default_library", + "//common/stdlib:go_default_library", + "//common/types:go_default_library", + "//common/types/ref:go_default_library", + "//parser:go_default_library", + "//test/proto3pb:go_default_library", + "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library", + "@org_golang_google_protobuf//proto:go_default_library", + "@org_golang_google_protobuf//encoding/prototext:go_default_library", + ], +) \ No newline at end of file diff --git a/vendor/github.com/google/cel-go/common/ast/ast.go b/vendor/github.com/google/cel-go/common/ast/ast.go new file mode 100644 index 000000000..4feddaa3a --- /dev/null +++ b/vendor/github.com/google/cel-go/common/ast/ast.go @@ -0,0 +1,450 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package ast declares data structures useful for parsed and checked abstract syntax trees +package ast + +import ( + "github.com/google/cel-go/common" + "github.com/google/cel-go/common/types" + "github.com/google/cel-go/common/types/ref" +) + +// AST contains a protobuf expression and source info along with CEL-native type and reference information. +type AST struct { + expr Expr + sourceInfo *SourceInfo + typeMap map[int64]*types.Type + refMap map[int64]*ReferenceInfo +} + +// Expr returns the root ast.Expr value in the AST. +func (a *AST) Expr() Expr { + if a == nil { + return nilExpr + } + return a.expr +} + +// SourceInfo returns the source metadata associated with the parse / type-check passes. 
+func (a *AST) SourceInfo() *SourceInfo { + if a == nil { + return nil + } + return a.sourceInfo +} + +// GetType returns the type for the expression at the given id, if one exists, else types.DynType. +func (a *AST) GetType(id int64) *types.Type { + if t, found := a.TypeMap()[id]; found { + return t + } + return types.DynType +} + +// SetType sets the type of the expression node at the given id. +func (a *AST) SetType(id int64, t *types.Type) { + if a == nil { + return + } + a.typeMap[id] = t +} + +// TypeMap returns the map of expression ids to type-checked types. +// +// If the AST is not type-checked, the map will be empty. +func (a *AST) TypeMap() map[int64]*types.Type { + if a == nil { + return map[int64]*types.Type{} + } + return a.typeMap +} + +// GetOverloadIDs returns the set of overload function names for a given expression id. +// +// If the expression id is not a function call, or the AST is not type-checked, the result will be empty. +func (a *AST) GetOverloadIDs(id int64) []string { + if ref, found := a.ReferenceMap()[id]; found { + return ref.OverloadIDs + } + return []string{} +} + +// ReferenceMap returns the map of expression id to identifier, constant, and function references. +func (a *AST) ReferenceMap() map[int64]*ReferenceInfo { + if a == nil { + return map[int64]*ReferenceInfo{} + } + return a.refMap +} + +// SetReference adds a reference to the checked AST type map. +func (a *AST) SetReference(id int64, r *ReferenceInfo) { + if a == nil { + return + } + a.refMap[id] = r +} + +// IsChecked returns whether the AST is type-checked. +func (a *AST) IsChecked() bool { + return a != nil && len(a.TypeMap()) > 0 +} + +// NewAST creates a base AST instance with an ast.Expr and ast.SourceInfo value. +func NewAST(e Expr, sourceInfo *SourceInfo) *AST { + if e == nil { + e = nilExpr + } + return &AST{ + expr: e, + sourceInfo: sourceInfo, + typeMap: make(map[int64]*types.Type), + refMap: make(map[int64]*ReferenceInfo), + } +} + +// NewCheckedAST wraps an parsed AST and augments it with type and reference metadata. +func NewCheckedAST(parsed *AST, typeMap map[int64]*types.Type, refMap map[int64]*ReferenceInfo) *AST { + return &AST{ + expr: parsed.Expr(), + sourceInfo: parsed.SourceInfo(), + typeMap: typeMap, + refMap: refMap, + } +} + +// Copy creates a deep copy of the Expr and SourceInfo values in the input AST. +// +// Copies of the Expr value are generated using an internal default ExprFactory. +func Copy(a *AST) *AST { + if a == nil { + return nil + } + e := defaultFactory.CopyExpr(a.expr) + if !a.IsChecked() { + return NewAST(e, CopySourceInfo(a.SourceInfo())) + } + typesCopy := make(map[int64]*types.Type, len(a.typeMap)) + for id, t := range a.typeMap { + typesCopy[id] = t + } + refsCopy := make(map[int64]*ReferenceInfo, len(a.refMap)) + for id, r := range a.refMap { + refsCopy[id] = r + } + return NewCheckedAST(NewAST(e, CopySourceInfo(a.SourceInfo())), typesCopy, refsCopy) +} + +// MaxID returns the upper-bound, non-inclusive, of ids present within the AST's Expr value. +func MaxID(a *AST) int64 { + visitor := &maxIDVisitor{maxID: 1} + PostOrderVisit(a.Expr(), visitor) + return visitor.maxID + 1 +} + +// NewSourceInfo creates a simple SourceInfo object from an input common.Source value. 
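A brief sketch of how the AST container above is assembled and queried, using the ExprFactory and the type/reference maps; the expression ids and the size_string overload id are illustrative:

package main

import (
	"fmt"

	"github.com/google/cel-go/common/ast"
	"github.com/google/cel-go/common/types"
)

func main() {
	fac := ast.NewExprFactory()
	// size("hello") with illustrative expression ids 1 and 2.
	e := fac.NewCall(2, "size", fac.NewLiteral(1, types.String("hello")))

	// A parsed-only AST has an empty type map, so IsChecked() reports false
	// and GetType() falls back to types.DynType.
	parsed := ast.NewAST(e, ast.NewSourceInfo(nil))
	fmt.Println(parsed.IsChecked(), parsed.GetType(2) == types.DynType) // false true

	// Attaching type and reference metadata yields a checked AST.
	typeMap := map[int64]*types.Type{1: types.StringType, 2: types.IntType}
	refMap := map[int64]*ast.ReferenceInfo{2: ast.NewFunctionReference("size_string")}
	checked := ast.NewCheckedAST(parsed, typeMap, refMap)
	fmt.Println(checked.IsChecked(), checked.GetType(2) == types.IntType) // true true
	fmt.Println(ast.MaxID(checked))                                       // 3
}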
+func NewSourceInfo(src common.Source) *SourceInfo { + var lineOffsets []int32 + var desc string + baseLine := int32(0) + baseCol := int32(0) + if src != nil { + desc = src.Description() + lineOffsets = src.LineOffsets() + // Determine whether the source metadata should be computed relative + // to a base line and column value. This can be determined by requesting + // the location for offset 0 from the source object. + if loc, found := src.OffsetLocation(0); found { + baseLine = int32(loc.Line()) - 1 + baseCol = int32(loc.Column()) + } + } + return &SourceInfo{ + desc: desc, + lines: lineOffsets, + baseLine: baseLine, + baseCol: baseCol, + offsetRanges: make(map[int64]OffsetRange), + macroCalls: make(map[int64]Expr), + } +} + +// CopySourceInfo creates a deep copy of the MacroCalls within the input SourceInfo. +// +// Copies of macro Expr values are generated using an internal default ExprFactory. +func CopySourceInfo(info *SourceInfo) *SourceInfo { + if info == nil { + return nil + } + rangesCopy := make(map[int64]OffsetRange, len(info.offsetRanges)) + for id, off := range info.offsetRanges { + rangesCopy[id] = off + } + callsCopy := make(map[int64]Expr, len(info.macroCalls)) + for id, call := range info.macroCalls { + callsCopy[id] = defaultFactory.CopyExpr(call) + } + return &SourceInfo{ + syntax: info.syntax, + desc: info.desc, + lines: info.lines, + baseLine: info.baseLine, + baseCol: info.baseCol, + offsetRanges: rangesCopy, + macroCalls: callsCopy, + } +} + +// SourceInfo records basic information about the expression as a textual input and +// as a parsed expression value. +type SourceInfo struct { + syntax string + desc string + lines []int32 + baseLine int32 + baseCol int32 + offsetRanges map[int64]OffsetRange + macroCalls map[int64]Expr +} + +// SyntaxVersion returns the syntax version associated with the text expression. +func (s *SourceInfo) SyntaxVersion() string { + if s == nil { + return "" + } + return s.syntax +} + +// Description provides information about where the expression came from. +func (s *SourceInfo) Description() string { + if s == nil { + return "" + } + return s.desc +} + +// LineOffsets returns a list of the 0-based character offsets in the input text where newlines appear. +func (s *SourceInfo) LineOffsets() []int32 { + if s == nil { + return []int32{} + } + return s.lines +} + +// MacroCalls returns a map of expression id to ast.Expr value where the id represents the expression +// node where the macro was inserted into the AST, and the ast.Expr value represents the original call +// signature which was replaced. +func (s *SourceInfo) MacroCalls() map[int64]Expr { + if s == nil { + return map[int64]Expr{} + } + return s.macroCalls +} + +// GetMacroCall returns the original ast.Expr value for the given expression if it was generated via +// a macro replacement. +// +// Note, parsing options must be enabled to track macro calls before this method will return a value. +func (s *SourceInfo) GetMacroCall(id int64) (Expr, bool) { + e, found := s.MacroCalls()[id] + return e, found +} + +// SetMacroCall records a macro call at a specific location. +func (s *SourceInfo) SetMacroCall(id int64, e Expr) { + if s != nil { + s.macroCalls[id] = e + } +} + +// ClearMacroCall removes the macro call at the given expression id. 
+func (s *SourceInfo) ClearMacroCall(id int64) { + if s != nil { + delete(s.macroCalls, id) + } +} + +// OffsetRanges returns a map of expression id to OffsetRange values where the range indicates either: +// the start and end position in the input stream where the expression occurs, or the start position +// only. If the range only captures start position, the stop position of the range will be equal to +// the start. +func (s *SourceInfo) OffsetRanges() map[int64]OffsetRange { + if s == nil { + return map[int64]OffsetRange{} + } + return s.offsetRanges +} + +// GetOffsetRange retrieves an OffsetRange for the given expression id if one exists. +func (s *SourceInfo) GetOffsetRange(id int64) (OffsetRange, bool) { + if s == nil { + return OffsetRange{}, false + } + o, found := s.offsetRanges[id] + return o, found +} + +// SetOffsetRange sets the OffsetRange for the given expression id. +func (s *SourceInfo) SetOffsetRange(id int64, o OffsetRange) { + if s == nil { + return + } + s.offsetRanges[id] = o +} + +// GetStartLocation calculates the human-readable 1-based line and 0-based column of the first character +// of the expression node at the id. +func (s *SourceInfo) GetStartLocation(id int64) common.Location { + if o, found := s.GetOffsetRange(id); found { + line := 1 + col := int(o.Start) + for _, lineOffset := range s.LineOffsets() { + if lineOffset < o.Start { + line++ + col = int(o.Start - lineOffset) + } else { + break + } + } + return common.NewLocation(line, col) + } + return common.NoLocation +} + +// GetStopLocation calculates the human-readable 1-based line and 0-based column of the last character for +// the expression node at the given id. +// +// If the SourceInfo was generated from a serialized protobuf representation, the stop location will +// be identical to the start location for the expression. +func (s *SourceInfo) GetStopLocation(id int64) common.Location { + if o, found := s.GetOffsetRange(id); found { + line := 1 + col := int(o.Stop) + for _, lineOffset := range s.LineOffsets() { + if lineOffset < o.Stop { + line++ + col = int(o.Stop - lineOffset) + } else { + break + } + } + return common.NewLocation(line, col) + } + return common.NoLocation +} + +// ComputeOffset calculates the 0-based character offset from a 1-based line and 0-based column. +func (s *SourceInfo) ComputeOffset(line, col int32) int32 { + if s != nil { + line = s.baseLine + line + col = s.baseCol + col + } + if line == 1 { + return col + } + if line < 1 || line > int32(len(s.LineOffsets())) { + return -1 + } + offset := s.LineOffsets()[line-2] + return offset + col +} + +// OffsetRange captures the start and stop positions of a section of text in the input expression. +type OffsetRange struct { + Start int32 + Stop int32 +} + +// ReferenceInfo contains a CEL native representation of an identifier reference which may refer to +// either a qualified identifier name, a set of overload ids, or a constant value from an enum. +type ReferenceInfo struct { + Name string + OverloadIDs []string + Value ref.Val +} + +// NewIdentReference creates a ReferenceInfo instance for an identifier with an optional constant value. +func NewIdentReference(name string, value ref.Val) *ReferenceInfo { + return &ReferenceInfo{Name: name, Value: value} +} + +// NewFunctionReference creates a ReferenceInfo instance for a set of function overloads. 
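A small sketch of the offset bookkeeping above; the expression id and offsets are made up, and the expected outputs follow from the GetStartLocation and ComputeOffset implementations shown here:

package main

import (
	"fmt"

	"github.com/google/cel-go/common"
	"github.com/google/cel-go/common/ast"
)

func main() {
	// Offsets are 0-based character positions; "bar" starts at offset 5, and
	// its second character sits at offset 6 on line 2.
	src := common.NewTextSource("a &&\nbar")
	info := ast.NewSourceInfo(src)

	// Record that a hypothetical expression id 7 starts at offset 6.
	info.SetOffsetRange(7, ast.OffsetRange{Start: 6, Stop: 8})

	loc := info.GetStartLocation(7)
	fmt.Println(loc.Line(), loc.Column()) // 2 1

	// ComputeOffset inverts the mapping: 1-based line, 0-based column -> offset.
	fmt.Println(info.ComputeOffset(2, 1)) // 6
}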
+func NewFunctionReference(overloads ...string) *ReferenceInfo { + info := &ReferenceInfo{} + for _, id := range overloads { + info.AddOverload(id) + } + return info +} + +// AddOverload appends a function overload ID to the ReferenceInfo. +func (r *ReferenceInfo) AddOverload(overloadID string) { + for _, id := range r.OverloadIDs { + if id == overloadID { + return + } + } + r.OverloadIDs = append(r.OverloadIDs, overloadID) +} + +// Equals returns whether two references are identical to each other. +func (r *ReferenceInfo) Equals(other *ReferenceInfo) bool { + if r.Name != other.Name { + return false + } + if len(r.OverloadIDs) != len(other.OverloadIDs) { + return false + } + if len(r.OverloadIDs) != 0 { + overloadMap := make(map[string]struct{}, len(r.OverloadIDs)) + for _, id := range r.OverloadIDs { + overloadMap[id] = struct{}{} + } + for _, id := range other.OverloadIDs { + _, found := overloadMap[id] + if !found { + return false + } + } + } + if r.Value == nil && other.Value == nil { + return true + } + if r.Value == nil && other.Value != nil || + r.Value != nil && other.Value == nil || + r.Value.Equal(other.Value) != types.True { + return false + } + return true +} + +type maxIDVisitor struct { + maxID int64 + *baseVisitor +} + +// VisitExpr updates the max identifier if the incoming expression id is greater than previously observed. +func (v *maxIDVisitor) VisitExpr(e Expr) { + if v.maxID < e.ID() { + v.maxID = e.ID() + } +} + +// VisitEntryExpr updates the max identifier if the incoming entry id is greater than previously observed. +func (v *maxIDVisitor) VisitEntryExpr(e EntryExpr) { + if v.maxID < e.ID() { + v.maxID = e.ID() + } +} diff --git a/vendor/github.com/google/cel-go/common/ast/conversion.go b/vendor/github.com/google/cel-go/common/ast/conversion.go new file mode 100644 index 000000000..8f2c4bd1e --- /dev/null +++ b/vendor/github.com/google/cel-go/common/ast/conversion.go @@ -0,0 +1,632 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ast + +import ( + "fmt" + + "github.com/google/cel-go/common/types" + "github.com/google/cel-go/common/types/ref" + + structpb "google.golang.org/protobuf/types/known/structpb" + + exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" +) + +// ToProto converts an AST to a CheckedExpr protobouf. 
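A short, illustrative sketch of the ReferenceInfo helpers defined above; the overload ids and enum names are hypothetical:

package main

import (
	"fmt"

	"github.com/google/cel-go/common/ast"
	"github.com/google/cel-go/common/types"
)

func main() {
	// Function references accumulate a de-duplicated set of overload ids.
	r1 := ast.NewFunctionReference("size_string")
	r1.AddOverload("size_bytes")
	r1.AddOverload("size_string") // duplicate, ignored
	fmt.Println(len(r1.OverloadIDs)) // 2

	// Equals compares names, overload sets, and constant values.
	r2 := ast.NewFunctionReference("size_bytes", "size_string")
	fmt.Println(r1.Equals(r2)) // true: same overload set, order-insensitive

	// Identifier references may carry a constant value (e.g. enum constants).
	c1 := ast.NewIdentReference("my.pkg.Enum.VALUE", types.Int(1))
	c2 := ast.NewIdentReference("my.pkg.Enum.VALUE", types.Int(2))
	fmt.Println(c1.Equals(c2)) // false: values differ
}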
+func ToProto(ast *AST) (*exprpb.CheckedExpr, error) { + refMap := make(map[int64]*exprpb.Reference, len(ast.ReferenceMap())) + for id, ref := range ast.ReferenceMap() { + r, err := ReferenceInfoToProto(ref) + if err != nil { + return nil, err + } + refMap[id] = r + } + typeMap := make(map[int64]*exprpb.Type, len(ast.TypeMap())) + for id, typ := range ast.TypeMap() { + t, err := types.TypeToExprType(typ) + if err != nil { + return nil, err + } + typeMap[id] = t + } + e, err := ExprToProto(ast.Expr()) + if err != nil { + return nil, err + } + info, err := SourceInfoToProto(ast.SourceInfo()) + if err != nil { + return nil, err + } + return &exprpb.CheckedExpr{ + Expr: e, + SourceInfo: info, + ReferenceMap: refMap, + TypeMap: typeMap, + }, nil +} + +// ToAST converts a CheckedExpr protobuf to an AST instance. +func ToAST(checked *exprpb.CheckedExpr) (*AST, error) { + refMap := make(map[int64]*ReferenceInfo, len(checked.GetReferenceMap())) + for id, ref := range checked.GetReferenceMap() { + r, err := ProtoToReferenceInfo(ref) + if err != nil { + return nil, err + } + refMap[id] = r + } + typeMap := make(map[int64]*types.Type, len(checked.GetTypeMap())) + for id, typ := range checked.GetTypeMap() { + t, err := types.ExprTypeToType(typ) + if err != nil { + return nil, err + } + typeMap[id] = t + } + info, err := ProtoToSourceInfo(checked.GetSourceInfo()) + if err != nil { + return nil, err + } + root, err := ProtoToExpr(checked.GetExpr()) + if err != nil { + return nil, err + } + ast := NewCheckedAST(NewAST(root, info), typeMap, refMap) + return ast, nil +} + +// ProtoToExpr converts a protobuf Expr value to an ast.Expr value. +func ProtoToExpr(e *exprpb.Expr) (Expr, error) { + factory := NewExprFactory() + return exprInternal(factory, e) +} + +// ProtoToEntryExpr converts a protobuf struct/map entry to an ast.EntryExpr +func ProtoToEntryExpr(e *exprpb.Expr_CreateStruct_Entry) (EntryExpr, error) { + factory := NewExprFactory() + switch e.GetKeyKind().(type) { + case *exprpb.Expr_CreateStruct_Entry_FieldKey: + return exprStructField(factory, e.GetId(), e) + case *exprpb.Expr_CreateStruct_Entry_MapKey: + return exprMapEntry(factory, e.GetId(), e) + } + return nil, fmt.Errorf("unsupported expr entry kind: %v", e) +} + +func exprInternal(factory ExprFactory, e *exprpb.Expr) (Expr, error) { + id := e.GetId() + switch e.GetExprKind().(type) { + case *exprpb.Expr_CallExpr: + return exprCall(factory, id, e.GetCallExpr()) + case *exprpb.Expr_ComprehensionExpr: + return exprComprehension(factory, id, e.GetComprehensionExpr()) + case *exprpb.Expr_ConstExpr: + return exprLiteral(factory, id, e.GetConstExpr()) + case *exprpb.Expr_IdentExpr: + return exprIdent(factory, id, e.GetIdentExpr()) + case *exprpb.Expr_ListExpr: + return exprList(factory, id, e.GetListExpr()) + case *exprpb.Expr_SelectExpr: + return exprSelect(factory, id, e.GetSelectExpr()) + case *exprpb.Expr_StructExpr: + s := e.GetStructExpr() + if s.GetMessageName() != "" { + return exprStruct(factory, id, s) + } + return exprMap(factory, id, s) + } + return factory.NewUnspecifiedExpr(id), nil +} + +func exprCall(factory ExprFactory, id int64, call *exprpb.Expr_Call) (Expr, error) { + var err error + args := make([]Expr, len(call.GetArgs())) + for i, a := range call.GetArgs() { + args[i], err = exprInternal(factory, a) + if err != nil { + return nil, err + } + } + if call.GetTarget() == nil { + return factory.NewCall(id, call.GetFunction(), args...), nil + } + + target, err := exprInternal(factory, call.GetTarget()) + if err != nil { + return 
nil, err + } + return factory.NewMemberCall(id, call.GetFunction(), target, args...), nil +} + +func exprComprehension(factory ExprFactory, id int64, comp *exprpb.Expr_Comprehension) (Expr, error) { + iterRange, err := exprInternal(factory, comp.GetIterRange()) + if err != nil { + return nil, err + } + accuInit, err := exprInternal(factory, comp.GetAccuInit()) + if err != nil { + return nil, err + } + loopCond, err := exprInternal(factory, comp.GetLoopCondition()) + if err != nil { + return nil, err + } + loopStep, err := exprInternal(factory, comp.GetLoopStep()) + if err != nil { + return nil, err + } + result, err := exprInternal(factory, comp.GetResult()) + if err != nil { + return nil, err + } + return factory.NewComprehension(id, + iterRange, + comp.GetIterVar(), + comp.GetAccuVar(), + accuInit, + loopCond, + loopStep, + result), nil +} + +func exprLiteral(factory ExprFactory, id int64, c *exprpb.Constant) (Expr, error) { + val, err := ConstantToVal(c) + if err != nil { + return nil, err + } + return factory.NewLiteral(id, val), nil +} + +func exprIdent(factory ExprFactory, id int64, i *exprpb.Expr_Ident) (Expr, error) { + return factory.NewIdent(id, i.GetName()), nil +} + +func exprList(factory ExprFactory, id int64, l *exprpb.Expr_CreateList) (Expr, error) { + elems := make([]Expr, len(l.GetElements())) + for i, e := range l.GetElements() { + elem, err := exprInternal(factory, e) + if err != nil { + return nil, err + } + elems[i] = elem + } + return factory.NewList(id, elems, l.GetOptionalIndices()), nil +} + +func exprMap(factory ExprFactory, id int64, s *exprpb.Expr_CreateStruct) (Expr, error) { + entries := make([]EntryExpr, len(s.GetEntries())) + var err error + for i, entry := range s.GetEntries() { + entries[i], err = exprMapEntry(factory, entry.GetId(), entry) + if err != nil { + return nil, err + } + } + return factory.NewMap(id, entries), nil +} + +func exprMapEntry(factory ExprFactory, id int64, e *exprpb.Expr_CreateStruct_Entry) (EntryExpr, error) { + k, err := exprInternal(factory, e.GetMapKey()) + if err != nil { + return nil, err + } + v, err := exprInternal(factory, e.GetValue()) + if err != nil { + return nil, err + } + return factory.NewMapEntry(id, k, v, e.GetOptionalEntry()), nil +} + +func exprSelect(factory ExprFactory, id int64, s *exprpb.Expr_Select) (Expr, error) { + op, err := exprInternal(factory, s.GetOperand()) + if err != nil { + return nil, err + } + if s.GetTestOnly() { + return factory.NewPresenceTest(id, op, s.GetField()), nil + } + return factory.NewSelect(id, op, s.GetField()), nil +} + +func exprStruct(factory ExprFactory, id int64, s *exprpb.Expr_CreateStruct) (Expr, error) { + fields := make([]EntryExpr, len(s.GetEntries())) + var err error + for i, field := range s.GetEntries() { + fields[i], err = exprStructField(factory, field.GetId(), field) + if err != nil { + return nil, err + } + } + return factory.NewStruct(id, s.GetMessageName(), fields), nil +} + +func exprStructField(factory ExprFactory, id int64, f *exprpb.Expr_CreateStruct_Entry) (EntryExpr, error) { + v, err := exprInternal(factory, f.GetValue()) + if err != nil { + return nil, err + } + return factory.NewStructField(id, f.GetFieldKey(), v, f.GetOptionalEntry()), nil +} + +// ExprToProto serializes an ast.Expr value to a protobuf Expr representation. 
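For reference, a minimal sketch of converting between the protobuf Expr representation and the CEL-native ast.Expr introduced in this file; the identifier name is illustrative:

package main

import (
	"fmt"

	"github.com/google/cel-go/common/ast"

	exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1"
)

func main() {
	// A protobuf expression for the identifier `request`.
	pbExpr := &exprpb.Expr{
		Id: 1,
		ExprKind: &exprpb.Expr_IdentExpr{
			IdentExpr: &exprpb.Expr_Ident{Name: "request"},
		},
	}

	// Convert to the CEL-native representation and inspect it.
	e, err := ast.ProtoToExpr(pbExpr)
	if err != nil {
		panic(err)
	}
	fmt.Println(e.Kind() == ast.IdentKind, e.AsIdent()) // true request

	// Convert back to the protobuf form; the original id is preserved.
	roundTripped, err := ast.ExprToProto(e)
	if err != nil {
		panic(err)
	}
	fmt.Println(roundTripped.GetId()) // 1
}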
+func ExprToProto(e Expr) (*exprpb.Expr, error) { + if e == nil { + return &exprpb.Expr{}, nil + } + switch e.Kind() { + case CallKind: + return protoCall(e.ID(), e.AsCall()) + case ComprehensionKind: + return protoComprehension(e.ID(), e.AsComprehension()) + case IdentKind: + return protoIdent(e.ID(), e.AsIdent()) + case ListKind: + return protoList(e.ID(), e.AsList()) + case LiteralKind: + return protoLiteral(e.ID(), e.AsLiteral()) + case MapKind: + return protoMap(e.ID(), e.AsMap()) + case SelectKind: + return protoSelect(e.ID(), e.AsSelect()) + case StructKind: + return protoStruct(e.ID(), e.AsStruct()) + case UnspecifiedExprKind: + // Handle the case where a macro reference may be getting translated. + // A nested macro 'pointer' is a non-zero expression id with no kind set. + if e.ID() != 0 { + return &exprpb.Expr{Id: e.ID()}, nil + } + return &exprpb.Expr{}, nil + } + return nil, fmt.Errorf("unsupported expr kind: %v", e) +} + +// EntryExprToProto converts an ast.EntryExpr to a protobuf CreateStruct entry +func EntryExprToProto(e EntryExpr) (*exprpb.Expr_CreateStruct_Entry, error) { + switch e.Kind() { + case MapEntryKind: + return protoMapEntry(e.ID(), e.AsMapEntry()) + case StructFieldKind: + return protoStructField(e.ID(), e.AsStructField()) + case UnspecifiedEntryExprKind: + return &exprpb.Expr_CreateStruct_Entry{}, nil + } + return nil, fmt.Errorf("unsupported expr entry kind: %v", e) +} + +func protoCall(id int64, call CallExpr) (*exprpb.Expr, error) { + var err error + var target *exprpb.Expr + if call.IsMemberFunction() { + target, err = ExprToProto(call.Target()) + if err != nil { + return nil, err + } + } + callArgs := call.Args() + args := make([]*exprpb.Expr, len(callArgs)) + for i, a := range callArgs { + args[i], err = ExprToProto(a) + if err != nil { + return nil, err + } + } + return &exprpb.Expr{ + Id: id, + ExprKind: &exprpb.Expr_CallExpr{ + CallExpr: &exprpb.Expr_Call{ + Function: call.FunctionName(), + Target: target, + Args: args, + }, + }, + }, nil +} + +func protoComprehension(id int64, comp ComprehensionExpr) (*exprpb.Expr, error) { + iterRange, err := ExprToProto(comp.IterRange()) + if err != nil { + return nil, err + } + accuInit, err := ExprToProto(comp.AccuInit()) + if err != nil { + return nil, err + } + loopCond, err := ExprToProto(comp.LoopCondition()) + if err != nil { + return nil, err + } + loopStep, err := ExprToProto(comp.LoopStep()) + if err != nil { + return nil, err + } + result, err := ExprToProto(comp.Result()) + if err != nil { + return nil, err + } + return &exprpb.Expr{ + Id: id, + ExprKind: &exprpb.Expr_ComprehensionExpr{ + ComprehensionExpr: &exprpb.Expr_Comprehension{ + IterVar: comp.IterVar(), + IterRange: iterRange, + AccuVar: comp.AccuVar(), + AccuInit: accuInit, + LoopCondition: loopCond, + LoopStep: loopStep, + Result: result, + }, + }, + }, nil +} + +func protoIdent(id int64, name string) (*exprpb.Expr, error) { + return &exprpb.Expr{ + Id: id, + ExprKind: &exprpb.Expr_IdentExpr{ + IdentExpr: &exprpb.Expr_Ident{ + Name: name, + }, + }, + }, nil +} + +func protoList(id int64, list ListExpr) (*exprpb.Expr, error) { + var err error + elems := make([]*exprpb.Expr, list.Size()) + for i, e := range list.Elements() { + elems[i], err = ExprToProto(e) + if err != nil { + return nil, err + } + } + return &exprpb.Expr{ + Id: id, + ExprKind: &exprpb.Expr_ListExpr{ + ListExpr: &exprpb.Expr_CreateList{ + Elements: elems, + OptionalIndices: list.OptionalIndices(), + }, + }, + }, nil +} + +func protoLiteral(id int64, val ref.Val) (*exprpb.Expr, 
error) { + c, err := ValToConstant(val) + if err != nil { + return nil, err + } + return &exprpb.Expr{ + Id: id, + ExprKind: &exprpb.Expr_ConstExpr{ + ConstExpr: c, + }, + }, nil +} + +func protoMap(id int64, m MapExpr) (*exprpb.Expr, error) { + entries := make([]*exprpb.Expr_CreateStruct_Entry, len(m.Entries())) + var err error + for i, e := range m.Entries() { + entries[i], err = EntryExprToProto(e) + if err != nil { + return nil, err + } + } + return &exprpb.Expr{ + Id: id, + ExprKind: &exprpb.Expr_StructExpr{ + StructExpr: &exprpb.Expr_CreateStruct{ + Entries: entries, + }, + }, + }, nil +} + +func protoMapEntry(id int64, e MapEntry) (*exprpb.Expr_CreateStruct_Entry, error) { + k, err := ExprToProto(e.Key()) + if err != nil { + return nil, err + } + v, err := ExprToProto(e.Value()) + if err != nil { + return nil, err + } + return &exprpb.Expr_CreateStruct_Entry{ + Id: id, + KeyKind: &exprpb.Expr_CreateStruct_Entry_MapKey{ + MapKey: k, + }, + Value: v, + OptionalEntry: e.IsOptional(), + }, nil +} + +func protoSelect(id int64, s SelectExpr) (*exprpb.Expr, error) { + op, err := ExprToProto(s.Operand()) + if err != nil { + return nil, err + } + return &exprpb.Expr{ + Id: id, + ExprKind: &exprpb.Expr_SelectExpr{ + SelectExpr: &exprpb.Expr_Select{ + Operand: op, + Field: s.FieldName(), + TestOnly: s.IsTestOnly(), + }, + }, + }, nil +} + +func protoStruct(id int64, s StructExpr) (*exprpb.Expr, error) { + entries := make([]*exprpb.Expr_CreateStruct_Entry, len(s.Fields())) + var err error + for i, e := range s.Fields() { + entries[i], err = EntryExprToProto(e) + if err != nil { + return nil, err + } + } + return &exprpb.Expr{ + Id: id, + ExprKind: &exprpb.Expr_StructExpr{ + StructExpr: &exprpb.Expr_CreateStruct{ + MessageName: s.TypeName(), + Entries: entries, + }, + }, + }, nil +} + +func protoStructField(id int64, f StructField) (*exprpb.Expr_CreateStruct_Entry, error) { + v, err := ExprToProto(f.Value()) + if err != nil { + return nil, err + } + return &exprpb.Expr_CreateStruct_Entry{ + Id: id, + KeyKind: &exprpb.Expr_CreateStruct_Entry_FieldKey{ + FieldKey: f.Name(), + }, + Value: v, + OptionalEntry: f.IsOptional(), + }, nil +} + +// SourceInfoToProto serializes an ast.SourceInfo value to a protobuf SourceInfo object. +func SourceInfoToProto(info *SourceInfo) (*exprpb.SourceInfo, error) { + if info == nil { + return &exprpb.SourceInfo{}, nil + } + sourceInfo := &exprpb.SourceInfo{ + SyntaxVersion: info.SyntaxVersion(), + Location: info.Description(), + LineOffsets: info.LineOffsets(), + Positions: make(map[int64]int32, len(info.OffsetRanges())), + MacroCalls: make(map[int64]*exprpb.Expr, len(info.MacroCalls())), + } + for id, offset := range info.OffsetRanges() { + sourceInfo.Positions[id] = offset.Start + } + for id, e := range info.MacroCalls() { + call, err := ExprToProto(e) + if err != nil { + return nil, err + } + sourceInfo.MacroCalls[id] = call + } + return sourceInfo, nil +} + +// ProtoToSourceInfo deserializes the protobuf into a native SourceInfo value. 
+func ProtoToSourceInfo(info *exprpb.SourceInfo) (*SourceInfo, error) { + sourceInfo := &SourceInfo{ + syntax: info.GetSyntaxVersion(), + desc: info.GetLocation(), + lines: info.GetLineOffsets(), + offsetRanges: make(map[int64]OffsetRange, len(info.GetPositions())), + macroCalls: make(map[int64]Expr, len(info.GetMacroCalls())), + } + for id, offset := range info.GetPositions() { + sourceInfo.SetOffsetRange(id, OffsetRange{Start: offset, Stop: offset}) + } + for id, e := range info.GetMacroCalls() { + call, err := ProtoToExpr(e) + if err != nil { + return nil, err + } + sourceInfo.SetMacroCall(id, call) + } + return sourceInfo, nil +} + +// ReferenceInfoToProto converts a ReferenceInfo instance to a protobuf Reference suitable for serialization. +func ReferenceInfoToProto(info *ReferenceInfo) (*exprpb.Reference, error) { + c, err := ValToConstant(info.Value) + if err != nil { + return nil, err + } + return &exprpb.Reference{ + Name: info.Name, + OverloadId: info.OverloadIDs, + Value: c, + }, nil +} + +// ProtoToReferenceInfo converts a protobuf Reference into a CEL-native ReferenceInfo instance. +func ProtoToReferenceInfo(ref *exprpb.Reference) (*ReferenceInfo, error) { + v, err := ConstantToVal(ref.GetValue()) + if err != nil { + return nil, err + } + return &ReferenceInfo{ + Name: ref.GetName(), + OverloadIDs: ref.GetOverloadId(), + Value: v, + }, nil +} + +// ValToConstant converts a CEL-native ref.Val to a protobuf Constant. +// +// Only simple scalar types are supported by this method. +func ValToConstant(v ref.Val) (*exprpb.Constant, error) { + if v == nil { + return nil, nil + } + switch v.Type() { + case types.BoolType: + return &exprpb.Constant{ConstantKind: &exprpb.Constant_BoolValue{BoolValue: v.Value().(bool)}}, nil + case types.BytesType: + return &exprpb.Constant{ConstantKind: &exprpb.Constant_BytesValue{BytesValue: v.Value().([]byte)}}, nil + case types.DoubleType: + return &exprpb.Constant{ConstantKind: &exprpb.Constant_DoubleValue{DoubleValue: v.Value().(float64)}}, nil + case types.IntType: + return &exprpb.Constant{ConstantKind: &exprpb.Constant_Int64Value{Int64Value: v.Value().(int64)}}, nil + case types.NullType: + return &exprpb.Constant{ConstantKind: &exprpb.Constant_NullValue{NullValue: structpb.NullValue_NULL_VALUE}}, nil + case types.StringType: + return &exprpb.Constant{ConstantKind: &exprpb.Constant_StringValue{StringValue: v.Value().(string)}}, nil + case types.UintType: + return &exprpb.Constant{ConstantKind: &exprpb.Constant_Uint64Value{Uint64Value: v.Value().(uint64)}}, nil + } + return nil, fmt.Errorf("unsupported constant kind: %v", v.Type()) +} + +// ConstantToVal converts a protobuf Constant to a CEL-native ref.Val. 
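A small sketch of the constant conversion helpers here: scalar values round-trip through the protobuf Constant form, while aggregate values are expected to error. The sample values are illustrative, and types.DefaultTypeAdapter is assumed for the list conversion:

package main

import (
	"fmt"

	"github.com/google/cel-go/common/ast"
	"github.com/google/cel-go/common/types"
)

func main() {
	// Scalar values round-trip through the protobuf Constant representation.
	c, err := ast.ValToConstant(types.Uint(42))
	if err != nil {
		panic(err)
	}
	v, err := ast.ConstantToVal(c)
	if err != nil {
		panic(err)
	}
	fmt.Println(v.Equal(types.Uint(42)) == types.True) // true

	// Aggregate values are not representable as Constants; an error is returned.
	if _, err := ast.ValToConstant(types.DefaultTypeAdapter.NativeToValue([]string{"a"})); err != nil {
		fmt.Println("unsupported constant kind, as expected")
	}
}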
+func ConstantToVal(c *exprpb.Constant) (ref.Val, error) { + if c == nil { + return nil, nil + } + switch c.GetConstantKind().(type) { + case *exprpb.Constant_BoolValue: + return types.Bool(c.GetBoolValue()), nil + case *exprpb.Constant_BytesValue: + return types.Bytes(c.GetBytesValue()), nil + case *exprpb.Constant_DoubleValue: + return types.Double(c.GetDoubleValue()), nil + case *exprpb.Constant_Int64Value: + return types.Int(c.GetInt64Value()), nil + case *exprpb.Constant_NullValue: + return types.NullValue, nil + case *exprpb.Constant_StringValue: + return types.String(c.GetStringValue()), nil + case *exprpb.Constant_Uint64Value: + return types.Uint(c.GetUint64Value()), nil + } + return nil, fmt.Errorf("unsupported constant kind: %v", c.GetConstantKind()) +} diff --git a/vendor/github.com/google/cel-go/common/ast/expr.go b/vendor/github.com/google/cel-go/common/ast/expr.go new file mode 100644 index 000000000..c9d88bbaa --- /dev/null +++ b/vendor/github.com/google/cel-go/common/ast/expr.go @@ -0,0 +1,860 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ast + +import ( + "github.com/google/cel-go/common/types/ref" +) + +// ExprKind represents the expression node kind. +type ExprKind int + +const ( + // UnspecifiedExprKind represents an unset expression with no specified properties. + UnspecifiedExprKind ExprKind = iota + + // CallKind represents a function call. + CallKind + + // ComprehensionKind represents a comprehension expression generated by a macro. + ComprehensionKind + + // IdentKind represents a simple variable, constant, or type identifier. + IdentKind + + // ListKind represents a list literal expression. + ListKind + + // LiteralKind represents a primitive scalar literal. + LiteralKind + + // MapKind represents a map literal expression. + MapKind + + // SelectKind represents a field selection expression. + SelectKind + + // StructKind represents a struct literal expression. + StructKind +) + +// Expr represents the base expression node in a CEL abstract syntax tree. +// +// Depending on the `Kind()` value, the Expr may be converted to a concrete expression types +// as indicated by the `As` methods. +type Expr interface { + // ID of the expression as it appears in the AST + ID() int64 + + // Kind of the expression node. See ExprKind for the valid enum values. + Kind() ExprKind + + // AsCall adapts the expr into a CallExpr + // + // The Kind() must be equal to a CallKind for the conversion to be well-defined. + AsCall() CallExpr + + // AsComprehension adapts the expr into a ComprehensionExpr. + // + // The Kind() must be equal to a ComprehensionKind for the conversion to be well-defined. + AsComprehension() ComprehensionExpr + + // AsIdent adapts the expr into an identifier string. + // + // The Kind() must be equal to an IdentKind for the conversion to be well-defined. + AsIdent() string + + // AsLiteral adapts the expr into a constant ref.Val. 
+ // + // The Kind() must be equal to a LiteralKind for the conversion to be well-defined. + AsLiteral() ref.Val + + // AsList adapts the expr into a ListExpr. + // + // The Kind() must be equal to a ListKind for the conversion to be well-defined. + AsList() ListExpr + + // AsMap adapts the expr into a MapExpr. + // + // The Kind() must be equal to a MapKind for the conversion to be well-defined. + AsMap() MapExpr + + // AsSelect adapts the expr into a SelectExpr. + // + // The Kind() must be equal to a SelectKind for the conversion to be well-defined. + AsSelect() SelectExpr + + // AsStruct adapts the expr into a StructExpr. + // + // The Kind() must be equal to a StructKind for the conversion to be well-defined. + AsStruct() StructExpr + + // RenumberIDs performs an in-place update of the expression and all of its descendents numeric ids. + RenumberIDs(IDGenerator) + + // SetKindCase replaces the contents of the current expression with the contents of the other. + // + // The SetKindCase takes ownership of any expression instances references within the input Expr. + // A shallow copy is made of the Expr value itself, but not a deep one. + // + // This method should only be used during AST rewrites using temporary Expr values. + SetKindCase(Expr) + + // isExpr is a marker interface. + isExpr() +} + +// EntryExprKind represents the possible EntryExpr kinds. +type EntryExprKind int + +const ( + // UnspecifiedEntryExprKind indicates that the entry expr is not set. + UnspecifiedEntryExprKind EntryExprKind = iota + + // MapEntryKind indicates that the entry is a MapEntry type with key and value expressions. + MapEntryKind + + // StructFieldKind indicates that the entry is a StructField with a field name and initializer + // expression. + StructFieldKind +) + +// EntryExpr represents the base entry expression in a CEL map or struct literal. +type EntryExpr interface { + // ID of the entry as it appears in the AST. + ID() int64 + + // Kind of the entry expression node. See EntryExprKind for valid enum values. + Kind() EntryExprKind + + // AsMapEntry casts the EntryExpr to a MapEntry. + // + // The Kind() must be equal to MapEntryKind for the conversion to be well-defined. + AsMapEntry() MapEntry + + // AsStructField casts the EntryExpr to a StructField + // + // The Kind() must be equal to StructFieldKind for the conversion to be well-defined. + AsStructField() StructField + + // RenumberIDs performs an in-place update of the expression and all of its descendents numeric ids. + RenumberIDs(IDGenerator) + + isEntryExpr() +} + +// IDGenerator produces unique ids suitable for tagging expression nodes +type IDGenerator func(originalID int64) int64 + +// CallExpr defines an interface for inspecting a function call and its arugments. +type CallExpr interface { + // FunctionName returns the name of the function. + FunctionName() string + + // IsMemberFunction returns whether the call has a non-nil target indicating it is a member function + IsMemberFunction() bool + + // Target returns the target of the expression if one is present. + Target() Expr + + // Args returns the list of call arguments, excluding the target. + Args() []Expr + + // marker interface method + isExpr() +} + +// ListExpr defines an interface for inspecting a list literal expression. +type ListExpr interface { + // Elements returns the list elements as navigable expressions. + Elements() []Expr + + // OptionalIndicies returns the list of optional indices in the list literal. 
+ OptionalIndices() []int32 + + // IsOptional indicates whether the given element index is optional. + IsOptional(int32) bool + + // Size returns the number of elements in the list. + Size() int + + // marker interface method + isExpr() +} + +// SelectExpr defines an interface for inspecting a select expression. +type SelectExpr interface { + // Operand returns the selection operand expression. + Operand() Expr + + // FieldName returns the field name being selected from the operand. + FieldName() string + + // IsTestOnly indicates whether the select expression is a presence test generated by a macro. + IsTestOnly() bool + + // marker interface method + isExpr() +} + +// MapExpr defines an interface for inspecting a map expression. +type MapExpr interface { + // Entries returns the map key value pairs as EntryExpr values. + Entries() []EntryExpr + + // Size returns the number of entries in the map. + Size() int + + // marker interface method + isExpr() +} + +// MapEntry defines an interface for inspecting a map entry. +type MapEntry interface { + // Key returns the map entry key expression. + Key() Expr + + // Value returns the map entry value expression. + Value() Expr + + // IsOptional returns whether the entry is optional. + IsOptional() bool + + // marker interface method + isEntryExpr() +} + +// StructExpr defines an interfaces for inspecting a struct and its field initializers. +type StructExpr interface { + // TypeName returns the struct type name. + TypeName() string + + // Fields returns the set of field initializers in the struct expression as EntryExpr values. + Fields() []EntryExpr + + // marker interface method + isExpr() +} + +// StructField defines an interface for inspecting a struct field initialization. +type StructField interface { + // Name returns the name of the field. + Name() string + + // Value returns the field initialization expression. + Value() Expr + + // IsOptional returns whether the field is optional. + IsOptional() bool + + // marker interface method + isEntryExpr() +} + +// ComprehensionExpr defines an interface for inspecting a comprehension expression. +type ComprehensionExpr interface { + // IterRange returns the iteration range expression. + IterRange() Expr + + // IterVar returns the iteration variable name. + IterVar() string + + // AccuVar returns the accumulation variable name. + AccuVar() string + + // AccuInit returns the accumulation variable initialization expression. + AccuInit() Expr + + // LoopCondition returns the loop condition expression. + LoopCondition() Expr + + // LoopStep returns the loop step expression. + LoopStep() Expr + + // Result returns the comprehension result expression. 
+ Result() Expr + + // marker interface method + isExpr() +} + +var _ Expr = &expr{} + +type expr struct { + id int64 + exprKindCase +} + +type exprKindCase interface { + Kind() ExprKind + + renumberIDs(IDGenerator) + + isExpr() +} + +func (e *expr) ID() int64 { + if e == nil { + return 0 + } + return e.id +} + +func (e *expr) Kind() ExprKind { + if e == nil || e.exprKindCase == nil { + return UnspecifiedExprKind + } + return e.exprKindCase.Kind() +} + +func (e *expr) AsCall() CallExpr { + if e.Kind() != CallKind { + return nilCall + } + return e.exprKindCase.(CallExpr) +} + +func (e *expr) AsComprehension() ComprehensionExpr { + if e.Kind() != ComprehensionKind { + return nilCompre + } + return e.exprKindCase.(ComprehensionExpr) +} + +func (e *expr) AsIdent() string { + if e.Kind() != IdentKind { + return "" + } + return string(e.exprKindCase.(baseIdentExpr)) +} + +func (e *expr) AsLiteral() ref.Val { + if e.Kind() != LiteralKind { + return nil + } + return e.exprKindCase.(*baseLiteral).Val +} + +func (e *expr) AsList() ListExpr { + if e.Kind() != ListKind { + return nilList + } + return e.exprKindCase.(ListExpr) +} + +func (e *expr) AsMap() MapExpr { + if e.Kind() != MapKind { + return nilMap + } + return e.exprKindCase.(MapExpr) +} + +func (e *expr) AsSelect() SelectExpr { + if e.Kind() != SelectKind { + return nilSel + } + return e.exprKindCase.(SelectExpr) +} + +func (e *expr) AsStruct() StructExpr { + if e.Kind() != StructKind { + return nilStruct + } + return e.exprKindCase.(StructExpr) +} + +func (e *expr) SetKindCase(other Expr) { + if e == nil { + return + } + if other == nil { + e.exprKindCase = nil + return + } + switch other.Kind() { + case CallKind: + c := other.AsCall() + e.exprKindCase = &baseCallExpr{ + function: c.FunctionName(), + target: c.Target(), + args: c.Args(), + isMember: c.IsMemberFunction(), + } + case ComprehensionKind: + c := other.AsComprehension() + e.exprKindCase = &baseComprehensionExpr{ + iterRange: c.IterRange(), + iterVar: c.IterVar(), + accuVar: c.AccuVar(), + accuInit: c.AccuInit(), + loopCond: c.LoopCondition(), + loopStep: c.LoopStep(), + result: c.Result(), + } + case IdentKind: + e.exprKindCase = baseIdentExpr(other.AsIdent()) + case ListKind: + l := other.AsList() + optIndexMap := make(map[int32]struct{}, len(l.OptionalIndices())) + for _, idx := range l.OptionalIndices() { + optIndexMap[idx] = struct{}{} + } + e.exprKindCase = &baseListExpr{ + elements: l.Elements(), + optIndices: l.OptionalIndices(), + optIndexMap: optIndexMap, + } + case LiteralKind: + e.exprKindCase = &baseLiteral{Val: other.AsLiteral()} + case MapKind: + e.exprKindCase = &baseMapExpr{ + entries: other.AsMap().Entries(), + } + case SelectKind: + s := other.AsSelect() + e.exprKindCase = &baseSelectExpr{ + operand: s.Operand(), + field: s.FieldName(), + testOnly: s.IsTestOnly(), + } + case StructKind: + s := other.AsStruct() + e.exprKindCase = &baseStructExpr{ + typeName: s.TypeName(), + fields: s.Fields(), + } + case UnspecifiedExprKind: + e.exprKindCase = nil + } +} + +func (e *expr) RenumberIDs(idGen IDGenerator) { + if e == nil { + return + } + e.id = idGen(e.id) + if e.exprKindCase != nil { + e.exprKindCase.renumberIDs(idGen) + } +} + +type baseCallExpr struct { + function string + target Expr + args []Expr + isMember bool +} + +func (*baseCallExpr) Kind() ExprKind { + return CallKind +} + +func (e *baseCallExpr) FunctionName() string { + if e == nil { + return "" + } + return e.function +} + +func (e *baseCallExpr) IsMemberFunction() bool { + if e == nil { + return false 
+ } + return e.isMember +} + +func (e *baseCallExpr) Target() Expr { + if e == nil || !e.IsMemberFunction() { + return nilExpr + } + return e.target +} + +func (e *baseCallExpr) Args() []Expr { + if e == nil { + return []Expr{} + } + return e.args +} + +func (e *baseCallExpr) renumberIDs(idGen IDGenerator) { + if e.IsMemberFunction() { + e.Target().RenumberIDs(idGen) + } + for _, arg := range e.Args() { + arg.RenumberIDs(idGen) + } +} + +func (*baseCallExpr) isExpr() {} + +var _ ComprehensionExpr = &baseComprehensionExpr{} + +type baseComprehensionExpr struct { + iterRange Expr + iterVar string + accuVar string + accuInit Expr + loopCond Expr + loopStep Expr + result Expr +} + +func (*baseComprehensionExpr) Kind() ExprKind { + return ComprehensionKind +} + +func (e *baseComprehensionExpr) IterRange() Expr { + if e == nil { + return nilExpr + } + return e.iterRange +} + +func (e *baseComprehensionExpr) IterVar() string { + return e.iterVar +} + +func (e *baseComprehensionExpr) AccuVar() string { + return e.accuVar +} + +func (e *baseComprehensionExpr) AccuInit() Expr { + if e == nil { + return nilExpr + } + return e.accuInit +} + +func (e *baseComprehensionExpr) LoopCondition() Expr { + if e == nil { + return nilExpr + } + return e.loopCond +} + +func (e *baseComprehensionExpr) LoopStep() Expr { + if e == nil { + return nilExpr + } + return e.loopStep +} + +func (e *baseComprehensionExpr) Result() Expr { + if e == nil { + return nilExpr + } + return e.result +} + +func (e *baseComprehensionExpr) renumberIDs(idGen IDGenerator) { + e.IterRange().RenumberIDs(idGen) + e.AccuInit().RenumberIDs(idGen) + e.LoopCondition().RenumberIDs(idGen) + e.LoopStep().RenumberIDs(idGen) + e.Result().RenumberIDs(idGen) +} + +func (*baseComprehensionExpr) isExpr() {} + +var _ exprKindCase = baseIdentExpr("") + +type baseIdentExpr string + +func (baseIdentExpr) Kind() ExprKind { + return IdentKind +} + +func (e baseIdentExpr) renumberIDs(IDGenerator) {} + +func (baseIdentExpr) isExpr() {} + +var _ exprKindCase = &baseLiteral{} +var _ ref.Val = &baseLiteral{} + +type baseLiteral struct { + ref.Val +} + +func (*baseLiteral) Kind() ExprKind { + return LiteralKind +} + +func (l *baseLiteral) renumberIDs(IDGenerator) {} + +func (*baseLiteral) isExpr() {} + +var _ ListExpr = &baseListExpr{} + +type baseListExpr struct { + elements []Expr + optIndices []int32 + optIndexMap map[int32]struct{} +} + +func (*baseListExpr) Kind() ExprKind { + return ListKind +} + +func (e *baseListExpr) Elements() []Expr { + if e == nil { + return []Expr{} + } + return e.elements +} + +func (e *baseListExpr) IsOptional(index int32) bool { + _, found := e.optIndexMap[index] + return found +} + +func (e *baseListExpr) OptionalIndices() []int32 { + if e == nil { + return []int32{} + } + return e.optIndices +} + +func (e *baseListExpr) Size() int { + return len(e.Elements()) +} + +func (e *baseListExpr) renumberIDs(idGen IDGenerator) { + for _, elem := range e.Elements() { + elem.RenumberIDs(idGen) + } +} + +func (*baseListExpr) isExpr() {} + +type baseMapExpr struct { + entries []EntryExpr +} + +func (*baseMapExpr) Kind() ExprKind { + return MapKind +} + +func (e *baseMapExpr) Entries() []EntryExpr { + if e == nil { + return []EntryExpr{} + } + return e.entries +} + +func (e *baseMapExpr) Size() int { + return len(e.Entries()) +} + +func (e *baseMapExpr) renumberIDs(idGen IDGenerator) { + for _, entry := range e.Entries() { + entry.RenumberIDs(idGen) + } +} + +func (*baseMapExpr) isExpr() {} + +type baseSelectExpr struct { + operand Expr + 
field string + testOnly bool +} + +func (*baseSelectExpr) Kind() ExprKind { + return SelectKind +} + +func (e *baseSelectExpr) Operand() Expr { + if e == nil || e.operand == nil { + return nilExpr + } + return e.operand +} + +func (e *baseSelectExpr) FieldName() string { + if e == nil { + return "" + } + return e.field +} + +func (e *baseSelectExpr) IsTestOnly() bool { + if e == nil { + return false + } + return e.testOnly +} + +func (e *baseSelectExpr) renumberIDs(idGen IDGenerator) { + e.Operand().RenumberIDs(idGen) +} + +func (*baseSelectExpr) isExpr() {} + +type baseStructExpr struct { + typeName string + fields []EntryExpr +} + +func (*baseStructExpr) Kind() ExprKind { + return StructKind +} + +func (e *baseStructExpr) TypeName() string { + if e == nil { + return "" + } + return e.typeName +} + +func (e *baseStructExpr) Fields() []EntryExpr { + if e == nil { + return []EntryExpr{} + } + return e.fields +} + +func (e *baseStructExpr) renumberIDs(idGen IDGenerator) { + for _, f := range e.Fields() { + f.RenumberIDs(idGen) + } +} + +func (*baseStructExpr) isExpr() {} + +type entryExprKindCase interface { + Kind() EntryExprKind + + renumberIDs(IDGenerator) + + isEntryExpr() +} + +var _ EntryExpr = &entryExpr{} + +type entryExpr struct { + id int64 + entryExprKindCase +} + +func (e *entryExpr) ID() int64 { + return e.id +} + +func (e *entryExpr) AsMapEntry() MapEntry { + if e.Kind() != MapEntryKind { + return nilMapEntry + } + return e.entryExprKindCase.(MapEntry) +} + +func (e *entryExpr) AsStructField() StructField { + if e.Kind() != StructFieldKind { + return nilStructField + } + return e.entryExprKindCase.(StructField) +} + +func (e *entryExpr) RenumberIDs(idGen IDGenerator) { + e.id = idGen(e.id) + e.entryExprKindCase.renumberIDs(idGen) +} + +type baseMapEntry struct { + key Expr + value Expr + isOptional bool +} + +func (e *baseMapEntry) Kind() EntryExprKind { + return MapEntryKind +} + +func (e *baseMapEntry) Key() Expr { + if e == nil { + return nilExpr + } + return e.key +} + +func (e *baseMapEntry) Value() Expr { + if e == nil { + return nilExpr + } + return e.value +} + +func (e *baseMapEntry) IsOptional() bool { + if e == nil { + return false + } + return e.isOptional +} + +func (e *baseMapEntry) renumberIDs(idGen IDGenerator) { + e.Key().RenumberIDs(idGen) + e.Value().RenumberIDs(idGen) +} + +func (*baseMapEntry) isEntryExpr() {} + +type baseStructField struct { + field string + value Expr + isOptional bool +} + +func (f *baseStructField) Kind() EntryExprKind { + return StructFieldKind +} + +func (f *baseStructField) Name() string { + if f == nil { + return "" + } + return f.field +} + +func (f *baseStructField) Value() Expr { + if f == nil { + return nilExpr + } + return f.value +} + +func (f *baseStructField) IsOptional() bool { + if f == nil { + return false + } + return f.isOptional +} + +func (f *baseStructField) renumberIDs(idGen IDGenerator) { + f.Value().RenumberIDs(idGen) +} + +func (*baseStructField) isEntryExpr() {} + +var ( + nilExpr *expr = nil + nilCall *baseCallExpr = nil + nilCompre *baseComprehensionExpr = nil + nilList *baseListExpr = nil + nilMap *baseMapExpr = nil + nilMapEntry *baseMapEntry = nil + nilSel *baseSelectExpr = nil + nilStruct *baseStructExpr = nil + nilStructField *baseStructField = nil +) diff --git a/vendor/github.com/google/cel-go/common/ast/factory.go b/vendor/github.com/google/cel-go/common/ast/factory.go new file mode 100644 index 000000000..b7f36e72a --- /dev/null +++ b/vendor/github.com/google/cel-go/common/ast/factory.go @@ -0,0 
+1,303 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ast + +import "github.com/google/cel-go/common/types/ref" + +// ExprFactory interfaces defines a set of methods necessary for building native expression values. +type ExprFactory interface { + // CopyExpr creates a deep copy of the input Expr value. + CopyExpr(Expr) Expr + + // CopyEntryExpr creates a deep copy of the input EntryExpr value. + CopyEntryExpr(EntryExpr) EntryExpr + + // NewCall creates an Expr value representing a global function call. + NewCall(id int64, function string, args ...Expr) Expr + + // NewComprehension creates an Expr value representing a comprehension over a value range. + NewComprehension(id int64, iterRange Expr, iterVar, accuVar string, accuInit, loopCondition, loopStep, result Expr) Expr + + // NewMemberCall creates an Expr value representing a member function call. + NewMemberCall(id int64, function string, receiver Expr, args ...Expr) Expr + + // NewIdent creates an Expr value representing an identifier. + NewIdent(id int64, name string) Expr + + // NewAccuIdent creates an Expr value representing an accumulator identifier within a + //comprehension. + NewAccuIdent(id int64) Expr + + // NewLiteral creates an Expr value representing a literal value, such as a string or integer. + NewLiteral(id int64, value ref.Val) Expr + + // NewList creates an Expr value representing a list literal expression with optional indices. + // + // Optional indicies will typically be empty unless the CEL optional types are enabled. + NewList(id int64, elems []Expr, optIndices []int32) Expr + + // NewMap creates an Expr value representing a map literal expression + NewMap(id int64, entries []EntryExpr) Expr + + // NewMapEntry creates a MapEntry with a given key, value, and a flag indicating whether + // the key is optionally set. + NewMapEntry(id int64, key, value Expr, isOptional bool) EntryExpr + + // NewPresenceTest creates an Expr representing a field presence test on an operand expression. + NewPresenceTest(id int64, operand Expr, field string) Expr + + // NewSelect creates an Expr representing a field selection on an operand expression. + NewSelect(id int64, operand Expr, field string) Expr + + // NewStruct creates an Expr value representing a struct literal with a given type name and a + // set of field initializers. + NewStruct(id int64, typeName string, fields []EntryExpr) Expr + + // NewStructField creates a StructField with a given field name, value, and a flag indicating + // whether the field is optionally set. + NewStructField(id int64, field string, value Expr, isOptional bool) EntryExpr + + // NewUnspecifiedExpr creates an empty expression node. + NewUnspecifiedExpr(id int64) Expr + + isExprFactory() +} + +type baseExprFactory struct{} + +// NewExprFactory creates an ExprFactory instance. 
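The ExprFactory interface above is the package's replacement for hand-assembling protobuf Expr nodes. A short sketch of building and inspecting an expression with it; the ids are hand-assigned for illustration (the parser normally allocates them), the operator-style function name is illustrative rather than taken from this patch, and the github.com/google/cel-go/common imports are elided:

fac := ast.NewExprFactory()
// Roughly: name.size() == 4
e := fac.NewCall(4, "_==_",
    fac.NewMemberCall(2, "size", fac.NewIdent(1, "name")),
    fac.NewLiteral(3, types.Int(4)),
)
// The resulting Expr is inspected through its kind-specific view.
if e.Kind() == ast.CallKind {
    fmt.Println(e.AsCall().FunctionName()) // _==_
}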
+func NewExprFactory() ExprFactory { + return &baseExprFactory{} +} + +func (fac *baseExprFactory) NewCall(id int64, function string, args ...Expr) Expr { + if len(args) == 0 { + args = []Expr{} + } + return fac.newExpr( + id, + &baseCallExpr{ + function: function, + target: nilExpr, + args: args, + isMember: false, + }) +} + +func (fac *baseExprFactory) NewMemberCall(id int64, function string, target Expr, args ...Expr) Expr { + if len(args) == 0 { + args = []Expr{} + } + return fac.newExpr( + id, + &baseCallExpr{ + function: function, + target: target, + args: args, + isMember: true, + }) +} + +func (fac *baseExprFactory) NewComprehension(id int64, iterRange Expr, iterVar, accuVar string, accuInit, loopCond, loopStep, result Expr) Expr { + return fac.newExpr( + id, + &baseComprehensionExpr{ + iterRange: iterRange, + iterVar: iterVar, + accuVar: accuVar, + accuInit: accuInit, + loopCond: loopCond, + loopStep: loopStep, + result: result, + }) +} + +func (fac *baseExprFactory) NewIdent(id int64, name string) Expr { + return fac.newExpr(id, baseIdentExpr(name)) +} + +func (fac *baseExprFactory) NewAccuIdent(id int64) Expr { + return fac.NewIdent(id, "__result__") +} + +func (fac *baseExprFactory) NewLiteral(id int64, value ref.Val) Expr { + return fac.newExpr(id, &baseLiteral{Val: value}) +} + +func (fac *baseExprFactory) NewList(id int64, elems []Expr, optIndices []int32) Expr { + optIndexMap := make(map[int32]struct{}, len(optIndices)) + for _, idx := range optIndices { + optIndexMap[idx] = struct{}{} + } + return fac.newExpr(id, + &baseListExpr{ + elements: elems, + optIndices: optIndices, + optIndexMap: optIndexMap, + }) +} + +func (fac *baseExprFactory) NewMap(id int64, entries []EntryExpr) Expr { + return fac.newExpr(id, &baseMapExpr{entries: entries}) +} + +func (fac *baseExprFactory) NewMapEntry(id int64, key, value Expr, isOptional bool) EntryExpr { + return fac.newEntryExpr( + id, + &baseMapEntry{ + key: key, + value: value, + isOptional: isOptional, + }) +} + +func (fac *baseExprFactory) NewPresenceTest(id int64, operand Expr, field string) Expr { + return fac.newExpr( + id, + &baseSelectExpr{ + operand: operand, + field: field, + testOnly: true, + }) +} + +func (fac *baseExprFactory) NewSelect(id int64, operand Expr, field string) Expr { + return fac.newExpr( + id, + &baseSelectExpr{ + operand: operand, + field: field, + }) +} + +func (fac *baseExprFactory) NewStruct(id int64, typeName string, fields []EntryExpr) Expr { + return fac.newExpr( + id, + &baseStructExpr{ + typeName: typeName, + fields: fields, + }) +} + +func (fac *baseExprFactory) NewStructField(id int64, field string, value Expr, isOptional bool) EntryExpr { + return fac.newEntryExpr( + id, + &baseStructField{ + field: field, + value: value, + isOptional: isOptional, + }) +} + +func (fac *baseExprFactory) NewUnspecifiedExpr(id int64) Expr { + return fac.newExpr(id, nil) +} + +func (fac *baseExprFactory) CopyExpr(e Expr) Expr { + // unwrap navigable expressions to avoid unnecessary allocations during copying. + if nav, ok := e.(*navigableExprImpl); ok { + e = nav.Expr + } + switch e.Kind() { + case CallKind: + c := e.AsCall() + argsCopy := make([]Expr, len(c.Args())) + for i, arg := range c.Args() { + argsCopy[i] = fac.CopyExpr(arg) + } + if !c.IsMemberFunction() { + return fac.NewCall(e.ID(), c.FunctionName(), argsCopy...) + } + return fac.NewMemberCall(e.ID(), c.FunctionName(), fac.CopyExpr(c.Target()), argsCopy...) 
+ case ComprehensionKind: + compre := e.AsComprehension() + return fac.NewComprehension(e.ID(), + fac.CopyExpr(compre.IterRange()), + compre.IterVar(), + compre.AccuVar(), + fac.CopyExpr(compre.AccuInit()), + fac.CopyExpr(compre.LoopCondition()), + fac.CopyExpr(compre.LoopStep()), + fac.CopyExpr(compre.Result())) + case IdentKind: + return fac.NewIdent(e.ID(), e.AsIdent()) + case ListKind: + l := e.AsList() + elemsCopy := make([]Expr, l.Size()) + for i, elem := range l.Elements() { + elemsCopy[i] = fac.CopyExpr(elem) + } + return fac.NewList(e.ID(), elemsCopy, l.OptionalIndices()) + case LiteralKind: + return fac.NewLiteral(e.ID(), e.AsLiteral()) + case MapKind: + m := e.AsMap() + entriesCopy := make([]EntryExpr, m.Size()) + for i, entry := range m.Entries() { + entriesCopy[i] = fac.CopyEntryExpr(entry) + } + return fac.NewMap(e.ID(), entriesCopy) + case SelectKind: + s := e.AsSelect() + if s.IsTestOnly() { + return fac.NewPresenceTest(e.ID(), fac.CopyExpr(s.Operand()), s.FieldName()) + } + return fac.NewSelect(e.ID(), fac.CopyExpr(s.Operand()), s.FieldName()) + case StructKind: + s := e.AsStruct() + fieldsCopy := make([]EntryExpr, len(s.Fields())) + for i, field := range s.Fields() { + fieldsCopy[i] = fac.CopyEntryExpr(field) + } + return fac.NewStruct(e.ID(), s.TypeName(), fieldsCopy) + default: + return fac.NewUnspecifiedExpr(e.ID()) + } +} + +func (fac *baseExprFactory) CopyEntryExpr(e EntryExpr) EntryExpr { + switch e.Kind() { + case MapEntryKind: + entry := e.AsMapEntry() + return fac.NewMapEntry(e.ID(), + fac.CopyExpr(entry.Key()), fac.CopyExpr(entry.Value()), entry.IsOptional()) + case StructFieldKind: + field := e.AsStructField() + return fac.NewStructField(e.ID(), + field.Name(), fac.CopyExpr(field.Value()), field.IsOptional()) + default: + return fac.newEntryExpr(e.ID(), nil) + } +} + +func (*baseExprFactory) isExprFactory() {} + +func (fac *baseExprFactory) newExpr(id int64, e exprKindCase) Expr { + return &expr{ + id: id, + exprKindCase: e, + } +} + +func (fac *baseExprFactory) newEntryExpr(id int64, e entryExprKindCase) EntryExpr { + return &entryExpr{ + id: id, + entryExprKindCase: e, + } +} + +var ( + defaultFactory = &baseExprFactory{} +) diff --git a/vendor/github.com/google/cel-go/common/ast/navigable.go b/vendor/github.com/google/cel-go/common/ast/navigable.go new file mode 100644 index 000000000..f5ddf6aac --- /dev/null +++ b/vendor/github.com/google/cel-go/common/ast/navigable.go @@ -0,0 +1,652 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ast + +import ( + "github.com/google/cel-go/common/types" + "github.com/google/cel-go/common/types/ref" +) + +// NavigableExpr represents the base navigable expression value with methods to inspect the +// parent and child expressions. +type NavigableExpr interface { + Expr + + // Type of the expression. + // + // If the expression is type-checked, the type check metadata is returned. If the expression + // has not been type-checked, the types.DynType value is returned. 
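Before the navigable wrappers that begin below, one property of the factory worth noting: CopyExpr produces a structurally independent tree, so renumbering the copy leaves the original ids untouched. A rough sketch under the same assumptions as the previous example (imports elided, ids hand-assigned):

fac := ast.NewExprFactory()
orig := fac.NewCall(1, "size", fac.NewIdent(2, "x"))
dup := fac.CopyExpr(orig)

// Give the copy fresh ids; the generator receives each old id and returns a new one.
next := int64(10)
dup.RenumberIDs(func(int64) int64 {
    next++
    return next
})
fmt.Println(orig.ID(), dup.ID()) // 1 11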
+ Type() *types.Type + + // Parent returns the parent expression node, if one exists. + Parent() (NavigableExpr, bool) + + // Children returns a list of child expression nodes. + Children() []NavigableExpr + + // Depth indicates the depth in the expression tree. + // + // The root expression has depth 0. + Depth() int +} + +// NavigateAST converts an AST to a NavigableExpr +func NavigateAST(ast *AST) NavigableExpr { + return NavigateExpr(ast, ast.Expr()) +} + +// NavigateExpr creates a NavigableExpr whose type information is backed by the input AST. +// +// If the expression is already a NavigableExpr, the parent and depth information will be +// propagated on the new NavigableExpr value; otherwise, the expr value will be treated +// as though it is the root of the expression graph with a depth of 0. +func NavigateExpr(ast *AST, expr Expr) NavigableExpr { + depth := 0 + var parent NavigableExpr = nil + if nav, ok := expr.(NavigableExpr); ok { + depth = nav.Depth() + parent, _ = nav.Parent() + } + return newNavigableExpr(ast, parent, expr, depth) +} + +// ExprMatcher takes a NavigableExpr in and indicates whether the value is a match. +// +// This function type should be use with the `Match` and `MatchList` calls. +type ExprMatcher func(NavigableExpr) bool + +// ConstantValueMatcher returns an ExprMatcher which will return true if the input NavigableExpr +// is comprised of all constant values, such as a simple literal or even list and map literal. +func ConstantValueMatcher() ExprMatcher { + return matchIsConstantValue +} + +// KindMatcher returns an ExprMatcher which will return true if the input NavigableExpr.Kind() matches +// the specified `kind`. +func KindMatcher(kind ExprKind) ExprMatcher { + return func(e NavigableExpr) bool { + return e.Kind() == kind + } +} + +// FunctionMatcher returns an ExprMatcher which will match NavigableExpr nodes of CallKind type whose +// function name is equal to `funcName`. +func FunctionMatcher(funcName string) ExprMatcher { + return func(e NavigableExpr) bool { + if e.Kind() != CallKind { + return false + } + return e.AsCall().FunctionName() == funcName + } +} + +// AllMatcher returns true for all descendants of a NavigableExpr, effectively flattening them into a list. +// +// Such a result would work well with subsequent MatchList calls. +func AllMatcher() ExprMatcher { + return func(NavigableExpr) bool { + return true + } +} + +// MatchDescendants takes a NavigableExpr and ExprMatcher and produces a list of NavigableExpr values +// matching the input criteria in post-order (bottom up). +func MatchDescendants(expr NavigableExpr, matcher ExprMatcher) []NavigableExpr { + matches := []NavigableExpr{} + navVisitor := &baseVisitor{ + visitExpr: func(e Expr) { + nav := e.(NavigableExpr) + if matcher(nav) { + matches = append(matches, nav) + } + }, + } + visit(expr, navVisitor, postOrder, 0, 0) + return matches +} + +// MatchSubset applies an ExprMatcher to a list of NavigableExpr values and their descendants, producing a +// subset of NavigableExpr values which match. +func MatchSubset(exprs []NavigableExpr, matcher ExprMatcher) []NavigableExpr { + matches := []NavigableExpr{} + navVisitor := &baseVisitor{ + visitExpr: func(e Expr) { + nav := e.(NavigableExpr) + if matcher(nav) { + matches = append(matches, nav) + } + }, + } + for _, expr := range exprs { + visit(expr, navVisitor, postOrder, 0, 1) + } + return matches +} + +// Visitor defines an object for visiting Expr and EntryExpr nodes within an expression graph. 
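The matcher helpers above compose with NavigateAST for ad hoc queries over an expression tree. A sketch of the intended use, assuming a *ast.AST produced elsewhere (the parser and checker are not part of this hunk) and eliding imports:

// findSizeCalls returns every call to "size" in the expression, collected bottom-up.
func findSizeCalls(checked *ast.AST) []ast.NavigableExpr {
    root := ast.NavigateAST(checked)
    return ast.MatchDescendants(root, ast.FunctionMatcher("size"))
}

// constantSubtrees returns literals, plus list/map/struct literals whose
// children are all constant, per ConstantValueMatcher above.
func constantSubtrees(checked *ast.AST) []ast.NavigableExpr {
    return ast.MatchDescendants(ast.NavigateAST(checked), ast.ConstantValueMatcher())
}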
+type Visitor interface { + // VisitExpr visits the input expression. + VisitExpr(Expr) + + // VisitEntryExpr visits the input entry expression, i.e. a struct field or map entry. + VisitEntryExpr(EntryExpr) +} + +type baseVisitor struct { + visitExpr func(Expr) + visitEntryExpr func(EntryExpr) +} + +// VisitExpr visits the Expr if the internal expr visitor has been configured. +func (v *baseVisitor) VisitExpr(e Expr) { + if v.visitExpr != nil { + v.visitExpr(e) + } +} + +// VisitEntryExpr visits the entry if the internal expr entry visitor has been configured. +func (v *baseVisitor) VisitEntryExpr(e EntryExpr) { + if v.visitEntryExpr != nil { + v.visitEntryExpr(e) + } +} + +// NewExprVisitor creates a visitor which only visits expression nodes. +func NewExprVisitor(v func(Expr)) Visitor { + return &baseVisitor{ + visitExpr: v, + visitEntryExpr: nil, + } +} + +// PostOrderVisit walks the expression graph and calls the visitor in post-order (bottom-up). +func PostOrderVisit(expr Expr, visitor Visitor) { + visit(expr, visitor, postOrder, 0, 0) +} + +// PreOrderVisit walks the expression graph and calls the visitor in pre-order (top-down). +func PreOrderVisit(expr Expr, visitor Visitor) { + visit(expr, visitor, preOrder, 0, 0) +} + +type visitOrder int + +const ( + preOrder = iota + 1 + postOrder +) + +// TODO: consider exposing a way to configure a limit for the max visit depth. +// It's possible that we could want to configure this on the NewExprVisitor() +// and through MatchDescendents() / MaxID(). +func visit(expr Expr, visitor Visitor, order visitOrder, depth, maxDepth int) { + if maxDepth > 0 && depth == maxDepth { + return + } + if order == preOrder { + visitor.VisitExpr(expr) + } + switch expr.Kind() { + case CallKind: + c := expr.AsCall() + if c.IsMemberFunction() { + visit(c.Target(), visitor, order, depth+1, maxDepth) + } + for _, arg := range c.Args() { + visit(arg, visitor, order, depth+1, maxDepth) + } + case ComprehensionKind: + c := expr.AsComprehension() + visit(c.IterRange(), visitor, order, depth+1, maxDepth) + visit(c.AccuInit(), visitor, order, depth+1, maxDepth) + visit(c.LoopCondition(), visitor, order, depth+1, maxDepth) + visit(c.LoopStep(), visitor, order, depth+1, maxDepth) + visit(c.Result(), visitor, order, depth+1, maxDepth) + case ListKind: + l := expr.AsList() + for _, elem := range l.Elements() { + visit(elem, visitor, order, depth+1, maxDepth) + } + case MapKind: + m := expr.AsMap() + for _, e := range m.Entries() { + if order == preOrder { + visitor.VisitEntryExpr(e) + } + entry := e.AsMapEntry() + visit(entry.Key(), visitor, order, depth+1, maxDepth) + visit(entry.Value(), visitor, order, depth+1, maxDepth) + if order == postOrder { + visitor.VisitEntryExpr(e) + } + } + case SelectKind: + visit(expr.AsSelect().Operand(), visitor, order, depth+1, maxDepth) + case StructKind: + s := expr.AsStruct() + for _, f := range s.Fields() { + visitor.VisitEntryExpr(f) + visit(f.AsStructField().Value(), visitor, order, depth+1, maxDepth) + } + } + if order == postOrder { + visitor.VisitExpr(expr) + } +} + +func matchIsConstantValue(e NavigableExpr) bool { + if e.Kind() == LiteralKind { + return true + } + if e.Kind() == StructKind || e.Kind() == MapKind || e.Kind() == ListKind { + for _, child := range e.Children() { + if !matchIsConstantValue(child) { + return false + } + } + return true + } + return false +} + +func newNavigableExpr(ast *AST, parent NavigableExpr, expr Expr, depth int) NavigableExpr { + // Reduce navigable expression nesting by unwrapping the embedded 
Expr value. + if nav, ok := expr.(*navigableExprImpl); ok { + expr = nav.Expr + } + nav := &navigableExprImpl{ + Expr: expr, + depth: depth, + ast: ast, + parent: parent, + createChildren: getChildFactory(expr), + } + return nav +} + +type navigableExprImpl struct { + Expr + depth int + ast *AST + parent NavigableExpr + createChildren childFactory +} + +func (nav *navigableExprImpl) Parent() (NavigableExpr, bool) { + if nav.parent != nil { + return nav.parent, true + } + return nil, false +} + +func (nav *navigableExprImpl) ID() int64 { + return nav.Expr.ID() +} + +func (nav *navigableExprImpl) Kind() ExprKind { + return nav.Expr.Kind() +} + +func (nav *navigableExprImpl) Type() *types.Type { + return nav.ast.GetType(nav.ID()) +} + +func (nav *navigableExprImpl) Children() []NavigableExpr { + return nav.createChildren(nav) +} + +func (nav *navigableExprImpl) Depth() int { + return nav.depth +} + +func (nav *navigableExprImpl) AsCall() CallExpr { + return navigableCallImpl{navigableExprImpl: nav} +} + +func (nav *navigableExprImpl) AsComprehension() ComprehensionExpr { + return navigableComprehensionImpl{navigableExprImpl: nav} +} + +func (nav *navigableExprImpl) AsIdent() string { + return nav.Expr.AsIdent() +} + +func (nav *navigableExprImpl) AsList() ListExpr { + return navigableListImpl{navigableExprImpl: nav} +} + +func (nav *navigableExprImpl) AsLiteral() ref.Val { + return nav.Expr.AsLiteral() +} + +func (nav *navigableExprImpl) AsMap() MapExpr { + return navigableMapImpl{navigableExprImpl: nav} +} + +func (nav *navigableExprImpl) AsSelect() SelectExpr { + return navigableSelectImpl{navigableExprImpl: nav} +} + +func (nav *navigableExprImpl) AsStruct() StructExpr { + return navigableStructImpl{navigableExprImpl: nav} +} + +func (nav *navigableExprImpl) createChild(e Expr) NavigableExpr { + return newNavigableExpr(nav.ast, nav, e, nav.depth+1) +} + +func (nav *navigableExprImpl) isExpr() {} + +type navigableCallImpl struct { + *navigableExprImpl +} + +func (call navigableCallImpl) FunctionName() string { + return call.Expr.AsCall().FunctionName() +} + +func (call navigableCallImpl) IsMemberFunction() bool { + return call.Expr.AsCall().IsMemberFunction() +} + +func (call navigableCallImpl) Target() Expr { + t := call.Expr.AsCall().Target() + if t != nil { + return call.createChild(t) + } + return nil +} + +func (call navigableCallImpl) Args() []Expr { + args := call.Expr.AsCall().Args() + navArgs := make([]Expr, len(args)) + for i, a := range args { + navArgs[i] = call.createChild(a) + } + return navArgs +} + +type navigableComprehensionImpl struct { + *navigableExprImpl +} + +func (comp navigableComprehensionImpl) IterRange() Expr { + return comp.createChild(comp.Expr.AsComprehension().IterRange()) +} + +func (comp navigableComprehensionImpl) IterVar() string { + return comp.Expr.AsComprehension().IterVar() +} + +func (comp navigableComprehensionImpl) AccuVar() string { + return comp.Expr.AsComprehension().AccuVar() +} + +func (comp navigableComprehensionImpl) AccuInit() Expr { + return comp.createChild(comp.Expr.AsComprehension().AccuInit()) +} + +func (comp navigableComprehensionImpl) LoopCondition() Expr { + return comp.createChild(comp.Expr.AsComprehension().LoopCondition()) +} + +func (comp navigableComprehensionImpl) LoopStep() Expr { + return comp.createChild(comp.Expr.AsComprehension().LoopStep()) +} + +func (comp navigableComprehensionImpl) Result() Expr { + return comp.createChild(comp.Expr.AsComprehension().Result()) +} + +type navigableListImpl struct { + 
*navigableExprImpl +} + +func (l navigableListImpl) Elements() []Expr { + pbElems := l.Expr.AsList().Elements() + elems := make([]Expr, len(pbElems)) + for i := 0; i < len(pbElems); i++ { + elems[i] = l.createChild(pbElems[i]) + } + return elems +} + +func (l navigableListImpl) IsOptional(index int32) bool { + return l.Expr.AsList().IsOptional(index) +} + +func (l navigableListImpl) OptionalIndices() []int32 { + return l.Expr.AsList().OptionalIndices() +} + +func (l navigableListImpl) Size() int { + return l.Expr.AsList().Size() +} + +type navigableMapImpl struct { + *navigableExprImpl +} + +func (m navigableMapImpl) Entries() []EntryExpr { + mapExpr := m.Expr.AsMap() + entries := make([]EntryExpr, len(mapExpr.Entries())) + for i, e := range mapExpr.Entries() { + entry := e.AsMapEntry() + entries[i] = &entryExpr{ + id: e.ID(), + entryExprKindCase: navigableEntryImpl{ + key: m.createChild(entry.Key()), + val: m.createChild(entry.Value()), + isOpt: entry.IsOptional(), + }, + } + } + return entries +} + +func (m navigableMapImpl) Size() int { + return m.Expr.AsMap().Size() +} + +type navigableEntryImpl struct { + key NavigableExpr + val NavigableExpr + isOpt bool +} + +func (e navigableEntryImpl) Kind() EntryExprKind { + return MapEntryKind +} + +func (e navigableEntryImpl) Key() Expr { + return e.key +} + +func (e navigableEntryImpl) Value() Expr { + return e.val +} + +func (e navigableEntryImpl) IsOptional() bool { + return e.isOpt +} + +func (e navigableEntryImpl) renumberIDs(IDGenerator) {} + +func (e navigableEntryImpl) isEntryExpr() {} + +type navigableSelectImpl struct { + *navigableExprImpl +} + +func (sel navigableSelectImpl) FieldName() string { + return sel.Expr.AsSelect().FieldName() +} + +func (sel navigableSelectImpl) IsTestOnly() bool { + return sel.Expr.AsSelect().IsTestOnly() +} + +func (sel navigableSelectImpl) Operand() Expr { + return sel.createChild(sel.Expr.AsSelect().Operand()) +} + +type navigableStructImpl struct { + *navigableExprImpl +} + +func (s navigableStructImpl) TypeName() string { + return s.Expr.AsStruct().TypeName() +} + +func (s navigableStructImpl) Fields() []EntryExpr { + fieldInits := s.Expr.AsStruct().Fields() + fields := make([]EntryExpr, len(fieldInits)) + for i, f := range fieldInits { + field := f.AsStructField() + fields[i] = &entryExpr{ + id: f.ID(), + entryExprKindCase: navigableFieldImpl{ + name: field.Name(), + val: s.createChild(field.Value()), + isOpt: field.IsOptional(), + }, + } + } + return fields +} + +type navigableFieldImpl struct { + name string + val NavigableExpr + isOpt bool +} + +func (f navigableFieldImpl) Kind() EntryExprKind { + return StructFieldKind +} + +func (f navigableFieldImpl) Name() string { + return f.name +} + +func (f navigableFieldImpl) Value() Expr { + return f.val +} + +func (f navigableFieldImpl) IsOptional() bool { + return f.isOpt +} + +func (f navigableFieldImpl) renumberIDs(IDGenerator) {} + +func (f navigableFieldImpl) isEntryExpr() {} + +func getChildFactory(expr Expr) childFactory { + if expr == nil { + return noopFactory + } + switch expr.Kind() { + case LiteralKind: + return noopFactory + case IdentKind: + return noopFactory + case SelectKind: + return selectFactory + case CallKind: + return callArgFactory + case ListKind: + return listElemFactory + case MapKind: + return mapEntryFactory + case StructKind: + return structEntryFactory + case ComprehensionKind: + return comprehensionFactory + default: + return noopFactory + } +} + +type childFactory func(*navigableExprImpl) []NavigableExpr + +func 
noopFactory(*navigableExprImpl) []NavigableExpr { + return nil +} + +func selectFactory(nav *navigableExprImpl) []NavigableExpr { + return []NavigableExpr{nav.createChild(nav.AsSelect().Operand())} +} + +func callArgFactory(nav *navigableExprImpl) []NavigableExpr { + call := nav.Expr.AsCall() + argCount := len(call.Args()) + if call.IsMemberFunction() { + argCount++ + } + navExprs := make([]NavigableExpr, argCount) + i := 0 + if call.IsMemberFunction() { + navExprs[i] = nav.createChild(call.Target()) + i++ + } + for _, arg := range call.Args() { + navExprs[i] = nav.createChild(arg) + i++ + } + return navExprs +} + +func listElemFactory(nav *navigableExprImpl) []NavigableExpr { + l := nav.Expr.AsList() + navExprs := make([]NavigableExpr, len(l.Elements())) + for i, e := range l.Elements() { + navExprs[i] = nav.createChild(e) + } + return navExprs +} + +func structEntryFactory(nav *navigableExprImpl) []NavigableExpr { + s := nav.Expr.AsStruct() + entries := make([]NavigableExpr, len(s.Fields())) + for i, e := range s.Fields() { + f := e.AsStructField() + entries[i] = nav.createChild(f.Value()) + } + return entries +} + +func mapEntryFactory(nav *navigableExprImpl) []NavigableExpr { + m := nav.Expr.AsMap() + entries := make([]NavigableExpr, len(m.Entries())*2) + j := 0 + for _, e := range m.Entries() { + mapEntry := e.AsMapEntry() + entries[j] = nav.createChild(mapEntry.Key()) + entries[j+1] = nav.createChild(mapEntry.Value()) + j += 2 + } + return entries +} + +func comprehensionFactory(nav *navigableExprImpl) []NavigableExpr { + compre := nav.Expr.AsComprehension() + return []NavigableExpr{ + nav.createChild(compre.IterRange()), + nav.createChild(compre.AccuInit()), + nav.createChild(compre.LoopCondition()), + nav.createChild(compre.LoopStep()), + nav.createChild(compre.Result()), + } +} diff --git a/vendor/github.com/google/cel-go/common/containers/BUILD.bazel b/vendor/github.com/google/cel-go/common/containers/BUILD.bazel index 3f3f07887..81197f064 100644 --- a/vendor/github.com/google/cel-go/common/containers/BUILD.bazel +++ b/vendor/github.com/google/cel-go/common/containers/BUILD.bazel @@ -12,7 +12,7 @@ go_library( ], importpath = "github.com/google/cel-go/common/containers", deps = [ - "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library", + "//common/ast:go_default_library", ], ) @@ -26,6 +26,6 @@ go_test( ":go_default_library", ], deps = [ - "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library", + "//common/ast:go_default_library", ], ) diff --git a/vendor/github.com/google/cel-go/common/containers/container.go b/vendor/github.com/google/cel-go/common/containers/container.go index d46698d3c..52153d4cd 100644 --- a/vendor/github.com/google/cel-go/common/containers/container.go +++ b/vendor/github.com/google/cel-go/common/containers/container.go @@ -20,7 +20,7 @@ import ( "fmt" "strings" - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" + "github.com/google/cel-go/common/ast" ) var ( @@ -297,19 +297,19 @@ func Name(name string) ContainerOption { // ToQualifiedName converts an expression AST into a qualified name if possible, with a boolean // 'found' value that indicates if the conversion is successful. 
-func ToQualifiedName(e *exprpb.Expr) (string, bool) { - switch e.GetExprKind().(type) { - case *exprpb.Expr_IdentExpr: - id := e.GetIdentExpr() - return id.GetName(), true - case *exprpb.Expr_SelectExpr: - sel := e.GetSelectExpr() +func ToQualifiedName(e ast.Expr) (string, bool) { + switch e.Kind() { + case ast.IdentKind: + id := e.AsIdent() + return id, true + case ast.SelectKind: + sel := e.AsSelect() // Test only expressions are not valid as qualified names. - if sel.GetTestOnly() { + if sel.IsTestOnly() { return "", false } - if qual, found := ToQualifiedName(sel.GetOperand()); found { - return qual + "." + sel.GetField(), true + if qual, found := ToQualifiedName(sel.Operand()); found { + return qual + "." + sel.FieldName(), true } } return "", false diff --git a/vendor/github.com/google/cel-go/common/debug/BUILD.bazel b/vendor/github.com/google/cel-go/common/debug/BUILD.bazel index 1f029839c..724ed3404 100644 --- a/vendor/github.com/google/cel-go/common/debug/BUILD.bazel +++ b/vendor/github.com/google/cel-go/common/debug/BUILD.bazel @@ -13,6 +13,8 @@ go_library( importpath = "github.com/google/cel-go/common/debug", deps = [ "//common:go_default_library", - "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library", + "//common/ast:go_default_library", + "//common/types:go_default_library", + "//common/types/ref:go_default_library", ], ) diff --git a/vendor/github.com/google/cel-go/common/debug/debug.go b/vendor/github.com/google/cel-go/common/debug/debug.go index 5dab156ef..e4c01ac6e 100644 --- a/vendor/github.com/google/cel-go/common/debug/debug.go +++ b/vendor/github.com/google/cel-go/common/debug/debug.go @@ -22,7 +22,9 @@ import ( "strconv" "strings" - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" + "github.com/google/cel-go/common/ast" + "github.com/google/cel-go/common/types" + "github.com/google/cel-go/common/types/ref" ) // Adorner returns debug metadata that will be tacked on to the string @@ -38,7 +40,7 @@ type Writer interface { // Buffer pushes an expression into an internal queue of expressions to // write to a string. - Buffer(e *exprpb.Expr) + Buffer(e ast.Expr) } type emptyDebugAdorner struct { @@ -51,12 +53,12 @@ func (a *emptyDebugAdorner) GetMetadata(e any) string { } // ToDebugString gives the unadorned string representation of the Expr. -func ToDebugString(e *exprpb.Expr) string { +func ToDebugString(e ast.Expr) string { return ToAdornedDebugString(e, emptyAdorner) } // ToAdornedDebugString gives the adorned string representation of the Expr. 
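The new ast.Expr-based ToQualifiedName behaves like the proto-based version it replaces: it flattens ident/select chains and rejects test-only (presence test) selects. A quick sketch using the factory from common/ast (imports elided, ids hand-assigned):

fac := ast.NewExprFactory()

// a.b.c flattens to a dotted name.
sel := fac.NewSelect(3, fac.NewSelect(2, fac.NewIdent(1, "a"), "b"), "c")
name, ok := containers.ToQualifiedName(sel)
fmt.Println(name, ok) // a.b.c true

// A presence test is modeled as a test-only select and is not a qualified name.
test := fac.NewPresenceTest(4, fac.NewIdent(1, "a"), "b")
_, ok = containers.ToQualifiedName(test)
fmt.Println(ok) // false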
-func ToAdornedDebugString(e *exprpb.Expr, adorner Adorner) string { +func ToAdornedDebugString(e ast.Expr, adorner Adorner) string { w := newDebugWriter(adorner) w.Buffer(e) return w.String() @@ -78,49 +80,51 @@ func newDebugWriter(a Adorner) *debugWriter { } } -func (w *debugWriter) Buffer(e *exprpb.Expr) { +func (w *debugWriter) Buffer(e ast.Expr) { if e == nil { return } - switch e.ExprKind.(type) { - case *exprpb.Expr_ConstExpr: - w.append(formatLiteral(e.GetConstExpr())) - case *exprpb.Expr_IdentExpr: - w.append(e.GetIdentExpr().Name) - case *exprpb.Expr_SelectExpr: - w.appendSelect(e.GetSelectExpr()) - case *exprpb.Expr_CallExpr: - w.appendCall(e.GetCallExpr()) - case *exprpb.Expr_ListExpr: - w.appendList(e.GetListExpr()) - case *exprpb.Expr_StructExpr: - w.appendStruct(e.GetStructExpr()) - case *exprpb.Expr_ComprehensionExpr: - w.appendComprehension(e.GetComprehensionExpr()) + switch e.Kind() { + case ast.LiteralKind: + w.append(formatLiteral(e.AsLiteral())) + case ast.IdentKind: + w.append(e.AsIdent()) + case ast.SelectKind: + w.appendSelect(e.AsSelect()) + case ast.CallKind: + w.appendCall(e.AsCall()) + case ast.ListKind: + w.appendList(e.AsList()) + case ast.MapKind: + w.appendMap(e.AsMap()) + case ast.StructKind: + w.appendStruct(e.AsStruct()) + case ast.ComprehensionKind: + w.appendComprehension(e.AsComprehension()) } w.adorn(e) } -func (w *debugWriter) appendSelect(sel *exprpb.Expr_Select) { - w.Buffer(sel.GetOperand()) +func (w *debugWriter) appendSelect(sel ast.SelectExpr) { + w.Buffer(sel.Operand()) w.append(".") - w.append(sel.GetField()) - if sel.TestOnly { + w.append(sel.FieldName()) + if sel.IsTestOnly() { w.append("~test-only~") } } -func (w *debugWriter) appendCall(call *exprpb.Expr_Call) { - if call.Target != nil { - w.Buffer(call.GetTarget()) +func (w *debugWriter) appendCall(call ast.CallExpr) { + if call.IsMemberFunction() { + w.Buffer(call.Target()) w.append(".") } - w.append(call.GetFunction()) + w.append(call.FunctionName()) w.append("(") - if len(call.GetArgs()) > 0 { + if len(call.Args()) > 0 { w.addIndent() w.appendLine() - for i, arg := range call.GetArgs() { + for i, arg := range call.Args() { if i > 0 { w.append(",") w.appendLine() @@ -133,12 +137,12 @@ func (w *debugWriter) appendCall(call *exprpb.Expr_Call) { w.append(")") } -func (w *debugWriter) appendList(list *exprpb.Expr_CreateList) { +func (w *debugWriter) appendList(list ast.ListExpr) { w.append("[") - if len(list.GetElements()) > 0 { + if len(list.Elements()) > 0 { w.appendLine() w.addIndent() - for i, elem := range list.GetElements() { + for i, elem := range list.Elements() { if i > 0 { w.append(",") w.appendLine() @@ -151,32 +155,25 @@ func (w *debugWriter) appendList(list *exprpb.Expr_CreateList) { w.append("]") } -func (w *debugWriter) appendStruct(obj *exprpb.Expr_CreateStruct) { - if obj.MessageName != "" { - w.appendObject(obj) - } else { - w.appendMap(obj) - } -} - -func (w *debugWriter) appendObject(obj *exprpb.Expr_CreateStruct) { - w.append(obj.GetMessageName()) +func (w *debugWriter) appendStruct(obj ast.StructExpr) { + w.append(obj.TypeName()) w.append("{") - if len(obj.GetEntries()) > 0 { + if len(obj.Fields()) > 0 { w.appendLine() w.addIndent() - for i, entry := range obj.GetEntries() { + for i, f := range obj.Fields() { + field := f.AsStructField() if i > 0 { w.append(",") w.appendLine() } - if entry.GetOptionalEntry() { + if field.IsOptional() { w.append("?") } - w.append(entry.GetFieldKey()) + w.append(field.Name()) w.append(":") - w.Buffer(entry.GetValue()) - w.adorn(entry) + 
w.Buffer(field.Value()) + w.adorn(f) } w.removeIndent() w.appendLine() @@ -184,23 +181,24 @@ func (w *debugWriter) appendObject(obj *exprpb.Expr_CreateStruct) { w.append("}") } -func (w *debugWriter) appendMap(obj *exprpb.Expr_CreateStruct) { +func (w *debugWriter) appendMap(m ast.MapExpr) { w.append("{") - if len(obj.GetEntries()) > 0 { + if m.Size() > 0 { w.appendLine() w.addIndent() - for i, entry := range obj.GetEntries() { + for i, e := range m.Entries() { + entry := e.AsMapEntry() if i > 0 { w.append(",") w.appendLine() } - if entry.GetOptionalEntry() { + if entry.IsOptional() { w.append("?") } - w.Buffer(entry.GetMapKey()) + w.Buffer(entry.Key()) w.append(":") - w.Buffer(entry.GetValue()) - w.adorn(entry) + w.Buffer(entry.Value()) + w.adorn(e) } w.removeIndent() w.appendLine() @@ -208,62 +206,62 @@ func (w *debugWriter) appendMap(obj *exprpb.Expr_CreateStruct) { w.append("}") } -func (w *debugWriter) appendComprehension(comprehension *exprpb.Expr_Comprehension) { +func (w *debugWriter) appendComprehension(comprehension ast.ComprehensionExpr) { w.append("__comprehension__(") w.addIndent() w.appendLine() w.append("// Variable") w.appendLine() - w.append(comprehension.GetIterVar()) + w.append(comprehension.IterVar()) w.append(",") w.appendLine() w.append("// Target") w.appendLine() - w.Buffer(comprehension.GetIterRange()) + w.Buffer(comprehension.IterRange()) w.append(",") w.appendLine() w.append("// Accumulator") w.appendLine() - w.append(comprehension.GetAccuVar()) + w.append(comprehension.AccuVar()) w.append(",") w.appendLine() w.append("// Init") w.appendLine() - w.Buffer(comprehension.GetAccuInit()) + w.Buffer(comprehension.AccuInit()) w.append(",") w.appendLine() w.append("// LoopCondition") w.appendLine() - w.Buffer(comprehension.GetLoopCondition()) + w.Buffer(comprehension.LoopCondition()) w.append(",") w.appendLine() w.append("// LoopStep") w.appendLine() - w.Buffer(comprehension.GetLoopStep()) + w.Buffer(comprehension.LoopStep()) w.append(",") w.appendLine() w.append("// Result") w.appendLine() - w.Buffer(comprehension.GetResult()) + w.Buffer(comprehension.Result()) w.append(")") w.removeIndent() } -func formatLiteral(c *exprpb.Constant) string { - switch c.GetConstantKind().(type) { - case *exprpb.Constant_BoolValue: - return fmt.Sprintf("%t", c.GetBoolValue()) - case *exprpb.Constant_BytesValue: - return fmt.Sprintf("b\"%s\"", string(c.GetBytesValue())) - case *exprpb.Constant_DoubleValue: - return fmt.Sprintf("%v", c.GetDoubleValue()) - case *exprpb.Constant_Int64Value: - return fmt.Sprintf("%d", c.GetInt64Value()) - case *exprpb.Constant_StringValue: - return strconv.Quote(c.GetStringValue()) - case *exprpb.Constant_Uint64Value: - return fmt.Sprintf("%du", c.GetUint64Value()) - case *exprpb.Constant_NullValue: +func formatLiteral(c ref.Val) string { + switch v := c.(type) { + case types.Bool: + return fmt.Sprintf("%t", v) + case types.Bytes: + return fmt.Sprintf("b\"%s\"", string(v)) + case types.Double: + return fmt.Sprintf("%v", float64(v)) + case types.Int: + return fmt.Sprintf("%d", int64(v)) + case types.String: + return strconv.Quote(string(v)) + case types.Uint: + return fmt.Sprintf("%du", uint64(v)) + case types.Null: return "null" default: panic("Unknown constant type") diff --git a/vendor/github.com/google/cel-go/common/decls/BUILD.bazel b/vendor/github.com/google/cel-go/common/decls/BUILD.bazel new file mode 100644 index 000000000..17791dce6 --- /dev/null +++ b/vendor/github.com/google/cel-go/common/decls/BUILD.bazel @@ -0,0 +1,39 @@ 
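With the debug Writer now operating on ast.Expr rather than protobuf messages, a factory-built expression can be stringified directly. A sketch (imports elided); the operator-style function name is illustrative, and the exact indentation depends on the writer above, so the output shown is only approximate:

fac := ast.NewExprFactory()
e := fac.NewCall(3, "_&&_", fac.NewIdent(1, "a"), fac.NewIdent(2, "b"))
fmt.Println(debug.ToDebugString(e))
// Roughly:
// _&&_(
//   a,
//   b
// )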
+load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") + +package( + default_visibility = ["//visibility:public"], + licenses = ["notice"], # Apache 2.0 +) + +go_library( + name = "go_default_library", + srcs = [ + "decls.go", + ], + importpath = "github.com/google/cel-go/common/decls", + deps = [ + "//checker/decls:go_default_library", + "//common/functions:go_default_library", + "//common/types:go_default_library", + "//common/types/ref:go_default_library", + "//common/types/traits:go_default_library", + "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library", + ], +) + +go_test( + name = "go_default_test", + srcs = [ + "decls_test.go", + ], + embed = [":go_default_library"], + deps = [ + "//checker/decls:go_default_library", + "//common/overloads:go_default_library", + "//common/types:go_default_library", + "//common/types/ref:go_default_library", + "//common/types/traits:go_default_library", + "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library", + "@org_golang_google_protobuf//proto:go_default_library", + ], +) diff --git a/vendor/github.com/google/cel-go/common/decls/decls.go b/vendor/github.com/google/cel-go/common/decls/decls.go new file mode 100644 index 000000000..734ebe57e --- /dev/null +++ b/vendor/github.com/google/cel-go/common/decls/decls.go @@ -0,0 +1,844 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package decls contains function and variable declaration structs and helper methods. +package decls + +import ( + "fmt" + "strings" + + chkdecls "github.com/google/cel-go/checker/decls" + "github.com/google/cel-go/common/functions" + "github.com/google/cel-go/common/types" + "github.com/google/cel-go/common/types/ref" + + exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" +) + +// NewFunction creates a new function declaration with a set of function options to configure overloads +// and function definitions (implementations). +// +// Functions are checked for name collisions and singleton redefinition. +func NewFunction(name string, opts ...FunctionOpt) (*FunctionDecl, error) { + fn := &FunctionDecl{ + name: name, + overloads: map[string]*OverloadDecl{}, + overloadOrdinals: []string{}, + } + var err error + for _, opt := range opts { + fn, err = opt(fn) + if err != nil { + return nil, err + } + } + if len(fn.overloads) == 0 { + return nil, fmt.Errorf("function %s must have at least one overload", name) + } + return fn, nil +} + +// FunctionDecl defines a function name, overload set, and optionally a singleton definition for all +// overload instances. +type FunctionDecl struct { + name string + + // overloads associated with the function name. + overloads map[string]*OverloadDecl + + // singleton implementation of the function for all overloads. + // + // If this option is set, an error will occur if any overloads specify a per-overload implementation + // or if another function with the same name attempts to redefine the singleton. 
+ singleton *functions.Overload + + // disableTypeGuards is a performance optimization to disable detailed runtime type checks which could + // add overhead on common operations. Setting this option true leaves error checks and argument checks + // intact. + disableTypeGuards bool + + // state indicates that the binding should be provided as a declaration, as a runtime binding, or both. + state declarationState + + // overloadOrdinals indicates the order in which the overload was declared. + overloadOrdinals []string +} + +type declarationState int + +const ( + declarationStateUnset declarationState = iota + declarationDisabled + declarationEnabled +) + +// Name returns the function name in human-readable terms, e.g. 'contains' of 'math.least' +func (f *FunctionDecl) Name() string { + if f == nil { + return "" + } + return f.name +} + +// IsDeclarationDisabled indicates that the function implementation should be added to the dispatcher, but the +// declaration should not be exposed for use in expressions. +func (f *FunctionDecl) IsDeclarationDisabled() bool { + return f.state == declarationDisabled +} + +// Merge combines an existing function declaration with another. +// +// If a function is extended, by say adding new overloads to an existing function, then it is merged with the +// prior definition of the function at which point its overloads must not collide with pre-existing overloads +// and its bindings (singleton, or per-overload) must not conflict with previous definitions either. +func (f *FunctionDecl) Merge(other *FunctionDecl) (*FunctionDecl, error) { + if f == other { + return f, nil + } + if f.Name() != other.Name() { + return nil, fmt.Errorf("cannot merge unrelated functions. %s and %s", f.Name(), other.Name()) + } + merged := &FunctionDecl{ + name: f.Name(), + overloads: make(map[string]*OverloadDecl, len(f.overloads)), + singleton: f.singleton, + overloadOrdinals: make([]string, len(f.overloads)), + // if one function is expecting type-guards and the other is not, then they + // must not be disabled. + disableTypeGuards: f.disableTypeGuards && other.disableTypeGuards, + // default to the current functions declaration state. + state: f.state, + } + // If the other state indicates that the declaration should be explicitly enabled or + // disabled, then update the merged state with the most recent value. + if other.state != declarationStateUnset { + merged.state = other.state + } + // baseline copy of the overloads and their ordinals + copy(merged.overloadOrdinals, f.overloadOrdinals) + for oID, o := range f.overloads { + merged.overloads[oID] = o + } + // overloads and their ordinals are added from the left + for _, oID := range other.overloadOrdinals { + o := other.overloads[oID] + err := merged.AddOverload(o) + if err != nil { + return nil, fmt.Errorf("function declaration merge failed: %v", err) + } + } + if other.singleton != nil { + if merged.singleton != nil && merged.singleton != other.singleton { + return nil, fmt.Errorf("function already has a singleton binding: %s", f.Name()) + } + merged.singleton = other.singleton + } + return merged, nil +} + +// AddOverload ensures that the new overload does not collide with an existing overload signature; +// however, if the function signatures are identical, the implementation may be rewritten as its +// difficult to compare functions by object identity. 
+func (f *FunctionDecl) AddOverload(overload *OverloadDecl) error { + if f == nil { + return fmt.Errorf("nil function cannot add overload: %s", overload.ID()) + } + for oID, o := range f.overloads { + if oID != overload.ID() && o.SignatureOverlaps(overload) { + return fmt.Errorf("overload signature collision in function %s: %s collides with %s", f.Name(), oID, overload.ID()) + } + if oID == overload.ID() { + if o.SignatureEquals(overload) && o.IsNonStrict() == overload.IsNonStrict() { + // Allow redefinition of an overload implementation so long as the signatures match. + f.overloads[oID] = overload + return nil + } + return fmt.Errorf("overload redefinition in function. %s: %s has multiple definitions", f.Name(), oID) + } + } + f.overloadOrdinals = append(f.overloadOrdinals, overload.ID()) + f.overloads[overload.ID()] = overload + return nil +} + +// OverloadDecls returns the overload declarations in the order in which they were declared. +func (f *FunctionDecl) OverloadDecls() []*OverloadDecl { + if f == nil { + return []*OverloadDecl{} + } + overloads := make([]*OverloadDecl, 0, len(f.overloads)) + for _, oID := range f.overloadOrdinals { + overloads = append(overloads, f.overloads[oID]) + } + return overloads +} + +// Bindings produces a set of function bindings, if any are defined. +func (f *FunctionDecl) Bindings() ([]*functions.Overload, error) { + if f == nil { + return []*functions.Overload{}, nil + } + overloads := []*functions.Overload{} + nonStrict := false + for _, oID := range f.overloadOrdinals { + o := f.overloads[oID] + if o.hasBinding() { + overload := &functions.Overload{ + Operator: o.ID(), + Unary: o.guardedUnaryOp(f.Name(), f.disableTypeGuards), + Binary: o.guardedBinaryOp(f.Name(), f.disableTypeGuards), + Function: o.guardedFunctionOp(f.Name(), f.disableTypeGuards), + OperandTrait: o.OperandTrait(), + NonStrict: o.IsNonStrict(), + } + overloads = append(overloads, overload) + nonStrict = nonStrict || o.IsNonStrict() + } + } + if f.singleton != nil { + if len(overloads) != 0 { + return nil, fmt.Errorf("singleton function incompatible with specialized overloads: %s", f.Name()) + } + overloads = []*functions.Overload{ + { + Operator: f.Name(), + Unary: f.singleton.Unary, + Binary: f.singleton.Binary, + Function: f.singleton.Function, + OperandTrait: f.singleton.OperandTrait, + }, + } + // fall-through to return single overload case. + } + if len(overloads) == 0 { + return overloads, nil + } + // Single overload. Replicate an entry for it using the function name as well. + if len(overloads) == 1 { + if overloads[0].Operator == f.Name() { + return overloads, nil + } + return append(overloads, &functions.Overload{ + Operator: f.Name(), + Unary: overloads[0].Unary, + Binary: overloads[0].Binary, + Function: overloads[0].Function, + NonStrict: overloads[0].NonStrict, + OperandTrait: overloads[0].OperandTrait, + }), nil + } + // All of the defined overloads are wrapped into a top-level function which + // performs dynamic dispatch to the proper overload based on the argument types. + bindings := append([]*functions.Overload{}, overloads...) + funcDispatch := func(args ...ref.Val) ref.Val { + for _, oID := range f.overloadOrdinals { + o := f.overloads[oID] + // During dynamic dispatch over multiple functions, signature agreement checks + // are preserved in order to assist with the function resolution step. + switch len(args) { + case 1: + if o.unaryOp != nil && o.matchesRuntimeSignature( /* disableTypeGuards=*/ false, args...) 
{ + return o.unaryOp(args[0]) + } + case 2: + if o.binaryOp != nil && o.matchesRuntimeSignature( /* disableTypeGuards=*/ false, args...) { + return o.binaryOp(args[0], args[1]) + } + } + if o.functionOp != nil && o.matchesRuntimeSignature( /* disableTypeGuards=*/ false, args...) { + return o.functionOp(args...) + } + // eventually this will fall through to the noSuchOverload below. + } + return MaybeNoSuchOverload(f.Name(), args...) + } + function := &functions.Overload{ + Operator: f.Name(), + Function: funcDispatch, + NonStrict: nonStrict, + } + return append(bindings, function), nil +} + +// MaybeNoSuchOverload determines whether to propagate an error if one is provided as an argument, or +// to return an unknown set, or to produce a new error for a missing function signature. +func MaybeNoSuchOverload(funcName string, args ...ref.Val) ref.Val { + argTypes := make([]string, len(args)) + var unk *types.Unknown = nil + for i, arg := range args { + if types.IsError(arg) { + return arg + } + if types.IsUnknown(arg) { + unk = types.MergeUnknowns(arg.(*types.Unknown), unk) + } + argTypes[i] = arg.Type().TypeName() + } + if unk != nil { + return unk + } + signature := strings.Join(argTypes, ", ") + return types.NewErr("no such overload: %s(%s)", funcName, signature) +} + +// FunctionOpt defines a functional option for mutating a function declaration. +type FunctionOpt func(*FunctionDecl) (*FunctionDecl, error) + +// DisableTypeGuards disables automatically generated function invocation guards on direct overload calls. +// Type guards remain on during dynamic dispatch for parsed-only expressions. +func DisableTypeGuards(value bool) FunctionOpt { + return func(fn *FunctionDecl) (*FunctionDecl, error) { + fn.disableTypeGuards = value + return fn, nil + } +} + +// DisableDeclaration indicates that the function declaration should be disabled, but the runtime function +// binding should be provided. Marking a function as runtime-only is a safe way to manage deprecations +// of function declarations while still preserving the runtime behavior for previously compiled expressions. +func DisableDeclaration(value bool) FunctionOpt { + return func(fn *FunctionDecl) (*FunctionDecl, error) { + if value { + fn.state = declarationDisabled + } else { + fn.state = declarationEnabled + } + return fn, nil + } +} + +// SingletonUnaryBinding creates a singleton function definition to be used for all function overloads. +// +// Note, this approach works well if operand is expected to have a specific trait which it implements, +// e.g. traits.ContainerType. Otherwise, prefer per-overload function bindings. +func SingletonUnaryBinding(fn functions.UnaryOp, traits ...int) FunctionOpt { + trait := 0 + for _, t := range traits { + trait = trait | t + } + return func(f *FunctionDecl) (*FunctionDecl, error) { + if f.singleton != nil { + return nil, fmt.Errorf("function already has a singleton binding: %s", f.Name()) + } + f.singleton = &functions.Overload{ + Operator: f.Name(), + Unary: fn, + OperandTrait: trait, + } + return f, nil + } +} + +// SingletonBinaryBinding creates a singleton function definition to be used with all function overloads. +// +// Note, this approach works well if operand is expected to have a specific trait which it implements, +// e.g. traits.ContainerType. Otherwise, prefer per-overload function bindings. 
+func SingletonBinaryBinding(fn functions.BinaryOp, traits ...int) FunctionOpt { + trait := 0 + for _, t := range traits { + trait = trait | t + } + return func(f *FunctionDecl) (*FunctionDecl, error) { + if f.singleton != nil { + return nil, fmt.Errorf("function already has a singleton binding: %s", f.Name()) + } + f.singleton = &functions.Overload{ + Operator: f.Name(), + Binary: fn, + OperandTrait: trait, + } + return f, nil + } +} + +// SingletonFunctionBinding creates a singleton function definition to be used with all function overloads. +// +// Note, this approach works well if operand is expected to have a specific trait which it implements, +// e.g. traits.ContainerType. Otherwise, prefer per-overload function bindings. +func SingletonFunctionBinding(fn functions.FunctionOp, traits ...int) FunctionOpt { + trait := 0 + for _, t := range traits { + trait = trait | t + } + return func(f *FunctionDecl) (*FunctionDecl, error) { + if f.singleton != nil { + return nil, fmt.Errorf("function already has a singleton binding: %s", f.Name()) + } + f.singleton = &functions.Overload{ + Operator: f.Name(), + Function: fn, + OperandTrait: trait, + } + return f, nil + } +} + +// Overload defines a new global overload with an overload id, argument types, and result type. Through the +// use of OverloadOpt options, the overload may also be configured with a binding, an operand trait, and to +// be non-strict. +// +// Note: function bindings should be commonly configured with Overload instances whereas operand traits and +// strict-ness should be rare occurrences. +func Overload(overloadID string, + args []*types.Type, resultType *types.Type, + opts ...OverloadOpt) FunctionOpt { + return newOverload(overloadID, false, args, resultType, opts...) +} + +// MemberOverload defines a new receiver-style overload (or member function) with an overload id, argument types, +// and result type. Through the use of OverloadOpt options, the overload may also be configured with a binding, +// an operand trait, and to be non-strict. +// +// Note: function bindings should be commonly configured with Overload instances whereas operand traits and +// strict-ness should be rare occurrences. +func MemberOverload(overloadID string, + args []*types.Type, resultType *types.Type, + opts ...OverloadOpt) FunctionOpt { + return newOverload(overloadID, true, args, resultType, opts...) +} + +func newOverload(overloadID string, + memberFunction bool, args []*types.Type, resultType *types.Type, + opts ...OverloadOpt) FunctionOpt { + return func(f *FunctionDecl) (*FunctionDecl, error) { + overload, err := newOverloadInternal(overloadID, memberFunction, args, resultType, opts...) + if err != nil { + return nil, err + } + err = f.AddOverload(overload) + if err != nil { + return nil, err + } + return f, nil + } +} + +func newOverloadInternal(overloadID string, + memberFunction bool, args []*types.Type, resultType *types.Type, + opts ...OverloadOpt) (*OverloadDecl, error) { + overload := &OverloadDecl{ + id: overloadID, + argTypes: args, + resultType: resultType, + isMemberFunction: memberFunction, + } + var err error + for _, opt := range opts { + overload, err = opt(overload) + if err != nil { + return nil, err + } + } + return overload, nil +} + +// OverloadDecl contains the definition of a single overload id with a specific signature, and an optional +// implementation. 
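
(Editorial aside before the OverloadDecl type that follows: a minimal sketch of how the declaration options added in this file compose. The function name "size" and overload id "list_size_sketch" are illustrative placeholders rather than the identifiers CEL's standard library actually registers; decls and types are the packages introduced/used in this patch.)

    // assumed imports: github.com/google/cel-go/common/decls, github.com/google/cel-go/common/types
    paramT := types.NewTypeParamType("T")
    sizeFn, err := decls.NewFunction("size",
        decls.MemberOverload("list_size_sketch", // receiver-style: ArgTypes()[0] is the operand
            []*types.Type{types.NewListType(paramT)}, types.IntType))
    if err != nil {
        // NewFunction fails if no overload is given or if overload signatures collide
    }
    _ = sizeFn
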
+type OverloadDecl struct { + id string + argTypes []*types.Type + resultType *types.Type + isMemberFunction bool + // nonStrict indicates that the function will accept error and unknown arguments as inputs. + nonStrict bool + // operandTrait indicates whether the member argument should have a specific type-trait. + // + // This is useful for creating overloads which operate on a type-interface rather than a concrete type. + operandTrait int + + // Function implementation options. Optional, but encouraged. + // unaryOp is a function binding that takes a single argument. + unaryOp functions.UnaryOp + // binaryOp is a function binding that takes two arguments. + binaryOp functions.BinaryOp + // functionOp is a catch-all for zero-arity and three-plus arity functions. + functionOp functions.FunctionOp +} + +// ID mirrors the overload signature and provides a unique id which may be referenced within the type-checker +// and interpreter to optimize performance. +// +// The ID format is usually one of two styles: +// global: __ +// member: ___ +func (o *OverloadDecl) ID() string { + if o == nil { + return "" + } + return o.id +} + +// ArgTypes contains the set of argument types expected by the overload. +// +// For member functions ArgTypes[0] represents the member operand type. +func (o *OverloadDecl) ArgTypes() []*types.Type { + if o == nil { + return emptyArgs + } + return o.argTypes +} + +// IsMemberFunction indicates whether the overload is a member function +func (o *OverloadDecl) IsMemberFunction() bool { + if o == nil { + return false + } + return o.isMemberFunction +} + +// IsNonStrict returns whether the overload accepts errors and unknown values as arguments. +func (o *OverloadDecl) IsNonStrict() bool { + if o == nil { + return false + } + return o.nonStrict +} + +// OperandTrait returns the trait mask of the first operand to the overload call, e.g. +// `traits.Indexer` +func (o *OverloadDecl) OperandTrait() int { + if o == nil { + return 0 + } + return o.operandTrait +} + +// ResultType indicates the output type from calling the function. +func (o *OverloadDecl) ResultType() *types.Type { + if o == nil { + // *types.Type is nil-safe + return nil + } + return o.resultType +} + +// TypeParams returns the type parameter names associated with the overload. +func (o *OverloadDecl) TypeParams() []string { + typeParams := map[string]struct{}{} + collectParamNames(typeParams, o.ResultType()) + for _, arg := range o.ArgTypes() { + collectParamNames(typeParams, arg) + } + params := make([]string, 0, len(typeParams)) + for param := range typeParams { + params = append(params, param) + } + return params +} + +// SignatureEquals determines whether the incoming overload declaration signature is equal to the current signature. +// +// Result type, operand trait, and strict-ness are not considered as part of signature equality. +func (o *OverloadDecl) SignatureEquals(other *OverloadDecl) bool { + if o == other { + return true + } + if o.ID() != other.ID() || o.IsMemberFunction() != other.IsMemberFunction() || len(o.ArgTypes()) != len(other.ArgTypes()) { + return false + } + for i, at := range o.ArgTypes() { + oat := other.ArgTypes()[i] + if !at.IsEquivalentType(oat) { + return false + } + } + return o.ResultType().IsEquivalentType(other.ResultType()) +} + +// SignatureOverlaps indicates whether two functions have non-equal, but overloapping function signatures. +// +// For example, list(dyn) collides with list(string) since the 'dyn' type can contain a 'string' type. 
+func (o *OverloadDecl) SignatureOverlaps(other *OverloadDecl) bool { + if o.IsMemberFunction() != other.IsMemberFunction() || len(o.ArgTypes()) != len(other.ArgTypes()) { + return false + } + argsOverlap := true + for i, argType := range o.ArgTypes() { + otherArgType := other.ArgTypes()[i] + argsOverlap = argsOverlap && + (argType.IsAssignableType(otherArgType) || + otherArgType.IsAssignableType(argType)) + } + return argsOverlap +} + +// hasBinding indicates whether the overload already has a definition. +func (o *OverloadDecl) hasBinding() bool { + return o != nil && (o.unaryOp != nil || o.binaryOp != nil || o.functionOp != nil) +} + +// guardedUnaryOp creates an invocation guard around the provided unary operator, if one is defined. +func (o *OverloadDecl) guardedUnaryOp(funcName string, disableTypeGuards bool) functions.UnaryOp { + if o.unaryOp == nil { + return nil + } + return func(arg ref.Val) ref.Val { + if !o.matchesRuntimeUnarySignature(disableTypeGuards, arg) { + return MaybeNoSuchOverload(funcName, arg) + } + return o.unaryOp(arg) + } +} + +// guardedBinaryOp creates an invocation guard around the provided binary operator, if one is defined. +func (o *OverloadDecl) guardedBinaryOp(funcName string, disableTypeGuards bool) functions.BinaryOp { + if o.binaryOp == nil { + return nil + } + return func(arg1, arg2 ref.Val) ref.Val { + if !o.matchesRuntimeBinarySignature(disableTypeGuards, arg1, arg2) { + return MaybeNoSuchOverload(funcName, arg1, arg2) + } + return o.binaryOp(arg1, arg2) + } +} + +// guardedFunctionOp creates an invocation guard around the provided variadic function binding, if one is provided. +func (o *OverloadDecl) guardedFunctionOp(funcName string, disableTypeGuards bool) functions.FunctionOp { + if o.functionOp == nil { + return nil + } + return func(args ...ref.Val) ref.Val { + if !o.matchesRuntimeSignature(disableTypeGuards, args...) { + return MaybeNoSuchOverload(funcName, args...) + } + return o.functionOp(args...) + } +} + +// matchesRuntimeUnarySignature indicates whether the argument type is runtime assiganble to the overload's expected argument. +func (o *OverloadDecl) matchesRuntimeUnarySignature(disableTypeGuards bool, arg ref.Val) bool { + return matchRuntimeArgType(o.IsNonStrict(), disableTypeGuards, o.ArgTypes()[0], arg) && + matchOperandTrait(o.OperandTrait(), arg) +} + +// matchesRuntimeBinarySignature indicates whether the argument types are runtime assiganble to the overload's expected arguments. +func (o *OverloadDecl) matchesRuntimeBinarySignature(disableTypeGuards bool, arg1, arg2 ref.Val) bool { + return matchRuntimeArgType(o.IsNonStrict(), disableTypeGuards, o.ArgTypes()[0], arg1) && + matchRuntimeArgType(o.IsNonStrict(), disableTypeGuards, o.ArgTypes()[1], arg2) && + matchOperandTrait(o.OperandTrait(), arg1) +} + +// matchesRuntimeSignature indicates whether the argument types are runtime assiganble to the overload's expected arguments. 
+func (o *OverloadDecl) matchesRuntimeSignature(disableTypeGuards bool, args ...ref.Val) bool { + if len(args) != len(o.ArgTypes()) { + return false + } + if len(args) == 0 { + return true + } + for i, arg := range args { + if !matchRuntimeArgType(o.IsNonStrict(), disableTypeGuards, o.ArgTypes()[i], arg) { + return false + } + } + return matchOperandTrait(o.OperandTrait(), args[0]) +} + +func matchRuntimeArgType(nonStrict, disableTypeGuards bool, argType *types.Type, arg ref.Val) bool { + if nonStrict && (disableTypeGuards || types.IsUnknownOrError(arg)) { + return true + } + if types.IsUnknownOrError(arg) { + return false + } + return disableTypeGuards || argType.IsAssignableRuntimeType(arg) +} + +func matchOperandTrait(trait int, arg ref.Val) bool { + return trait == 0 || arg.Type().HasTrait(trait) || types.IsUnknownOrError(arg) +} + +// OverloadOpt is a functional option for configuring a function overload. +type OverloadOpt func(*OverloadDecl) (*OverloadDecl, error) + +// UnaryBinding provides the implementation of a unary overload. The provided function is protected by a runtime +// type-guard which ensures runtime type agreement between the overload signature and runtime argument types. +func UnaryBinding(binding functions.UnaryOp) OverloadOpt { + return func(o *OverloadDecl) (*OverloadDecl, error) { + if o.hasBinding() { + return nil, fmt.Errorf("overload already has a binding: %s", o.ID()) + } + if len(o.ArgTypes()) != 1 { + return nil, fmt.Errorf("unary function bound to non-unary overload: %s", o.ID()) + } + o.unaryOp = binding + return o, nil + } +} + +// BinaryBinding provides the implementation of a binary overload. The provided function is protected by a runtime +// type-guard which ensures runtime type agreement between the overload signature and runtime argument types. +func BinaryBinding(binding functions.BinaryOp) OverloadOpt { + return func(o *OverloadDecl) (*OverloadDecl, error) { + if o.hasBinding() { + return nil, fmt.Errorf("overload already has a binding: %s", o.ID()) + } + if len(o.ArgTypes()) != 2 { + return nil, fmt.Errorf("binary function bound to non-binary overload: %s", o.ID()) + } + o.binaryOp = binding + return o, nil + } +} + +// FunctionBinding provides the implementation of a variadic overload. The provided function is protected by a runtime +// type-guard which ensures runtime type agreement between the overload signature and runtime argument types. +func FunctionBinding(binding functions.FunctionOp) OverloadOpt { + return func(o *OverloadDecl) (*OverloadDecl, error) { + if o.hasBinding() { + return nil, fmt.Errorf("overload already has a binding: %s", o.ID()) + } + o.functionOp = binding + return o, nil + } +} + +// OverloadIsNonStrict enables the function to be called with error and unknown argument values. +// +// Note: do not use this option unless absoluately necessary as it should be an uncommon feature. +func OverloadIsNonStrict() OverloadOpt { + return func(o *OverloadDecl) (*OverloadDecl, error) { + o.nonStrict = true + return o, nil + } +} + +// OverloadOperandTrait configures a set of traits which the first argument to the overload must implement in order to be +// successfully invoked. +func OverloadOperandTrait(trait int) OverloadOpt { + return func(o *OverloadDecl) (*OverloadDecl, error) { + o.operandTrait = trait + return o, nil + } +} + +// NewConstant creates a new constant declaration. 
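
(Aside: a hedged sketch of an overload that also carries a runtime binding. "reverse" and "reverse_string" are hypothetical names, not part of CEL's standard library; per the doc comments above, UnaryBinding supplies the implementation and Bindings() wraps it in the runtime type guard and registers a dispatcher under the function name.)

    // assumed imports: .../common/decls, .../common/types, .../common/types/ref
    revFn, err := decls.NewFunction("reverse",
        decls.Overload("reverse_string", []*types.Type{types.StringType}, types.StringType,
            decls.UnaryBinding(func(v ref.Val) ref.Val {
                // the type guard ensures v is a types.String before this binding is invoked
                r := []rune(string(v.(types.String)))
                for i, j := 0, len(r)-1; i < j; i, j = i+1, j-1 {
                    r[i], r[j] = r[j], r[i]
                }
                return types.String(r)
            })))
    if err != nil {
        // handle declaration error
    }
    bindings, err := revFn.Bindings() // per-overload binding plus an entry under "reverse"
    _, _ = bindings, err
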
+func NewConstant(name string, t *types.Type, v ref.Val) *VariableDecl { + return &VariableDecl{name: name, varType: t, value: v} +} + +// NewVariable creates a new variable declaration. +func NewVariable(name string, t *types.Type) *VariableDecl { + return &VariableDecl{name: name, varType: t} +} + +// VariableDecl defines a variable declaration which may optionally have a constant value. +type VariableDecl struct { + name string + varType *types.Type + value ref.Val +} + +// Name returns the fully-qualified variable name +func (v *VariableDecl) Name() string { + if v == nil { + return "" + } + return v.name +} + +// Type returns the types.Type value associated with the variable. +func (v *VariableDecl) Type() *types.Type { + if v == nil { + // types.Type is nil-safe + return nil + } + return v.varType +} + +// Value returns the constant value associated with the declaration. +func (v *VariableDecl) Value() ref.Val { + if v == nil { + return nil + } + return v.value +} + +// DeclarationIsEquivalent returns true if one variable declaration has the same name and same type as the input. +func (v *VariableDecl) DeclarationIsEquivalent(other *VariableDecl) bool { + if v == other { + return true + } + return v.Name() == other.Name() && v.Type().IsEquivalentType(other.Type()) +} + +// VariableDeclToExprDecl converts a go-native variable declaration into a protobuf-type variable declaration. +func VariableDeclToExprDecl(v *VariableDecl) (*exprpb.Decl, error) { + varType, err := types.TypeToExprType(v.Type()) + if err != nil { + return nil, err + } + return chkdecls.NewVar(v.Name(), varType), nil +} + +// TypeVariable creates a new type identifier for use within a types.Provider +func TypeVariable(t *types.Type) *VariableDecl { + return NewVariable(t.TypeName(), types.NewTypeTypeWithParam(t)) +} + +// FunctionDeclToExprDecl converts a go-native function declaration into a protobuf-typed function declaration. 
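
(Aside: variable and constant declarations from this same file, plus the conversion back to the protobuf form used by the legacy checker APIs; "request.size" and "max_size" are made-up names for illustration.)

    // assumed imports: .../common/decls, .../common/types
    reqSize := decls.NewVariable("request.size", types.IntType)
    maxSize := decls.NewConstant("max_size", types.IntType, types.Int(1024))
    pbDecl, err := decls.VariableDeclToExprDecl(reqSize) // *exprpb.Decl
    _, _, _ = maxSize, pbDecl, err
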
+func FunctionDeclToExprDecl(f *FunctionDecl) (*exprpb.Decl, error) { + overloads := make([]*exprpb.Decl_FunctionDecl_Overload, len(f.overloads)) + for i, oID := range f.overloadOrdinals { + o := f.overloads[oID] + paramNames := map[string]struct{}{} + argTypes := make([]*exprpb.Type, len(o.ArgTypes())) + for j, a := range o.ArgTypes() { + collectParamNames(paramNames, a) + at, err := types.TypeToExprType(a) + if err != nil { + return nil, err + } + argTypes[j] = at + } + collectParamNames(paramNames, o.ResultType()) + resultType, err := types.TypeToExprType(o.ResultType()) + if err != nil { + return nil, err + } + if len(paramNames) == 0 { + if o.IsMemberFunction() { + overloads[i] = chkdecls.NewInstanceOverload(oID, argTypes, resultType) + } else { + overloads[i] = chkdecls.NewOverload(oID, argTypes, resultType) + } + } else { + params := []string{} + for pn := range paramNames { + params = append(params, pn) + } + if o.IsMemberFunction() { + overloads[i] = chkdecls.NewParameterizedInstanceOverload(oID, argTypes, resultType, params) + } else { + overloads[i] = chkdecls.NewParameterizedOverload(oID, argTypes, resultType, params) + } + } + } + return chkdecls.NewFunction(f.Name(), overloads...), nil +} + +func collectParamNames(paramNames map[string]struct{}, arg *types.Type) { + if arg.Kind() == types.TypeParamKind { + paramNames[arg.TypeName()] = struct{}{} + } + for _, param := range arg.Parameters() { + collectParamNames(paramNames, param) + } +} + +var ( + emptyArgs = []*types.Type{} +) diff --git a/vendor/github.com/google/cel-go/common/error.go b/vendor/github.com/google/cel-go/common/error.go index f91f7f8d1..774dcb5b4 100644 --- a/vendor/github.com/google/cel-go/common/error.go +++ b/vendor/github.com/google/cel-go/common/error.go @@ -22,10 +22,16 @@ import ( "golang.org/x/text/width" ) -// Error type which references a location within source and a message. +// NewError creates an error associated with an expression id with the given message at the given location. +func NewError(id int64, message string, location Location) *Error { + return &Error{Message: message, Location: location, ExprID: id} +} + +// Error type which references an expression id, a location within source, and a message. type Error struct { Location Location Message string + ExprID int64 } const ( diff --git a/vendor/github.com/google/cel-go/common/errors.go b/vendor/github.com/google/cel-go/common/errors.go index 1565085ab..25adc73d8 100644 --- a/vendor/github.com/google/cel-go/common/errors.go +++ b/vendor/github.com/google/cel-go/common/errors.go @@ -22,7 +22,7 @@ import ( // Errors type which contains a list of errors observed during parsing. type Errors struct { - errors []Error + errors []*Error source Source numErrors int maxErrorsToReport int @@ -31,7 +31,7 @@ type Errors struct { // NewErrors creates a new instance of the Errors type. func NewErrors(source Source) *Errors { return &Errors{ - errors: []Error{}, + errors: []*Error{}, source: source, maxErrorsToReport: 100, } @@ -39,11 +39,17 @@ func NewErrors(source Source) *Errors { // ReportError records an error at a source location. func (e *Errors) ReportError(l Location, format string, args ...any) { + e.ReportErrorAtID(0, l, format, args...) +} + +// ReportErrorAtID records an error at a source location and expression id. 
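
(Aside on the common.Errors changes: each reported error now carries the id of the offending expression. A minimal sketch, assuming NewStringSource from the source.go hunk above and the package's NoLocation sentinel; the id 2 and the message text are arbitrary.)

    // assumed import: github.com/google/cel-go/common
    errs := common.NewErrors(common.NewStringSource("a + b", "<input>"))
    errs.ReportErrorAtID(2, common.NoLocation, "undeclared reference to '%s'", "b")
    for _, e := range errs.GetErrors() {
        _ = e.ExprID // the expression id now travels with each *Error
    }
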
+func (e *Errors) ReportErrorAtID(id int64, l Location, format string, args ...any) { e.numErrors++ if e.numErrors > e.maxErrorsToReport { return } - err := Error{ + err := &Error{ + ExprID: id, Location: l, Message: fmt.Sprintf(format, args...), } @@ -51,14 +57,14 @@ func (e *Errors) ReportError(l Location, format string, args ...any) { } // GetErrors returns the list of observed errors. -func (e *Errors) GetErrors() []Error { +func (e *Errors) GetErrors() []*Error { return e.errors[:] } // Append creates a new Errors object with the current and input errors. -func (e *Errors) Append(errs []Error) *Errors { +func (e *Errors) Append(errs []*Error) *Errors { return &Errors{ - errors: append(e.errors, errs...), + errors: append(e.errors[:], errs...), source: e.source, numErrors: e.numErrors + len(errs), maxErrorsToReport: e.maxErrorsToReport, diff --git a/vendor/github.com/google/cel-go/common/functions/BUILD.bazel b/vendor/github.com/google/cel-go/common/functions/BUILD.bazel new file mode 100644 index 000000000..3cc27d60c --- /dev/null +++ b/vendor/github.com/google/cel-go/common/functions/BUILD.bazel @@ -0,0 +1,17 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library") + +package( + default_visibility = ["//visibility:public"], + licenses = ["notice"], # Apache 2.0 +) + +go_library( + name = "go_default_library", + srcs = [ + "functions.go", + ], + importpath = "github.com/google/cel-go/common/functions", + deps = [ + "//common/types/ref:go_default_library", + ], +) diff --git a/vendor/github.com/google/cel-go/common/functions/functions.go b/vendor/github.com/google/cel-go/common/functions/functions.go new file mode 100644 index 000000000..67f4a5944 --- /dev/null +++ b/vendor/github.com/google/cel-go/common/functions/functions.go @@ -0,0 +1,61 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package functions defines the standard builtin functions supported by the interpreter +package functions + +import "github.com/google/cel-go/common/types/ref" + +// Overload defines a named overload of a function, indicating an operand trait +// which must be present on the first argument to the overload as well as one +// of either a unary, binary, or function implementation. +// +// The majority of operators within the expression language are unary or binary +// and the specializations simplify the call contract for implementers of +// types with operator overloads. Any added complexity is assumed to be handled +// by the generic FunctionOp. +type Overload struct { + // Operator name as written in an expression or defined within + // operators.go. + Operator string + + // Operand trait used to dispatch the call. The zero-value indicates a + // global function overload or that one of the Unary / Binary / Function + // definitions should be used to execute the call. + OperandTrait int + + // Unary defines the overload with a UnaryOp implementation. May be nil. + Unary UnaryOp + + // Binary defines the overload with a BinaryOp implementation. May be nil. 
+ Binary BinaryOp + + // Function defines the overload with a FunctionOp implementation. May be + // nil. + Function FunctionOp + + // NonStrict specifies whether the Overload will tolerate arguments that + // are types.Err or types.Unknown. + NonStrict bool +} + +// UnaryOp is a function that takes a single value and produces an output. +type UnaryOp func(value ref.Val) ref.Val + +// BinaryOp is a function that takes two values and produces an output. +type BinaryOp func(lhs ref.Val, rhs ref.Val) ref.Val + +// FunctionOp is a function with accepts zero or more arguments and produces +// a value or error as a result. +type FunctionOp func(values ...ref.Val) ref.Val diff --git a/vendor/github.com/google/cel-go/common/source.go b/vendor/github.com/google/cel-go/common/source.go index 52377d930..acf22bdf1 100644 --- a/vendor/github.com/google/cel-go/common/source.go +++ b/vendor/github.com/google/cel-go/common/source.go @@ -64,7 +64,6 @@ type sourceImpl struct { runes.Buffer description string lineOffsets []int32 - idOffsets map[int64]int32 } var _ runes.Buffer = &sourceImpl{} @@ -92,7 +91,6 @@ func NewStringSource(contents string, description string) Source { Buffer: runes.NewBuffer(contents), description: description, lineOffsets: offsets, - idOffsets: map[int64]int32{}, } } @@ -102,7 +100,6 @@ func NewInfoSource(info *exprpb.SourceInfo) Source { Buffer: runes.NewBuffer(""), description: info.GetLocation(), lineOffsets: info.GetLineOffsets(), - idOffsets: info.GetPositions(), } } diff --git a/vendor/github.com/google/cel-go/common/stdlib/BUILD.bazel b/vendor/github.com/google/cel-go/common/stdlib/BUILD.bazel new file mode 100644 index 000000000..c130a93f6 --- /dev/null +++ b/vendor/github.com/google/cel-go/common/stdlib/BUILD.bazel @@ -0,0 +1,25 @@ +load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") + +package( + default_visibility = ["//visibility:public"], + licenses = ["notice"], # Apache 2.0 +) + +go_library( + name = "go_default_library", + srcs = [ + "standard.go", + ], + importpath = "github.com/google/cel-go/common/stdlib", + deps = [ + "//checker/decls:go_default_library", + "//common/decls:go_default_library", + "//common/functions:go_default_library", + "//common/operators:go_default_library", + "//common/overloads:go_default_library", + "//common/types:go_default_library", + "//common/types/ref:go_default_library", + "//common/types/traits:go_default_library", + "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library", + ], +) \ No newline at end of file diff --git a/vendor/github.com/google/cel-go/common/stdlib/standard.go b/vendor/github.com/google/cel-go/common/stdlib/standard.go new file mode 100644 index 000000000..d02cb64bf --- /dev/null +++ b/vendor/github.com/google/cel-go/common/stdlib/standard.go @@ -0,0 +1,661 @@ +// Copyright 2018 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +// Package stdlib contains all of the standard library function declarations and definitions for CEL. 
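
(Aside on the new common/functions package above: Overload is the plain runtime binding that decls.FunctionDecl.Bindings() produces. A hedged sketch of constructing one directly; "to_upper" is a hypothetical operator name and strings.ToUpper is used only for illustration.)

    // assumed imports: strings, .../common/functions, .../common/types, .../common/types/ref
    upper := &functions.Overload{
        Operator: "to_upper",
        Unary: func(v ref.Val) ref.Val {
            s, ok := v.(types.String)
            if !ok {
                return types.MaybeNoSuchOverloadErr(v)
            }
            return types.String(strings.ToUpper(string(s)))
        },
    }
    _ = upper
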
+package stdlib + +import ( + "github.com/google/cel-go/common/decls" + "github.com/google/cel-go/common/functions" + "github.com/google/cel-go/common/operators" + "github.com/google/cel-go/common/overloads" + "github.com/google/cel-go/common/types" + "github.com/google/cel-go/common/types/ref" + "github.com/google/cel-go/common/types/traits" + + exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" +) + +var ( + stdFunctions []*decls.FunctionDecl + stdFnDecls []*exprpb.Decl + stdTypes []*decls.VariableDecl + stdTypeDecls []*exprpb.Decl +) + +func init() { + paramA := types.NewTypeParamType("A") + paramB := types.NewTypeParamType("B") + listOfA := types.NewListType(paramA) + mapOfAB := types.NewMapType(paramA, paramB) + + stdTypes = []*decls.VariableDecl{ + decls.TypeVariable(types.BoolType), + decls.TypeVariable(types.BytesType), + decls.TypeVariable(types.DoubleType), + decls.TypeVariable(types.DurationType), + decls.TypeVariable(types.IntType), + decls.TypeVariable(listOfA), + decls.TypeVariable(mapOfAB), + decls.TypeVariable(types.NullType), + decls.TypeVariable(types.StringType), + decls.TypeVariable(types.TimestampType), + decls.TypeVariable(types.TypeType), + decls.TypeVariable(types.UintType), + } + + stdTypeDecls = make([]*exprpb.Decl, 0, len(stdTypes)) + for _, stdType := range stdTypes { + typeVar, err := decls.VariableDeclToExprDecl(stdType) + if err != nil { + panic(err) + } + stdTypeDecls = append(stdTypeDecls, typeVar) + } + + stdFunctions = []*decls.FunctionDecl{ + // Logical operators. Special-cased within the interpreter. + // Note, the singleton binding prevents extensions from overriding the operator behavior. + function(operators.Conditional, + decls.Overload(overloads.Conditional, argTypes(types.BoolType, paramA, paramA), paramA, + decls.OverloadIsNonStrict()), + decls.SingletonFunctionBinding(noFunctionOverrides)), + function(operators.LogicalAnd, + decls.Overload(overloads.LogicalAnd, argTypes(types.BoolType, types.BoolType), types.BoolType, + decls.OverloadIsNonStrict()), + decls.SingletonBinaryBinding(noBinaryOverrides)), + function(operators.LogicalOr, + decls.Overload(overloads.LogicalOr, argTypes(types.BoolType, types.BoolType), types.BoolType, + decls.OverloadIsNonStrict()), + decls.SingletonBinaryBinding(noBinaryOverrides)), + function(operators.LogicalNot, + decls.Overload(overloads.LogicalNot, argTypes(types.BoolType), types.BoolType), + decls.SingletonUnaryBinding(func(val ref.Val) ref.Val { + b, ok := val.(types.Bool) + if !ok { + return types.MaybeNoSuchOverloadErr(val) + } + return b.Negate() + })), + + // Comprehension short-circuiting related function + function(operators.NotStrictlyFalse, + decls.Overload(overloads.NotStrictlyFalse, argTypes(types.BoolType), types.BoolType, + decls.OverloadIsNonStrict(), + decls.UnaryBinding(notStrictlyFalse))), + // Deprecated: __not_strictly_false__ + function(operators.OldNotStrictlyFalse, + decls.DisableDeclaration(true), // safe deprecation + decls.Overload(operators.OldNotStrictlyFalse, argTypes(types.BoolType), types.BoolType, + decls.OverloadIsNonStrict(), + decls.UnaryBinding(notStrictlyFalse))), + + // Equality / inequality. 
Special-cased in the interpreter + function(operators.Equals, + decls.Overload(overloads.Equals, argTypes(paramA, paramA), types.BoolType), + decls.SingletonBinaryBinding(noBinaryOverrides)), + function(operators.NotEquals, + decls.Overload(overloads.NotEquals, argTypes(paramA, paramA), types.BoolType), + decls.SingletonBinaryBinding(noBinaryOverrides)), + + // Mathematical operators + function(operators.Add, + decls.Overload(overloads.AddBytes, + argTypes(types.BytesType, types.BytesType), types.BytesType), + decls.Overload(overloads.AddDouble, + argTypes(types.DoubleType, types.DoubleType), types.DoubleType), + decls.Overload(overloads.AddDurationDuration, + argTypes(types.DurationType, types.DurationType), types.DurationType), + decls.Overload(overloads.AddDurationTimestamp, + argTypes(types.DurationType, types.TimestampType), types.TimestampType), + decls.Overload(overloads.AddTimestampDuration, + argTypes(types.TimestampType, types.DurationType), types.TimestampType), + decls.Overload(overloads.AddInt64, + argTypes(types.IntType, types.IntType), types.IntType), + decls.Overload(overloads.AddList, + argTypes(listOfA, listOfA), listOfA), + decls.Overload(overloads.AddString, + argTypes(types.StringType, types.StringType), types.StringType), + decls.Overload(overloads.AddUint64, + argTypes(types.UintType, types.UintType), types.UintType), + decls.SingletonBinaryBinding(func(lhs, rhs ref.Val) ref.Val { + return lhs.(traits.Adder).Add(rhs) + }, traits.AdderType)), + function(operators.Divide, + decls.Overload(overloads.DivideDouble, + argTypes(types.DoubleType, types.DoubleType), types.DoubleType), + decls.Overload(overloads.DivideInt64, + argTypes(types.IntType, types.IntType), types.IntType), + decls.Overload(overloads.DivideUint64, + argTypes(types.UintType, types.UintType), types.UintType), + decls.SingletonBinaryBinding(func(lhs, rhs ref.Val) ref.Val { + return lhs.(traits.Divider).Divide(rhs) + }, traits.DividerType)), + function(operators.Modulo, + decls.Overload(overloads.ModuloInt64, + argTypes(types.IntType, types.IntType), types.IntType), + decls.Overload(overloads.ModuloUint64, + argTypes(types.UintType, types.UintType), types.UintType), + decls.SingletonBinaryBinding(func(lhs, rhs ref.Val) ref.Val { + return lhs.(traits.Modder).Modulo(rhs) + }, traits.ModderType)), + function(operators.Multiply, + decls.Overload(overloads.MultiplyDouble, + argTypes(types.DoubleType, types.DoubleType), types.DoubleType), + decls.Overload(overloads.MultiplyInt64, + argTypes(types.IntType, types.IntType), types.IntType), + decls.Overload(overloads.MultiplyUint64, + argTypes(types.UintType, types.UintType), types.UintType), + decls.SingletonBinaryBinding(func(lhs, rhs ref.Val) ref.Val { + return lhs.(traits.Multiplier).Multiply(rhs) + }, traits.MultiplierType)), + function(operators.Negate, + decls.Overload(overloads.NegateDouble, argTypes(types.DoubleType), types.DoubleType), + decls.Overload(overloads.NegateInt64, argTypes(types.IntType), types.IntType), + decls.SingletonUnaryBinding(func(val ref.Val) ref.Val { + if types.IsBool(val) { + return types.MaybeNoSuchOverloadErr(val) + } + return val.(traits.Negater).Negate() + }, traits.NegatorType)), + function(operators.Subtract, + decls.Overload(overloads.SubtractDouble, + argTypes(types.DoubleType, types.DoubleType), types.DoubleType), + decls.Overload(overloads.SubtractDurationDuration, + argTypes(types.DurationType, types.DurationType), types.DurationType), + decls.Overload(overloads.SubtractInt64, + argTypes(types.IntType, types.IntType), 
types.IntType), + decls.Overload(overloads.SubtractTimestampDuration, + argTypes(types.TimestampType, types.DurationType), types.TimestampType), + decls.Overload(overloads.SubtractTimestampTimestamp, + argTypes(types.TimestampType, types.TimestampType), types.DurationType), + decls.Overload(overloads.SubtractUint64, + argTypes(types.UintType, types.UintType), types.UintType), + decls.SingletonBinaryBinding(func(lhs, rhs ref.Val) ref.Val { + return lhs.(traits.Subtractor).Subtract(rhs) + }, traits.SubtractorType)), + + // Relations operators + + function(operators.Less, + decls.Overload(overloads.LessBool, + argTypes(types.BoolType, types.BoolType), types.BoolType), + decls.Overload(overloads.LessInt64, + argTypes(types.IntType, types.IntType), types.BoolType), + decls.Overload(overloads.LessInt64Double, + argTypes(types.IntType, types.DoubleType), types.BoolType), + decls.Overload(overloads.LessInt64Uint64, + argTypes(types.IntType, types.UintType), types.BoolType), + decls.Overload(overloads.LessUint64, + argTypes(types.UintType, types.UintType), types.BoolType), + decls.Overload(overloads.LessUint64Double, + argTypes(types.UintType, types.DoubleType), types.BoolType), + decls.Overload(overloads.LessUint64Int64, + argTypes(types.UintType, types.IntType), types.BoolType), + decls.Overload(overloads.LessDouble, + argTypes(types.DoubleType, types.DoubleType), types.BoolType), + decls.Overload(overloads.LessDoubleInt64, + argTypes(types.DoubleType, types.IntType), types.BoolType), + decls.Overload(overloads.LessDoubleUint64, + argTypes(types.DoubleType, types.UintType), types.BoolType), + decls.Overload(overloads.LessString, + argTypes(types.StringType, types.StringType), types.BoolType), + decls.Overload(overloads.LessBytes, + argTypes(types.BytesType, types.BytesType), types.BoolType), + decls.Overload(overloads.LessTimestamp, + argTypes(types.TimestampType, types.TimestampType), types.BoolType), + decls.Overload(overloads.LessDuration, + argTypes(types.DurationType, types.DurationType), types.BoolType), + decls.SingletonBinaryBinding(func(lhs, rhs ref.Val) ref.Val { + cmp := lhs.(traits.Comparer).Compare(rhs) + if cmp == types.IntNegOne { + return types.True + } + if cmp == types.IntOne || cmp == types.IntZero { + return types.False + } + return cmp + }, traits.ComparerType)), + + function(operators.LessEquals, + decls.Overload(overloads.LessEqualsBool, + argTypes(types.BoolType, types.BoolType), types.BoolType), + decls.Overload(overloads.LessEqualsInt64, + argTypes(types.IntType, types.IntType), types.BoolType), + decls.Overload(overloads.LessEqualsInt64Double, + argTypes(types.IntType, types.DoubleType), types.BoolType), + decls.Overload(overloads.LessEqualsInt64Uint64, + argTypes(types.IntType, types.UintType), types.BoolType), + decls.Overload(overloads.LessEqualsUint64, + argTypes(types.UintType, types.UintType), types.BoolType), + decls.Overload(overloads.LessEqualsUint64Double, + argTypes(types.UintType, types.DoubleType), types.BoolType), + decls.Overload(overloads.LessEqualsUint64Int64, + argTypes(types.UintType, types.IntType), types.BoolType), + decls.Overload(overloads.LessEqualsDouble, + argTypes(types.DoubleType, types.DoubleType), types.BoolType), + decls.Overload(overloads.LessEqualsDoubleInt64, + argTypes(types.DoubleType, types.IntType), types.BoolType), + decls.Overload(overloads.LessEqualsDoubleUint64, + argTypes(types.DoubleType, types.UintType), types.BoolType), + decls.Overload(overloads.LessEqualsString, + argTypes(types.StringType, types.StringType), 
types.BoolType), + decls.Overload(overloads.LessEqualsBytes, + argTypes(types.BytesType, types.BytesType), types.BoolType), + decls.Overload(overloads.LessEqualsTimestamp, + argTypes(types.TimestampType, types.TimestampType), types.BoolType), + decls.Overload(overloads.LessEqualsDuration, + argTypes(types.DurationType, types.DurationType), types.BoolType), + decls.SingletonBinaryBinding(func(lhs, rhs ref.Val) ref.Val { + cmp := lhs.(traits.Comparer).Compare(rhs) + if cmp == types.IntNegOne || cmp == types.IntZero { + return types.True + } + if cmp == types.IntOne { + return types.False + } + return cmp + }, traits.ComparerType)), + + function(operators.Greater, + decls.Overload(overloads.GreaterBool, + argTypes(types.BoolType, types.BoolType), types.BoolType), + decls.Overload(overloads.GreaterInt64, + argTypes(types.IntType, types.IntType), types.BoolType), + decls.Overload(overloads.GreaterInt64Double, + argTypes(types.IntType, types.DoubleType), types.BoolType), + decls.Overload(overloads.GreaterInt64Uint64, + argTypes(types.IntType, types.UintType), types.BoolType), + decls.Overload(overloads.GreaterUint64, + argTypes(types.UintType, types.UintType), types.BoolType), + decls.Overload(overloads.GreaterUint64Double, + argTypes(types.UintType, types.DoubleType), types.BoolType), + decls.Overload(overloads.GreaterUint64Int64, + argTypes(types.UintType, types.IntType), types.BoolType), + decls.Overload(overloads.GreaterDouble, + argTypes(types.DoubleType, types.DoubleType), types.BoolType), + decls.Overload(overloads.GreaterDoubleInt64, + argTypes(types.DoubleType, types.IntType), types.BoolType), + decls.Overload(overloads.GreaterDoubleUint64, + argTypes(types.DoubleType, types.UintType), types.BoolType), + decls.Overload(overloads.GreaterString, + argTypes(types.StringType, types.StringType), types.BoolType), + decls.Overload(overloads.GreaterBytes, + argTypes(types.BytesType, types.BytesType), types.BoolType), + decls.Overload(overloads.GreaterTimestamp, + argTypes(types.TimestampType, types.TimestampType), types.BoolType), + decls.Overload(overloads.GreaterDuration, + argTypes(types.DurationType, types.DurationType), types.BoolType), + decls.SingletonBinaryBinding(func(lhs, rhs ref.Val) ref.Val { + cmp := lhs.(traits.Comparer).Compare(rhs) + if cmp == types.IntOne { + return types.True + } + if cmp == types.IntNegOne || cmp == types.IntZero { + return types.False + } + return cmp + }, traits.ComparerType)), + + function(operators.GreaterEquals, + decls.Overload(overloads.GreaterEqualsBool, + argTypes(types.BoolType, types.BoolType), types.BoolType), + decls.Overload(overloads.GreaterEqualsInt64, + argTypes(types.IntType, types.IntType), types.BoolType), + decls.Overload(overloads.GreaterEqualsInt64Double, + argTypes(types.IntType, types.DoubleType), types.BoolType), + decls.Overload(overloads.GreaterEqualsInt64Uint64, + argTypes(types.IntType, types.UintType), types.BoolType), + decls.Overload(overloads.GreaterEqualsUint64, + argTypes(types.UintType, types.UintType), types.BoolType), + decls.Overload(overloads.GreaterEqualsUint64Double, + argTypes(types.UintType, types.DoubleType), types.BoolType), + decls.Overload(overloads.GreaterEqualsUint64Int64, + argTypes(types.UintType, types.IntType), types.BoolType), + decls.Overload(overloads.GreaterEqualsDouble, + argTypes(types.DoubleType, types.DoubleType), types.BoolType), + decls.Overload(overloads.GreaterEqualsDoubleInt64, + argTypes(types.DoubleType, types.IntType), types.BoolType), + decls.Overload(overloads.GreaterEqualsDoubleUint64, 
+ argTypes(types.DoubleType, types.UintType), types.BoolType), + decls.Overload(overloads.GreaterEqualsString, + argTypes(types.StringType, types.StringType), types.BoolType), + decls.Overload(overloads.GreaterEqualsBytes, + argTypes(types.BytesType, types.BytesType), types.BoolType), + decls.Overload(overloads.GreaterEqualsTimestamp, + argTypes(types.TimestampType, types.TimestampType), types.BoolType), + decls.Overload(overloads.GreaterEqualsDuration, + argTypes(types.DurationType, types.DurationType), types.BoolType), + decls.SingletonBinaryBinding(func(lhs, rhs ref.Val) ref.Val { + cmp := lhs.(traits.Comparer).Compare(rhs) + if cmp == types.IntOne || cmp == types.IntZero { + return types.True + } + if cmp == types.IntNegOne { + return types.False + } + return cmp + }, traits.ComparerType)), + + // Indexing + function(operators.Index, + decls.Overload(overloads.IndexList, argTypes(listOfA, types.IntType), paramA), + decls.Overload(overloads.IndexMap, argTypes(mapOfAB, paramA), paramB), + decls.SingletonBinaryBinding(func(lhs, rhs ref.Val) ref.Val { + return lhs.(traits.Indexer).Get(rhs) + }, traits.IndexerType)), + + // Collections operators + function(operators.In, + decls.Overload(overloads.InList, argTypes(paramA, listOfA), types.BoolType), + decls.Overload(overloads.InMap, argTypes(paramA, mapOfAB), types.BoolType), + decls.SingletonBinaryBinding(inAggregate)), + function(operators.OldIn, + decls.DisableDeclaration(true), // safe deprecation + decls.Overload(overloads.InList, argTypes(paramA, listOfA), types.BoolType), + decls.Overload(overloads.InMap, argTypes(paramA, mapOfAB), types.BoolType), + decls.SingletonBinaryBinding(inAggregate)), + function(overloads.DeprecatedIn, + decls.DisableDeclaration(true), // safe deprecation + decls.Overload(overloads.InList, argTypes(paramA, listOfA), types.BoolType), + decls.Overload(overloads.InMap, argTypes(paramA, mapOfAB), types.BoolType), + decls.SingletonBinaryBinding(inAggregate)), + function(overloads.Size, + decls.Overload(overloads.SizeBytes, argTypes(types.BytesType), types.IntType), + decls.MemberOverload(overloads.SizeBytesInst, argTypes(types.BytesType), types.IntType), + decls.Overload(overloads.SizeList, argTypes(listOfA), types.IntType), + decls.MemberOverload(overloads.SizeListInst, argTypes(listOfA), types.IntType), + decls.Overload(overloads.SizeMap, argTypes(mapOfAB), types.IntType), + decls.MemberOverload(overloads.SizeMapInst, argTypes(mapOfAB), types.IntType), + decls.Overload(overloads.SizeString, argTypes(types.StringType), types.IntType), + decls.MemberOverload(overloads.SizeStringInst, argTypes(types.StringType), types.IntType), + decls.SingletonUnaryBinding(func(val ref.Val) ref.Val { + return val.(traits.Sizer).Size() + }, traits.SizerType)), + + // Type conversions + function(overloads.TypeConvertType, + decls.Overload(overloads.TypeConvertType, argTypes(paramA), types.NewTypeTypeWithParam(paramA)), + decls.SingletonUnaryBinding(convertToType(types.TypeType))), + + // Bool conversions + function(overloads.TypeConvertBool, + decls.Overload(overloads.BoolToBool, argTypes(types.BoolType), types.BoolType, + decls.UnaryBinding(identity)), + decls.Overload(overloads.StringToBool, argTypes(types.StringType), types.BoolType, + decls.UnaryBinding(convertToType(types.BoolType)))), + + // Bytes conversions + function(overloads.TypeConvertBytes, + decls.Overload(overloads.BytesToBytes, argTypes(types.BytesType), types.BytesType, + decls.UnaryBinding(identity)), + decls.Overload(overloads.StringToBytes, 
argTypes(types.StringType), types.BytesType, + decls.UnaryBinding(convertToType(types.BytesType)))), + + // Double conversions + function(overloads.TypeConvertDouble, + decls.Overload(overloads.DoubleToDouble, argTypes(types.DoubleType), types.DoubleType, + decls.UnaryBinding(identity)), + decls.Overload(overloads.IntToDouble, argTypes(types.IntType), types.DoubleType, + decls.UnaryBinding(convertToType(types.DoubleType))), + decls.Overload(overloads.StringToDouble, argTypes(types.StringType), types.DoubleType, + decls.UnaryBinding(convertToType(types.DoubleType))), + decls.Overload(overloads.UintToDouble, argTypes(types.UintType), types.DoubleType, + decls.UnaryBinding(convertToType(types.DoubleType)))), + + // Duration conversions + function(overloads.TypeConvertDuration, + decls.Overload(overloads.DurationToDuration, argTypes(types.DurationType), types.DurationType, + decls.UnaryBinding(identity)), + decls.Overload(overloads.IntToDuration, argTypes(types.IntType), types.DurationType, + decls.UnaryBinding(convertToType(types.DurationType))), + decls.Overload(overloads.StringToDuration, argTypes(types.StringType), types.DurationType, + decls.UnaryBinding(convertToType(types.DurationType)))), + + // Dyn conversions + function(overloads.TypeConvertDyn, + decls.Overload(overloads.ToDyn, argTypes(paramA), types.DynType), + decls.SingletonUnaryBinding(identity)), + + // Int conversions + function(overloads.TypeConvertInt, + decls.Overload(overloads.IntToInt, argTypes(types.IntType), types.IntType, + decls.UnaryBinding(identity)), + decls.Overload(overloads.DoubleToInt, argTypes(types.DoubleType), types.IntType, + decls.UnaryBinding(convertToType(types.IntType))), + decls.Overload(overloads.DurationToInt, argTypes(types.DurationType), types.IntType, + decls.UnaryBinding(convertToType(types.IntType))), + decls.Overload(overloads.StringToInt, argTypes(types.StringType), types.IntType, + decls.UnaryBinding(convertToType(types.IntType))), + decls.Overload(overloads.TimestampToInt, argTypes(types.TimestampType), types.IntType, + decls.UnaryBinding(convertToType(types.IntType))), + decls.Overload(overloads.UintToInt, argTypes(types.UintType), types.IntType, + decls.UnaryBinding(convertToType(types.IntType))), + ), + + // String conversions + function(overloads.TypeConvertString, + decls.Overload(overloads.StringToString, argTypes(types.StringType), types.StringType, + decls.UnaryBinding(identity)), + decls.Overload(overloads.BoolToString, argTypes(types.BoolType), types.StringType, + decls.UnaryBinding(convertToType(types.StringType))), + decls.Overload(overloads.BytesToString, argTypes(types.BytesType), types.StringType, + decls.UnaryBinding(convertToType(types.StringType))), + decls.Overload(overloads.DoubleToString, argTypes(types.DoubleType), types.StringType, + decls.UnaryBinding(convertToType(types.StringType))), + decls.Overload(overloads.DurationToString, argTypes(types.DurationType), types.StringType, + decls.UnaryBinding(convertToType(types.StringType))), + decls.Overload(overloads.IntToString, argTypes(types.IntType), types.StringType, + decls.UnaryBinding(convertToType(types.StringType))), + decls.Overload(overloads.TimestampToString, argTypes(types.TimestampType), types.StringType, + decls.UnaryBinding(convertToType(types.StringType))), + decls.Overload(overloads.UintToString, argTypes(types.UintType), types.StringType, + decls.UnaryBinding(convertToType(types.StringType)))), + + // Timestamp conversions + function(overloads.TypeConvertTimestamp, + 
decls.Overload(overloads.TimestampToTimestamp, argTypes(types.TimestampType), types.TimestampType, + decls.UnaryBinding(identity)), + decls.Overload(overloads.IntToTimestamp, argTypes(types.IntType), types.TimestampType, + decls.UnaryBinding(convertToType(types.TimestampType))), + decls.Overload(overloads.StringToTimestamp, argTypes(types.StringType), types.TimestampType, + decls.UnaryBinding(convertToType(types.TimestampType)))), + + // Uint conversions + function(overloads.TypeConvertUint, + decls.Overload(overloads.UintToUint, argTypes(types.UintType), types.UintType, + decls.UnaryBinding(identity)), + decls.Overload(overloads.DoubleToUint, argTypes(types.DoubleType), types.UintType, + decls.UnaryBinding(convertToType(types.UintType))), + decls.Overload(overloads.IntToUint, argTypes(types.IntType), types.UintType, + decls.UnaryBinding(convertToType(types.UintType))), + decls.Overload(overloads.StringToUint, argTypes(types.StringType), types.UintType, + decls.UnaryBinding(convertToType(types.UintType)))), + + // String functions + function(overloads.Contains, + decls.MemberOverload(overloads.ContainsString, + argTypes(types.StringType, types.StringType), types.BoolType, + decls.BinaryBinding(types.StringContains)), + decls.DisableTypeGuards(true)), + function(overloads.EndsWith, + decls.MemberOverload(overloads.EndsWithString, + argTypes(types.StringType, types.StringType), types.BoolType, + decls.BinaryBinding(types.StringEndsWith)), + decls.DisableTypeGuards(true)), + function(overloads.StartsWith, + decls.MemberOverload(overloads.StartsWithString, + argTypes(types.StringType, types.StringType), types.BoolType, + decls.BinaryBinding(types.StringStartsWith)), + decls.DisableTypeGuards(true)), + function(overloads.Matches, + decls.Overload(overloads.Matches, argTypes(types.StringType, types.StringType), types.BoolType), + decls.MemberOverload(overloads.MatchesString, + argTypes(types.StringType, types.StringType), types.BoolType), + decls.SingletonBinaryBinding(func(str, pat ref.Val) ref.Val { + return str.(traits.Matcher).Match(pat) + }, traits.MatcherType)), + + // Timestamp / duration functions + function(overloads.TimeGetFullYear, + decls.MemberOverload(overloads.TimestampToYear, + argTypes(types.TimestampType), types.IntType), + decls.MemberOverload(overloads.TimestampToYearWithTz, + argTypes(types.TimestampType, types.StringType), types.IntType)), + + function(overloads.TimeGetMonth, + decls.MemberOverload(overloads.TimestampToMonth, + argTypes(types.TimestampType), types.IntType), + decls.MemberOverload(overloads.TimestampToMonthWithTz, + argTypes(types.TimestampType, types.StringType), types.IntType)), + + function(overloads.TimeGetDayOfYear, + decls.MemberOverload(overloads.TimestampToDayOfYear, + argTypes(types.TimestampType), types.IntType), + decls.MemberOverload(overloads.TimestampToDayOfYearWithTz, + argTypes(types.TimestampType, types.StringType), types.IntType)), + + function(overloads.TimeGetDayOfMonth, + decls.MemberOverload(overloads.TimestampToDayOfMonthZeroBased, + argTypes(types.TimestampType), types.IntType), + decls.MemberOverload(overloads.TimestampToDayOfMonthZeroBasedWithTz, + argTypes(types.TimestampType, types.StringType), types.IntType)), + + function(overloads.TimeGetDate, + decls.MemberOverload(overloads.TimestampToDayOfMonthOneBased, + argTypes(types.TimestampType), types.IntType), + decls.MemberOverload(overloads.TimestampToDayOfMonthOneBasedWithTz, + argTypes(types.TimestampType, types.StringType), types.IntType)), + + 
function(overloads.TimeGetDayOfWeek, + decls.MemberOverload(overloads.TimestampToDayOfWeek, + argTypes(types.TimestampType), types.IntType), + decls.MemberOverload(overloads.TimestampToDayOfWeekWithTz, + argTypes(types.TimestampType, types.StringType), types.IntType)), + + function(overloads.TimeGetHours, + decls.MemberOverload(overloads.TimestampToHours, + argTypes(types.TimestampType), types.IntType), + decls.MemberOverload(overloads.TimestampToHoursWithTz, + argTypes(types.TimestampType, types.StringType), types.IntType), + decls.MemberOverload(overloads.DurationToHours, + argTypes(types.DurationType), types.IntType)), + + function(overloads.TimeGetMinutes, + decls.MemberOverload(overloads.TimestampToMinutes, + argTypes(types.TimestampType), types.IntType), + decls.MemberOverload(overloads.TimestampToMinutesWithTz, + argTypes(types.TimestampType, types.StringType), types.IntType), + decls.MemberOverload(overloads.DurationToMinutes, + argTypes(types.DurationType), types.IntType)), + + function(overloads.TimeGetSeconds, + decls.MemberOverload(overloads.TimestampToSeconds, + argTypes(types.TimestampType), types.IntType), + decls.MemberOverload(overloads.TimestampToSecondsWithTz, + argTypes(types.TimestampType, types.StringType), types.IntType), + decls.MemberOverload(overloads.DurationToSeconds, + argTypes(types.DurationType), types.IntType)), + + function(overloads.TimeGetMilliseconds, + decls.MemberOverload(overloads.TimestampToMilliseconds, + argTypes(types.TimestampType), types.IntType), + decls.MemberOverload(overloads.TimestampToMillisecondsWithTz, + argTypes(types.TimestampType, types.StringType), types.IntType), + decls.MemberOverload(overloads.DurationToMilliseconds, + argTypes(types.DurationType), types.IntType)), + } + + stdFnDecls = make([]*exprpb.Decl, 0, len(stdFunctions)) + for _, fn := range stdFunctions { + if fn.IsDeclarationDisabled() { + continue + } + ed, err := decls.FunctionDeclToExprDecl(fn) + if err != nil { + panic(err) + } + stdFnDecls = append(stdFnDecls, ed) + } +} + +// Functions returns the set of standard library function declarations and definitions for CEL. +func Functions() []*decls.FunctionDecl { + return stdFunctions +} + +// FunctionExprDecls returns the legacy style protobuf-typed declarations for all functions and overloads +// in the CEL standard environment. +// +// Deprecated: use Functions +func FunctionExprDecls() []*exprpb.Decl { + return stdFnDecls +} + +// Types returns the set of standard library types for CEL. +func Types() []*decls.VariableDecl { + return stdTypes +} + +// TypeExprDecls returns the legacy style protobuf-typed declarations for all types in the CEL +// standard environment. +// +// Deprecated: use Types +func TypeExprDecls() []*exprpb.Decl { + return stdTypeDecls +} + +func notStrictlyFalse(value ref.Val) ref.Val { + if types.IsBool(value) { + return value + } + return types.True +} + +func inAggregate(lhs ref.Val, rhs ref.Val) ref.Val { + if rhs.Type().HasTrait(traits.ContainerType) { + return rhs.(traits.Container).Contains(lhs) + } + return types.ValOrErr(rhs, "no such overload") +} + +func function(name string, opts ...decls.FunctionOpt) *decls.FunctionDecl { + fn, err := decls.NewFunction(name, opts...) 
+ if err != nil { + panic(err) + } + return fn +} + +func argTypes(args ...*types.Type) []*types.Type { + return args +} + +func noBinaryOverrides(rhs, lhs ref.Val) ref.Val { + return types.NoSuchOverloadErr() +} + +func noFunctionOverrides(args ...ref.Val) ref.Val { + return types.NoSuchOverloadErr() +} + +func identity(val ref.Val) ref.Val { + return val +} + +func convertToType(t ref.Type) functions.UnaryOp { + return func(val ref.Val) ref.Val { + return val.ConvertToType(t) + } +} diff --git a/vendor/github.com/google/cel-go/common/types/BUILD.bazel b/vendor/github.com/google/cel-go/common/types/BUILD.bazel index 89c4feacb..b5e44ffbf 100644 --- a/vendor/github.com/google/cel-go/common/types/BUILD.bazel +++ b/vendor/github.com/google/cel-go/common/types/BUILD.bazel @@ -27,20 +27,20 @@ go_library( "provider.go", "string.go", "timestamp.go", - "type.go", + "types.go", "uint.go", "unknown.go", "util.go", ], importpath = "github.com/google/cel-go/common/types", deps = [ + "//checker/decls:go_default_library", "//common/overloads:go_default_library", "//common/types/pb:go_default_library", "//common/types/ref:go_default_library", "//common/types/traits:go_default_library", "@com_github_stoewer_go_strcase//:go_default_library", "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library", - "@org_golang_google_genproto_googleapis_rpc//status:go_default_library", "@org_golang_google_protobuf//encoding/protojson:go_default_library", "@org_golang_google_protobuf//proto:go_default_library", "@org_golang_google_protobuf//reflect/protoreflect:go_default_library", @@ -71,8 +71,9 @@ go_test( "provider_test.go", "string_test.go", "timestamp_test.go", - "type_test.go", + "types_test.go", "uint_test.go", + "unknown_test.go", "util_test.go", ], embed = [":go_default_library"], diff --git a/vendor/github.com/google/cel-go/common/types/bool.go b/vendor/github.com/google/cel-go/common/types/bool.go index a634ecc28..565734f3f 100644 --- a/vendor/github.com/google/cel-go/common/types/bool.go +++ b/vendor/github.com/google/cel-go/common/types/bool.go @@ -20,7 +20,6 @@ import ( "strconv" "github.com/google/cel-go/common/types/ref" - "github.com/google/cel-go/common/types/traits" anypb "google.golang.org/protobuf/types/known/anypb" structpb "google.golang.org/protobuf/types/known/structpb" @@ -31,11 +30,6 @@ import ( type Bool bool var ( - // BoolType singleton. - BoolType = NewTypeValue("bool", - traits.ComparerType, - traits.NegatorType) - // boolWrapperType golang reflected type for protobuf bool wrapper type. boolWrapperType = reflect.TypeOf(&wrapperspb.BoolValue{}) ) diff --git a/vendor/github.com/google/cel-go/common/types/bytes.go b/vendor/github.com/google/cel-go/common/types/bytes.go index bef190759..5838755f8 100644 --- a/vendor/github.com/google/cel-go/common/types/bytes.go +++ b/vendor/github.com/google/cel-go/common/types/bytes.go @@ -22,7 +22,6 @@ import ( "unicode/utf8" "github.com/google/cel-go/common/types/ref" - "github.com/google/cel-go/common/types/traits" anypb "google.golang.org/protobuf/types/known/anypb" structpb "google.golang.org/protobuf/types/known/structpb" @@ -34,12 +33,6 @@ import ( type Bytes []byte var ( - // BytesType singleton. - BytesType = NewTypeValue("bytes", - traits.AdderType, - traits.ComparerType, - traits.SizerType) - // byteWrapperType golang reflected type for protobuf bytes wrapper type. 
byteWrapperType = reflect.TypeOf(&wrapperspb.BytesValue{}) ) diff --git a/vendor/github.com/google/cel-go/common/types/double.go b/vendor/github.com/google/cel-go/common/types/double.go index bda9f31a6..027e78978 100644 --- a/vendor/github.com/google/cel-go/common/types/double.go +++ b/vendor/github.com/google/cel-go/common/types/double.go @@ -20,7 +20,6 @@ import ( "reflect" "github.com/google/cel-go/common/types/ref" - "github.com/google/cel-go/common/types/traits" anypb "google.golang.org/protobuf/types/known/anypb" structpb "google.golang.org/protobuf/types/known/structpb" @@ -32,15 +31,6 @@ import ( type Double float64 var ( - // DoubleType singleton. - DoubleType = NewTypeValue("double", - traits.AdderType, - traits.ComparerType, - traits.DividerType, - traits.MultiplierType, - traits.NegatorType, - traits.SubtractorType) - // doubleWrapperType reflected type for protobuf double wrapper type. doubleWrapperType = reflect.TypeOf(&wrapperspb.DoubleValue{}) diff --git a/vendor/github.com/google/cel-go/common/types/duration.go b/vendor/github.com/google/cel-go/common/types/duration.go index c90ac1bee..596e56d6b 100644 --- a/vendor/github.com/google/cel-go/common/types/duration.go +++ b/vendor/github.com/google/cel-go/common/types/duration.go @@ -22,7 +22,6 @@ import ( "github.com/google/cel-go/common/overloads" "github.com/google/cel-go/common/types/ref" - "github.com/google/cel-go/common/types/traits" anypb "google.golang.org/protobuf/types/known/anypb" dpb "google.golang.org/protobuf/types/known/durationpb" @@ -41,13 +40,14 @@ func durationOf(d time.Duration) Duration { } var ( - // DurationType singleton. - DurationType = NewTypeValue("google.protobuf.Duration", - traits.AdderType, - traits.ComparerType, - traits.NegatorType, - traits.ReceiverType, - traits.SubtractorType) + durationValueType = reflect.TypeOf(&dpb.Duration{}) + + durationZeroArgOverloads = map[string]func(ref.Val) ref.Val{ + overloads.TimeGetHours: DurationGetHours, + overloads.TimeGetMinutes: DurationGetMinutes, + overloads.TimeGetSeconds: DurationGetSeconds, + overloads.TimeGetMilliseconds: DurationGetMilliseconds, + } ) // Add implements traits.Adder.Add. @@ -156,7 +156,7 @@ func (d Duration) Negate() ref.Val { func (d Duration) Receive(function string, overload string, args []ref.Val) ref.Val { if len(args) == 0 { if f, found := durationZeroArgOverloads[function]; found { - return f(d.Duration) + return f(d) } } return NoSuchOverloadErr() @@ -185,20 +185,38 @@ func (d Duration) Value() any { return d.Duration } -var ( - durationValueType = reflect.TypeOf(&dpb.Duration{}) +// DurationGetHours returns the duration in hours. +func DurationGetHours(val ref.Val) ref.Val { + dur, ok := val.(Duration) + if !ok { + return MaybeNoSuchOverloadErr(val) + } + return Int(dur.Hours()) +} - durationZeroArgOverloads = map[string]func(time.Duration) ref.Val{ - overloads.TimeGetHours: func(dur time.Duration) ref.Val { - return Int(dur.Hours()) - }, - overloads.TimeGetMinutes: func(dur time.Duration) ref.Val { - return Int(dur.Minutes()) - }, - overloads.TimeGetSeconds: func(dur time.Duration) ref.Val { - return Int(dur.Seconds()) - }, - overloads.TimeGetMilliseconds: func(dur time.Duration) ref.Val { - return Int(dur.Milliseconds()) - }} -) +// DurationGetMinutes returns duration in minutes. +func DurationGetMinutes(val ref.Val) ref.Val { + dur, ok := val.(Duration) + if !ok { + return MaybeNoSuchOverloadErr(val) + } + return Int(dur.Minutes()) +} + +// DurationGetSeconds returns duration in seconds. 
+func DurationGetSeconds(val ref.Val) ref.Val { + dur, ok := val.(Duration) + if !ok { + return MaybeNoSuchOverloadErr(val) + } + return Int(dur.Seconds()) +} + +// DurationGetMilliseconds returns duration in milliseconds. +func DurationGetMilliseconds(val ref.Val) ref.Val { + dur, ok := val.(Duration) + if !ok { + return MaybeNoSuchOverloadErr(val) + } + return Int(dur.Milliseconds()) +} diff --git a/vendor/github.com/google/cel-go/common/types/err.go b/vendor/github.com/google/cel-go/common/types/err.go index b4874d9d4..aa8f94b4f 100644 --- a/vendor/github.com/google/cel-go/common/types/err.go +++ b/vendor/github.com/google/cel-go/common/types/err.go @@ -35,7 +35,7 @@ type Err struct { var ( // ErrType singleton. - ErrType = NewTypeValue("error") + ErrType = NewOpaqueType("error") // errDivideByZero is an error indicating a division by zero of an integer value. errDivideByZero = errors.New("division by zero") @@ -129,6 +129,11 @@ func (e *Err) Is(target error) bool { return e.error.Error() == target.Error() } +// Unwrap implements errors.Unwrap. +func (e *Err) Unwrap() error { + return e.error +} + // IsError returns whether the input element ref.Type or ref.Val is equal to // the ErrType singleton. func IsError(val ref.Val) bool { diff --git a/vendor/github.com/google/cel-go/common/types/int.go b/vendor/github.com/google/cel-go/common/types/int.go index f5a9511c8..940772aed 100644 --- a/vendor/github.com/google/cel-go/common/types/int.go +++ b/vendor/github.com/google/cel-go/common/types/int.go @@ -22,7 +22,6 @@ import ( "time" "github.com/google/cel-go/common/types/ref" - "github.com/google/cel-go/common/types/traits" anypb "google.golang.org/protobuf/types/known/anypb" structpb "google.golang.org/protobuf/types/known/structpb" @@ -41,16 +40,6 @@ const ( ) var ( - // IntType singleton. - IntType = NewTypeValue("int", - traits.AdderType, - traits.ComparerType, - traits.DividerType, - traits.ModderType, - traits.MultiplierType, - traits.NegatorType, - traits.SubtractorType) - // int32WrapperType reflected type for protobuf int32 wrapper type. int32WrapperType = reflect.TypeOf(&wrapperspb.Int32Value{}) diff --git a/vendor/github.com/google/cel-go/common/types/iterator.go b/vendor/github.com/google/cel-go/common/types/iterator.go index 9f224ad4f..98e9147b6 100644 --- a/vendor/github.com/google/cel-go/common/types/iterator.go +++ b/vendor/github.com/google/cel-go/common/types/iterator.go @@ -24,7 +24,7 @@ import ( var ( // IteratorType singleton. - IteratorType = NewTypeValue("iterator", traits.IteratorType) + IteratorType = NewObjectType("iterator", traits.IteratorType) ) // baseIterator is the basis for list, map, and object iterators. diff --git a/vendor/github.com/google/cel-go/common/types/list.go b/vendor/github.com/google/cel-go/common/types/list.go index de5f2099b..d4932b4a9 100644 --- a/vendor/github.com/google/cel-go/common/types/list.go +++ b/vendor/github.com/google/cel-go/common/types/list.go @@ -29,25 +29,15 @@ import ( structpb "google.golang.org/protobuf/types/known/structpb" ) -var ( - // ListType singleton. - ListType = NewTypeValue("list", - traits.AdderType, - traits.ContainerType, - traits.IndexerType, - traits.IterableType, - traits.SizerType) -) - // NewDynamicList returns a traits.Lister with heterogenous elements. // value should be an array of "native" types, i.e. any type that // NativeToValue() can convert to a ref.Val. 
-func NewDynamicList(adapter ref.TypeAdapter, value any) traits.Lister { +func NewDynamicList(adapter Adapter, value any) traits.Lister { refValue := reflect.ValueOf(value) return &baseList{ - TypeAdapter: adapter, - value: value, - size: refValue.Len(), + Adapter: adapter, + value: value, + size: refValue.Len(), get: func(i int) any { return refValue.Index(i).Interface() }, @@ -55,56 +45,56 @@ func NewDynamicList(adapter ref.TypeAdapter, value any) traits.Lister { } // NewStringList returns a traits.Lister containing only strings. -func NewStringList(adapter ref.TypeAdapter, elems []string) traits.Lister { +func NewStringList(adapter Adapter, elems []string) traits.Lister { return &baseList{ - TypeAdapter: adapter, - value: elems, - size: len(elems), - get: func(i int) any { return elems[i] }, + Adapter: adapter, + value: elems, + size: len(elems), + get: func(i int) any { return elems[i] }, } } // NewRefValList returns a traits.Lister with ref.Val elements. // // This type specialization is used with list literals within CEL expressions. -func NewRefValList(adapter ref.TypeAdapter, elems []ref.Val) traits.Lister { +func NewRefValList(adapter Adapter, elems []ref.Val) traits.Lister { return &baseList{ - TypeAdapter: adapter, - value: elems, - size: len(elems), - get: func(i int) any { return elems[i] }, + Adapter: adapter, + value: elems, + size: len(elems), + get: func(i int) any { return elems[i] }, } } // NewProtoList returns a traits.Lister based on a pb.List instance. -func NewProtoList(adapter ref.TypeAdapter, list protoreflect.List) traits.Lister { +func NewProtoList(adapter Adapter, list protoreflect.List) traits.Lister { return &baseList{ - TypeAdapter: adapter, - value: list, - size: list.Len(), - get: func(i int) any { return list.Get(i).Interface() }, + Adapter: adapter, + value: list, + size: list.Len(), + get: func(i int) any { return list.Get(i).Interface() }, } } // NewJSONList returns a traits.Lister based on structpb.ListValue instance. -func NewJSONList(adapter ref.TypeAdapter, l *structpb.ListValue) traits.Lister { +func NewJSONList(adapter Adapter, l *structpb.ListValue) traits.Lister { vals := l.GetValues() return &baseList{ - TypeAdapter: adapter, - value: l, - size: len(vals), - get: func(i int) any { return vals[i] }, + Adapter: adapter, + value: l, + size: len(vals), + get: func(i int) any { return vals[i] }, } } // NewMutableList creates a new mutable list whose internal state can be modified. -func NewMutableList(adapter ref.TypeAdapter) traits.MutableLister { +func NewMutableList(adapter Adapter) traits.MutableLister { var mutableValues []ref.Val l := &mutableList{ baseList: &baseList{ - TypeAdapter: adapter, - value: mutableValues, - size: 0, + Adapter: adapter, + value: mutableValues, + size: 0, }, mutableValues: mutableValues, } @@ -116,9 +106,9 @@ func NewMutableList(adapter ref.TypeAdapter) traits.MutableLister { // baseList points to a list containing elements of any type. // The `value` is an array of native values, and refValue is its reflection object. -// The `ref.TypeAdapter` enables native type to CEL type conversions. +// The `Adapter` enables native type to CEL type conversions. type baseList struct { - ref.TypeAdapter + Adapter value any // size indicates the number of elements within the list. 
@@ -143,9 +133,9 @@ func (l *baseList) Add(other ref.Val) ref.Val { return l } return &concatList{ - TypeAdapter: l.TypeAdapter, - prevList: l, - nextList: otherList} + Adapter: l.Adapter, + prevList: l, + nextList: otherList} } // Contains implements the traits.Container interface method. @@ -322,13 +312,13 @@ func (l *mutableList) Add(other ref.Val) ref.Val { func (l *mutableList) ToImmutableList() traits.Lister { // The reference to internal state is guaranteed to be safe as this call is only performed // when mutations have been completed. - return NewRefValList(l.TypeAdapter, l.mutableValues) + return NewRefValList(l.Adapter, l.mutableValues) } // concatList combines two list implementations together into a view. -// The `ref.TypeAdapter` enables native type to CEL type conversions. +// The `Adapter` enables native type to CEL type conversions. type concatList struct { - ref.TypeAdapter + Adapter value any prevList traits.Lister nextList traits.Lister @@ -347,9 +337,9 @@ func (l *concatList) Add(other ref.Val) ref.Val { return l } return &concatList{ - TypeAdapter: l.TypeAdapter, - prevList: l, - nextList: otherList} + Adapter: l.Adapter, + prevList: l, + nextList: otherList} } // Contains implements the traits.Container interface method. @@ -376,7 +366,7 @@ func (l *concatList) Contains(elem ref.Val) ref.Val { // ConvertToNative implements the ref.Val interface method. func (l *concatList) ConvertToNative(typeDesc reflect.Type) (any, error) { - combined := NewDynamicList(l.TypeAdapter, l.Value().([]any)) + combined := NewDynamicList(l.Adapter, l.Value().([]any)) return combined.ConvertToNative(typeDesc) } diff --git a/vendor/github.com/google/cel-go/common/types/map.go b/vendor/github.com/google/cel-go/common/types/map.go index 213be4ac9..739b7aab0 100644 --- a/vendor/github.com/google/cel-go/common/types/map.go +++ b/vendor/github.com/google/cel-go/common/types/map.go @@ -32,10 +32,10 @@ import ( ) // NewDynamicMap returns a traits.Mapper value with dynamic key, value pairs. -func NewDynamicMap(adapter ref.TypeAdapter, value any) traits.Mapper { +func NewDynamicMap(adapter Adapter, value any) traits.Mapper { refValue := reflect.ValueOf(value) return &baseMap{ - TypeAdapter: adapter, + Adapter: adapter, mapAccessor: newReflectMapAccessor(adapter, refValue), value: value, size: refValue.Len(), @@ -46,10 +46,10 @@ func NewDynamicMap(adapter ref.TypeAdapter, value any) traits.Mapper { // encoded in protocol buffer form. // // The `adapter` argument provides type adaptation capabilities from proto to CEL. -func NewJSONStruct(adapter ref.TypeAdapter, value *structpb.Struct) traits.Mapper { +func NewJSONStruct(adapter Adapter, value *structpb.Struct) traits.Mapper { fields := value.GetFields() return &baseMap{ - TypeAdapter: adapter, + Adapter: adapter, mapAccessor: newJSONStructAccessor(adapter, fields), value: value, size: len(fields), @@ -57,9 +57,9 @@ func NewJSONStruct(adapter ref.TypeAdapter, value *structpb.Struct) traits.Mappe } // NewRefValMap returns a specialized traits.Mapper with CEL valued keys and values. 
-func NewRefValMap(adapter ref.TypeAdapter, value map[ref.Val]ref.Val) traits.Mapper { +func NewRefValMap(adapter Adapter, value map[ref.Val]ref.Val) traits.Mapper { return &baseMap{ - TypeAdapter: adapter, + Adapter: adapter, mapAccessor: newRefValMapAccessor(value), value: value, size: len(value), @@ -67,9 +67,9 @@ func NewRefValMap(adapter ref.TypeAdapter, value map[ref.Val]ref.Val) traits.Map } // NewStringInterfaceMap returns a specialized traits.Mapper with string keys and interface values. -func NewStringInterfaceMap(adapter ref.TypeAdapter, value map[string]any) traits.Mapper { +func NewStringInterfaceMap(adapter Adapter, value map[string]any) traits.Mapper { return &baseMap{ - TypeAdapter: adapter, + Adapter: adapter, mapAccessor: newStringIfaceMapAccessor(adapter, value), value: value, size: len(value), @@ -77,9 +77,9 @@ func NewStringInterfaceMap(adapter ref.TypeAdapter, value map[string]any) traits } // NewStringStringMap returns a specialized traits.Mapper with string keys and values. -func NewStringStringMap(adapter ref.TypeAdapter, value map[string]string) traits.Mapper { +func NewStringStringMap(adapter Adapter, value map[string]string) traits.Mapper { return &baseMap{ - TypeAdapter: adapter, + Adapter: adapter, mapAccessor: newStringMapAccessor(value), value: value, size: len(value), @@ -87,22 +87,13 @@ func NewStringStringMap(adapter ref.TypeAdapter, value map[string]string) traits } // NewProtoMap returns a specialized traits.Mapper for handling protobuf map values. -func NewProtoMap(adapter ref.TypeAdapter, value *pb.Map) traits.Mapper { +func NewProtoMap(adapter Adapter, value *pb.Map) traits.Mapper { return &protoMap{ - TypeAdapter: adapter, - value: value, + Adapter: adapter, + value: value, } } -var ( - // MapType singleton. - MapType = NewTypeValue("map", - traits.ContainerType, - traits.IndexerType, - traits.IterableType, - traits.SizerType) -) - // mapAccessor is a private interface for finding values within a map and iterating over the keys. // This interface implements portions of the API surface area required by the traits.Mapper // interface. @@ -121,7 +112,7 @@ type mapAccessor interface { // Since CEL is side-effect free, the base map represents an immutable object. type baseMap struct { // TypeAdapter used to convert keys and values accessed within the map. - ref.TypeAdapter + Adapter // mapAccessor interface implementation used to find and iterate over map keys. 
mapAccessor @@ -316,15 +307,15 @@ func (m *baseMap) Value() any { return m.value } -func newJSONStructAccessor(adapter ref.TypeAdapter, st map[string]*structpb.Value) mapAccessor { +func newJSONStructAccessor(adapter Adapter, st map[string]*structpb.Value) mapAccessor { return &jsonStructAccessor{ - TypeAdapter: adapter, - st: st, + Adapter: adapter, + st: st, } } type jsonStructAccessor struct { - ref.TypeAdapter + Adapter st map[string]*structpb.Value } @@ -359,17 +350,17 @@ func (a *jsonStructAccessor) Iterator() traits.Iterator { } } -func newReflectMapAccessor(adapter ref.TypeAdapter, value reflect.Value) mapAccessor { +func newReflectMapAccessor(adapter Adapter, value reflect.Value) mapAccessor { keyType := value.Type().Key() return &reflectMapAccessor{ - TypeAdapter: adapter, - refValue: value, - keyType: keyType, + Adapter: adapter, + refValue: value, + keyType: keyType, } } type reflectMapAccessor struct { - ref.TypeAdapter + Adapter refValue reflect.Value keyType reflect.Type } @@ -427,9 +418,9 @@ func (m *reflectMapAccessor) findInternal(key ref.Val) (ref.Val, bool) { // Iterator creates a Golang reflection based traits.Iterator. func (m *reflectMapAccessor) Iterator() traits.Iterator { return &mapIterator{ - TypeAdapter: m.TypeAdapter, - mapKeys: m.refValue.MapRange(), - len: m.refValue.Len(), + Adapter: m.Adapter, + mapKeys: m.refValue.MapRange(), + len: m.refValue.Len(), } } @@ -480,9 +471,9 @@ func (a *refValMapAccessor) Find(key ref.Val) (ref.Val, bool) { // Iterator produces a new traits.Iterator which iterates over the map keys via Golang reflection. func (a *refValMapAccessor) Iterator() traits.Iterator { return &mapIterator{ - TypeAdapter: DefaultTypeAdapter, - mapKeys: reflect.ValueOf(a.mapVal).MapRange(), - len: len(a.mapVal), + Adapter: DefaultTypeAdapter, + mapKeys: reflect.ValueOf(a.mapVal).MapRange(), + len: len(a.mapVal), } } @@ -524,15 +515,15 @@ func (a *stringMapAccessor) Iterator() traits.Iterator { } } -func newStringIfaceMapAccessor(adapter ref.TypeAdapter, mapVal map[string]any) mapAccessor { +func newStringIfaceMapAccessor(adapter Adapter, mapVal map[string]any) mapAccessor { return &stringIfaceMapAccessor{ - TypeAdapter: adapter, - mapVal: mapVal, + Adapter: adapter, + mapVal: mapVal, } } type stringIfaceMapAccessor struct { - ref.TypeAdapter + Adapter mapVal map[string]any } @@ -569,7 +560,7 @@ func (a *stringIfaceMapAccessor) Iterator() traits.Iterator { // protoMap is a specialized, separate implementation of the traits.Mapper interfaces tailored to // accessing protoreflect.Map values. 
type protoMap struct { - ref.TypeAdapter + Adapter value *pb.Map } @@ -772,9 +763,9 @@ func (m *protoMap) Iterator() traits.Iterator { return true }) return &protoMapIterator{ - TypeAdapter: m.TypeAdapter, - mapKeys: mapKeys, - len: m.value.Len(), + Adapter: m.Adapter, + mapKeys: mapKeys, + len: m.value.Len(), } } @@ -795,7 +786,7 @@ func (m *protoMap) Value() any { type mapIterator struct { *baseIterator - ref.TypeAdapter + Adapter mapKeys *reflect.MapIter cursor int len int @@ -818,7 +809,7 @@ func (it *mapIterator) Next() ref.Val { type protoMapIterator struct { *baseIterator - ref.TypeAdapter + Adapter mapKeys []protoreflect.MapKey cursor int len int diff --git a/vendor/github.com/google/cel-go/common/types/null.go b/vendor/github.com/google/cel-go/common/types/null.go index 38927a112..926ca3dc9 100644 --- a/vendor/github.com/google/cel-go/common/types/null.go +++ b/vendor/github.com/google/cel-go/common/types/null.go @@ -30,8 +30,6 @@ import ( type Null structpb.NullValue var ( - // NullType singleton. - NullType = NewTypeValue("null_type") // NullValue singleton. NullValue = Null(structpb.NullValue_NULL_VALUE) diff --git a/vendor/github.com/google/cel-go/common/types/object.go b/vendor/github.com/google/cel-go/common/types/object.go index 9955e2dce..8ba0af9fb 100644 --- a/vendor/github.com/google/cel-go/common/types/object.go +++ b/vendor/github.com/google/cel-go/common/types/object.go @@ -29,10 +29,10 @@ import ( ) type protoObj struct { - ref.TypeAdapter + Adapter value proto.Message typeDesc *pb.TypeDescription - typeValue *TypeValue + typeValue ref.Val } // NewObject returns an object based on a proto.Message value which handles @@ -42,15 +42,15 @@ type protoObj struct { // Note: the type value is pulled from the list of registered types within the // type provider. If the proto type is not registered within the type provider, // then this will result in an error within the type adapter / provider. -func NewObject(adapter ref.TypeAdapter, +func NewObject(adapter Adapter, typeDesc *pb.TypeDescription, - typeValue *TypeValue, + typeValue ref.Val, value proto.Message) ref.Val { return &protoObj{ - TypeAdapter: adapter, - value: value, - typeDesc: typeDesc, - typeValue: typeValue} + Adapter: adapter, + value: value, + typeDesc: typeDesc, + typeValue: typeValue} } func (o *protoObj) ConvertToNative(typeDesc reflect.Type) (any, error) { @@ -157,7 +157,7 @@ func (o *protoObj) Get(index ref.Val) ref.Val { } func (o *protoObj) Type() ref.Type { - return o.typeValue + return o.typeValue.(ref.Type) } func (o *protoObj) Value() any { diff --git a/vendor/github.com/google/cel-go/common/types/optional.go b/vendor/github.com/google/cel-go/common/types/optional.go index 54cb35b1a..a9f30aed0 100644 --- a/vendor/github.com/google/cel-go/common/types/optional.go +++ b/vendor/github.com/google/cel-go/common/types/optional.go @@ -24,7 +24,7 @@ import ( var ( // OptionalType indicates the runtime type of an optional value. - OptionalType = NewTypeValue("optional") + OptionalType = NewOpaqueType("optional") // OptionalNone is a sentinel value which is used to indicate an empty optional value. 
OptionalNone = &Optional{} diff --git a/vendor/github.com/google/cel-go/common/types/pb/type.go b/vendor/github.com/google/cel-go/common/types/pb/type.go index df9532156..6cc95c276 100644 --- a/vendor/github.com/google/cel-go/common/types/pb/type.go +++ b/vendor/github.com/google/cel-go/common/types/pb/type.go @@ -285,7 +285,7 @@ func (fd *FieldDescription) GetFrom(target any) (any, error) { // IsEnum returns true if the field type refers to an enum value. func (fd *FieldDescription) IsEnum() bool { - return fd.desc.Kind() == protoreflect.EnumKind + return fd.ProtoKind() == protoreflect.EnumKind } // IsMap returns true if the field is of map type. @@ -295,7 +295,7 @@ func (fd *FieldDescription) IsMap() bool { // IsMessage returns true if the field is of message type. func (fd *FieldDescription) IsMessage() bool { - kind := fd.desc.Kind() + kind := fd.ProtoKind() return kind == protoreflect.MessageKind || kind == protoreflect.GroupKind } @@ -326,6 +326,11 @@ func (fd *FieldDescription) Name() string { return string(fd.desc.Name()) } +// ProtoKind returns the protobuf reflected kind of the field. +func (fd *FieldDescription) ProtoKind() protoreflect.Kind { + return fd.desc.Kind() +} + // ReflectType returns the Golang reflect.Type for this field. func (fd *FieldDescription) ReflectType() reflect.Type { return fd.reflectType @@ -345,17 +350,17 @@ func (fd *FieldDescription) Zero() proto.Message { } func (fd *FieldDescription) typeDefToType() *exprpb.Type { - if fd.desc.Kind() == protoreflect.MessageKind || fd.desc.Kind() == protoreflect.GroupKind { + if fd.IsMessage() { msgType := string(fd.desc.Message().FullName()) if wk, found := CheckedWellKnowns[msgType]; found { return wk } return checkedMessageType(msgType) } - if fd.desc.Kind() == protoreflect.EnumKind { + if fd.IsEnum() { return checkedInt } - return CheckedPrimitives[fd.desc.Kind()] + return CheckedPrimitives[fd.ProtoKind()] } // Map wraps the protoreflect.Map object with a key and value FieldDescription for use in @@ -463,13 +468,13 @@ func unwrapDynamic(desc description, refMsg protoreflect.Message) (any, bool, er unwrappedAny := &anypb.Any{} err := Merge(unwrappedAny, msg) if err != nil { - return nil, false, err + return nil, false, fmt.Errorf("unwrap dynamic field failed: %v", err) } dynMsg, err := unwrappedAny.UnmarshalNew() if err != nil { // Allow the error to move further up the stack as it should result in an type // conversion error if the caller does not recover it somehow. - return nil, false, err + return nil, false, fmt.Errorf("unmarshal dynamic any failed: %v", err) } // Attempt to unwrap the dynamic type, otherwise return the dynamic message. 
unwrapped, nested, err := unwrapDynamic(desc, dynMsg.ProtoReflect()) @@ -560,8 +565,10 @@ func zeroValueOf(msg proto.Message) proto.Message { } var ( + jsonValueTypeURL = "types.googleapis.com/google.protobuf.Value" + zeroValueMap = map[string]proto.Message{ - "google.protobuf.Any": &anypb.Any{}, + "google.protobuf.Any": &anypb.Any{TypeUrl: jsonValueTypeURL}, "google.protobuf.Duration": &dpb.Duration{}, "google.protobuf.ListValue": &structpb.ListValue{}, "google.protobuf.Struct": &structpb.Struct{}, diff --git a/vendor/github.com/google/cel-go/common/types/provider.go b/vendor/github.com/google/cel-go/common/types/provider.go index e66951f5b..d301aa38a 100644 --- a/vendor/github.com/google/cel-go/common/types/provider.go +++ b/vendor/github.com/google/cel-go/common/types/provider.go @@ -33,17 +33,68 @@ import ( tpb "google.golang.org/protobuf/types/known/timestamppb" ) -type protoTypeRegistry struct { - revTypeMap map[string]ref.Type +// Adapter converts native Go values of varying type and complexity to equivalent CEL values. +type Adapter = ref.TypeAdapter + +// Provider specifies functions for creating new object instances and for resolving +// enum values by name. +type Provider interface { + // EnumValue returns the numeric value of the given enum value name. + EnumValue(enumName string) ref.Val + + // FindIdent takes a qualified identifier name and returns a ref.Val if one exists. + FindIdent(identName string) (ref.Val, bool) + + // FindStructType returns the Type give a qualified type name. + // + // For historical reasons, only struct types are expected to be returned through this + // method, and the type values are expected to be wrapped in a TypeType instance using + // TypeTypeWithParam(). + // + // Returns false if not found. + FindStructType(structType string) (*Type, bool) + + // FindStructFieldNames returns thet field names associated with the type, if the type + // is found. + FindStructFieldNames(structType string) ([]string, bool) + + // FieldStructFieldType returns the field type for a checked type value. Returns + // false if the field could not be found. + FindStructFieldType(structType, fieldName string) (*FieldType, bool) + + // NewValue creates a new type value from a qualified name and map of field + // name to value. + // + // Note, for each value, the Val.ConvertToNative function will be invoked + // to convert the Val to the field's native type. If an error occurs during + // conversion, the NewValue will be a types.Err. + NewValue(structType string, fields map[string]ref.Val) ref.Val +} + +// FieldType represents a field's type value and whether that field supports presence detection. +type FieldType struct { + // Type of the field as a CEL native type value. + Type *Type + + // IsSet indicates whether the field is set on an input object. + IsSet ref.FieldTester + + // GetFrom retrieves the field value on the input object, if set. + GetFrom ref.FieldGetter +} + +// Registry provides type information for a set of registered types. +type Registry struct { + revTypeMap map[string]*Type pbdb *pb.Db } // NewRegistry accepts a list of proto message instances and returns a type // provider which can create new instances of the provided message or any // message that proto depends upon in its FileDescriptor. 
-func NewRegistry(types ...proto.Message) (ref.TypeRegistry, error) { - p := &protoTypeRegistry{ - revTypeMap: make(map[string]ref.Type), +func NewRegistry(types ...proto.Message) (*Registry, error) { + p := &Registry{ + revTypeMap: make(map[string]*Type), pbdb: pb.NewDb(), } err := p.RegisterType( @@ -79,18 +130,17 @@ func NewRegistry(types ...proto.Message) (ref.TypeRegistry, error) { } // NewEmptyRegistry returns a registry which is completely unconfigured. -func NewEmptyRegistry() ref.TypeRegistry { - return &protoTypeRegistry{ - revTypeMap: make(map[string]ref.Type), +func NewEmptyRegistry() *Registry { + return &Registry{ + revTypeMap: make(map[string]*Type), pbdb: pb.NewDb(), } } -// Copy implements the ref.TypeRegistry interface method which copies the current state of the -// registry into its own memory space. -func (p *protoTypeRegistry) Copy() ref.TypeRegistry { - copy := &protoTypeRegistry{ - revTypeMap: make(map[string]ref.Type), +// Copy copies the current state of the registry into its own memory space. +func (p *Registry) Copy() *Registry { + copy := &Registry{ + revTypeMap: make(map[string]*Type), pbdb: p.pbdb.Copy(), } for k, v := range p.revTypeMap { @@ -99,7 +149,8 @@ func (p *protoTypeRegistry) Copy() ref.TypeRegistry { return copy } -func (p *protoTypeRegistry) EnumValue(enumName string) ref.Val { +// EnumValue returns the numeric value of the given enum value name. +func (p *Registry) EnumValue(enumName string) ref.Val { enumVal, found := p.pbdb.DescribeEnum(enumName) if !found { return NewErr("unknown enum name '%s'", enumName) @@ -107,9 +158,12 @@ func (p *protoTypeRegistry) EnumValue(enumName string) ref.Val { return Int(enumVal.Value()) } -func (p *protoTypeRegistry) FindFieldType(messageType string, - fieldName string) (*ref.FieldType, bool) { - msgType, found := p.pbdb.DescribeType(messageType) +// FindFieldType returns the field type for a checked type value. Returns false if +// the field could not be found. +// +// Deprecated: use FindStructFieldType +func (p *Registry) FindFieldType(structType, fieldName string) (*ref.FieldType, bool) { + msgType, found := p.pbdb.DescribeType(structType) if !found { return nil, false } @@ -118,15 +172,49 @@ func (p *protoTypeRegistry) FindFieldType(messageType string, return nil, false } return &ref.FieldType{ - Type: field.CheckedType(), - IsSet: field.IsSet, - GetFrom: field.GetFrom}, - true + Type: field.CheckedType(), + IsSet: field.IsSet, + GetFrom: field.GetFrom}, true } -func (p *protoTypeRegistry) FindIdent(identName string) (ref.Val, bool) { +// FindStructFieldNames returns the set of field names for the given struct type, +// if the type exists in the registry. +func (p *Registry) FindStructFieldNames(structType string) ([]string, bool) { + msgType, found := p.pbdb.DescribeType(structType) + if !found { + return []string{}, false + } + fieldMap := msgType.FieldMap() + fields := make([]string, len(fieldMap)) + idx := 0 + for f := range fieldMap { + fields[idx] = f + idx++ + } + return fields, true +} + +// FindStructFieldType returns the field type for a checked type value. Returns +// false if the field could not be found. 
+func (p *Registry) FindStructFieldType(structType, fieldName string) (*FieldType, bool) { + msgType, found := p.pbdb.DescribeType(structType) + if !found { + return nil, false + } + field, found := msgType.FieldByName(fieldName) + if !found { + return nil, false + } + return &FieldType{ + Type: fieldDescToCELType(field), + IsSet: field.IsSet, + GetFrom: field.GetFrom}, true +} + +// FindIdent takes a qualified identifier name and returns a ref.Val if one exists. +func (p *Registry) FindIdent(identName string) (ref.Val, bool) { if t, found := p.revTypeMap[identName]; found { - return t.(ref.Val), true + return t, true } if enumVal, found := p.pbdb.DescribeEnum(identName); found { return Int(enumVal.Value()), true @@ -134,24 +222,50 @@ func (p *protoTypeRegistry) FindIdent(identName string) (ref.Val, bool) { return nil, false } -func (p *protoTypeRegistry) FindType(typeName string) (*exprpb.Type, bool) { - if _, found := p.pbdb.DescribeType(typeName); !found { +// FindType looks up the Type given a qualified typeName. Returns false if not found. +// +// Deprecated: use FindStructType +func (p *Registry) FindType(structType string) (*exprpb.Type, bool) { + if _, found := p.pbdb.DescribeType(structType); !found { return nil, false } - if typeName != "" && typeName[0] == '.' { - typeName = typeName[1:] + if structType != "" && structType[0] == '.' { + structType = structType[1:] } return &exprpb.Type{ TypeKind: &exprpb.Type_Type{ Type: &exprpb.Type{ TypeKind: &exprpb.Type_MessageType{ - MessageType: typeName}}}}, true + MessageType: structType}}}}, true +} + +// FindStructType returns the Type give a qualified type name. +// +// For historical reasons, only struct types are expected to be returned through this +// method, and the type values are expected to be wrapped in a TypeType instance using +// TypeTypeWithParam(). +// +// Returns false if not found. +func (p *Registry) FindStructType(structType string) (*Type, bool) { + if _, found := p.pbdb.DescribeType(structType); !found { + return nil, false + } + if structType != "" && structType[0] == '.' { + structType = structType[1:] + } + return NewTypeTypeWithParam(NewObjectType(structType)), true } -func (p *protoTypeRegistry) NewValue(typeName string, fields map[string]ref.Val) ref.Val { - td, found := p.pbdb.DescribeType(typeName) +// NewValue creates a new type value from a qualified name and map of field +// name to value. +// +// Note, for each value, the Val.ConvertToNative function will be invoked +// to convert the Val to the field's native type. If an error occurs during +// conversion, the NewValue will be a types.Err. +func (p *Registry) NewValue(structType string, fields map[string]ref.Val) ref.Val { + td, found := p.pbdb.DescribeType(structType) if !found { - return NewErr("unknown type '%s'", typeName) + return NewErr("unknown type '%s'", structType) } msg := td.New() fieldMap := td.FieldMap() @@ -168,7 +282,8 @@ func (p *protoTypeRegistry) NewValue(typeName string, fields map[string]ref.Val) return p.NativeToValue(msg.Interface()) } -func (p *protoTypeRegistry) RegisterDescriptor(fileDesc protoreflect.FileDescriptor) error { +// RegisterDescriptor registers the contents of a protocol buffer `FileDescriptor`. 
+func (p *Registry) RegisterDescriptor(fileDesc protoreflect.FileDescriptor) error { fd, err := p.pbdb.RegisterDescriptor(fileDesc) if err != nil { return err @@ -176,7 +291,8 @@ func (p *protoTypeRegistry) RegisterDescriptor(fileDesc protoreflect.FileDescrip return p.registerAllTypes(fd) } -func (p *protoTypeRegistry) RegisterMessage(message proto.Message) error { +// RegisterMessage registers a protocol buffer message and its dependencies. +func (p *Registry) RegisterMessage(message proto.Message) error { fd, err := p.pbdb.RegisterMessage(message) if err != nil { return err @@ -184,11 +300,32 @@ func (p *protoTypeRegistry) RegisterMessage(message proto.Message) error { return p.registerAllTypes(fd) } -func (p *protoTypeRegistry) RegisterType(types ...ref.Type) error { +// RegisterType registers a type value with the provider which ensures the provider is aware of how to +// map the type to an identifier. +// +// If the `ref.Type` value is a `*types.Type` it will be registered directly by its runtime type name. +// If the `ref.Type` value is not a `*types.Type` instance, a `*types.Type` instance which reflects the +// traits present on the input and the runtime type name. By default this foreign type will be treated +// as a types.StructKind. To avoid potential issues where the `ref.Type` values does not match the +// generated `*types.Type` instance, consider always using the `*types.Type` to represent type extensions +// to CEL, even when they're not based on protobuf types. +func (p *Registry) RegisterType(types ...ref.Type) error { for _, t := range types { - p.revTypeMap[t.TypeName()] = t + celType := maybeForeignType(t) + existing, found := p.revTypeMap[t.TypeName()] + if !found { + p.revTypeMap[t.TypeName()] = celType + continue + } + if !existing.IsEquivalentType(celType) { + return fmt.Errorf("type registration conflict. found: %v, input: %v", existing, celType) + } + if existing.traitMask != celType.traitMask { + return fmt.Errorf( + "type registered with conflicting traits: %v with traits %v, input: %v", + existing.TypeName(), existing.traitMask, celType.traitMask) + } } - // TODO: generate an error when the type name is registered more than once. return nil } @@ -196,7 +333,7 @@ func (p *protoTypeRegistry) RegisterType(types ...ref.Type) error { // providing support for custom proto-based types. // // This method should be the inverse of ref.Val.ConvertToNative. -func (p *protoTypeRegistry) NativeToValue(value any) ref.Val { +func (p *Registry) NativeToValue(value any) ref.Val { if val, found := nativeToValue(p, value); found { return val } @@ -218,7 +355,7 @@ func (p *protoTypeRegistry) NativeToValue(value any) ref.Val { if !found { return NewErr("unknown type: '%s'", typeName) } - return NewObject(p, td, typeVal.(*TypeValue), v) + return NewObject(p, td, typeVal, v) case *pb.Map: return NewProtoMap(p, v) case protoreflect.List: @@ -231,8 +368,13 @@ func (p *protoTypeRegistry) NativeToValue(value any) ref.Val { return UnsupportedRefValConversionErr(value) } -func (p *protoTypeRegistry) registerAllTypes(fd *pb.FileDescription) error { +func (p *Registry) registerAllTypes(fd *pb.FileDescription) error { for _, typeName := range fd.GetTypeNames() { + // skip well-known type names since they're automatically sanitized + // during NewObjectType() calls. 
+ if _, found := checkedWellKnowns[typeName]; found { + continue + } err := p.RegisterType(NewObjectTypeValue(typeName)) if err != nil { return err @@ -241,6 +383,28 @@ func (p *protoTypeRegistry) registerAllTypes(fd *pb.FileDescription) error { return nil } +func fieldDescToCELType(field *pb.FieldDescription) *Type { + if field.IsMap() { + return NewMapType( + singularFieldDescToCELType(field.KeyType), + singularFieldDescToCELType(field.ValueType)) + } + if field.IsList() { + return NewListType(singularFieldDescToCELType(field)) + } + return singularFieldDescToCELType(field) +} + +func singularFieldDescToCELType(field *pb.FieldDescription) *Type { + if field.IsMessage() { + return NewObjectType(string(field.Descriptor().Message().FullName())) + } + if field.IsEnum() { + return IntType + } + return ProtoCELPrimitives[field.ProtoKind()] +} + // defaultTypeAdapter converts go native types to CEL values. type defaultTypeAdapter struct{} @@ -259,7 +423,7 @@ func (a *defaultTypeAdapter) NativeToValue(value any) ref.Val { // nativeToValue returns the converted (ref.Val, true) of a conversion is found, // otherwise (nil, false) -func nativeToValue(a ref.TypeAdapter, value any) (ref.Val, bool) { +func nativeToValue(a Adapter, value any) (ref.Val, bool) { switch v := value.(type) { case nil: return NullValue, true @@ -547,3 +711,24 @@ func fieldTypeConversionError(field *pb.FieldDescription, err error) error { msgName := field.Descriptor().ContainingMessage().FullName() return fmt.Errorf("field type conversion error for %v.%v value type: %v", msgName, field.Name(), err) } + +var ( + // ProtoCELPrimitives provides a map from the protoreflect Kind to the equivalent CEL type. + ProtoCELPrimitives = map[protoreflect.Kind]*Type{ + protoreflect.BoolKind: BoolType, + protoreflect.BytesKind: BytesType, + protoreflect.DoubleKind: DoubleType, + protoreflect.FloatKind: DoubleType, + protoreflect.Int32Kind: IntType, + protoreflect.Int64Kind: IntType, + protoreflect.Sint32Kind: IntType, + protoreflect.Sint64Kind: IntType, + protoreflect.Uint32Kind: UintType, + protoreflect.Uint64Kind: UintType, + protoreflect.Fixed32Kind: UintType, + protoreflect.Fixed64Kind: UintType, + protoreflect.Sfixed32Kind: IntType, + protoreflect.Sfixed64Kind: IntType, + protoreflect.StringKind: StringType, + } +) diff --git a/vendor/github.com/google/cel-go/common/types/ref/provider.go b/vendor/github.com/google/cel-go/common/types/ref/provider.go index 7eabbb9ca..b9820023d 100644 --- a/vendor/github.com/google/cel-go/common/types/ref/provider.go +++ b/vendor/github.com/google/cel-go/common/types/ref/provider.go @@ -23,34 +23,34 @@ import ( // TypeProvider specifies functions for creating new object instances and for // resolving enum values by name. +// +// Deprecated: use types.Provider type TypeProvider interface { // EnumValue returns the numeric value of the given enum value name. EnumValue(enumName string) Val - // FindIdent takes a qualified identifier name and returns a Value if one - // exists. + // FindIdent takes a qualified identifier name and returns a Value if one exists. FindIdent(identName string) (Val, bool) - // FindType looks up the Type given a qualified typeName. Returns false - // if not found. - // - // Used during type-checking only. + // FindType looks up the Type given a qualified typeName. Returns false if not found. FindType(typeName string) (*exprpb.Type, bool) - // FieldFieldType returns the field type for a checked type value. Returns - // false if the field could not be found. 
- FindFieldType(messageType string, fieldName string) (*FieldType, bool) + // FieldFieldType returns the field type for a checked type value. Returns false if + // the field could not be found. + FindFieldType(messageType, fieldName string) (*FieldType, bool) - // NewValue creates a new type value from a qualified name and map of field - // name to value. + // NewValue creates a new type value from a qualified name and map of field name + // to value. // - // Note, for each value, the Val.ConvertToNative function will be invoked - // to convert the Val to the field's native type. If an error occurs during - // conversion, the NewValue will be a types.Err. + // Note, for each value, the Val.ConvertToNative function will be invoked to convert + // the Val to the field's native type. If an error occurs during conversion, the + // NewValue will be a types.Err. NewValue(typeName string, fields map[string]Val) Val } // TypeAdapter converts native Go values of varying type and complexity to equivalent CEL values. +// +// Deprecated: use types.Adapter type TypeAdapter interface { // NativeToValue converts the input `value` to a CEL `ref.Val`. NativeToValue(value any) Val @@ -60,6 +60,8 @@ type TypeAdapter interface { // implementations support type-customization, so these features are optional. However, a // `TypeRegistry` should be a `TypeProvider` and a `TypeAdapter` to ensure that types // which are registered can be converted to CEL representations. +// +// Deprecated: use types.Registry type TypeRegistry interface { TypeAdapter TypeProvider @@ -76,15 +78,14 @@ type TypeRegistry interface { // If a type is provided more than once with an alternative definition, the // call will result in an error. RegisterType(types ...Type) error - - // Copy the TypeRegistry and return a new registry whose mutable state is isolated. - Copy() TypeRegistry } // FieldType represents a field's type value and whether that field supports // presence detection. +// +// Deprecated: use types.FieldType type FieldType struct { - // Type of the field. + // Type of the field as a protobuf type value. Type *exprpb.Type // IsSet indicates whether the field is set on an input object. diff --git a/vendor/github.com/google/cel-go/common/types/string.go b/vendor/github.com/google/cel-go/common/types/string.go index a65cc14e4..028e6824d 100644 --- a/vendor/github.com/google/cel-go/common/types/string.go +++ b/vendor/github.com/google/cel-go/common/types/string.go @@ -24,7 +24,6 @@ import ( "github.com/google/cel-go/common/overloads" "github.com/google/cel-go/common/types/ref" - "github.com/google/cel-go/common/types/traits" anypb "google.golang.org/protobuf/types/known/anypb" structpb "google.golang.org/protobuf/types/known/structpb" @@ -36,18 +35,10 @@ import ( type String string var ( - // StringType singleton. 
- StringType = NewTypeValue("string", - traits.AdderType, - traits.ComparerType, - traits.MatcherType, - traits.ReceiverType, - traits.SizerType) - - stringOneArgOverloads = map[string]func(String, ref.Val) ref.Val{ - overloads.Contains: stringContains, - overloads.EndsWith: stringEndsWith, - overloads.StartsWith: stringStartsWith, + stringOneArgOverloads = map[string]func(ref.Val, ref.Val) ref.Val{ + overloads.Contains: StringContains, + overloads.EndsWith: StringEndsWith, + overloads.StartsWith: StringStartsWith, } stringWrapperType = reflect.TypeOf(&wrapperspb.StringValue{}) @@ -198,26 +189,41 @@ func (s String) Value() any { return string(s) } -func stringContains(s String, sub ref.Val) ref.Val { +// StringContains returns whether the string contains a substring. +func StringContains(s, sub ref.Val) ref.Val { + str, ok := s.(String) + if !ok { + return MaybeNoSuchOverloadErr(s) + } subStr, ok := sub.(String) if !ok { return MaybeNoSuchOverloadErr(sub) } - return Bool(strings.Contains(string(s), string(subStr))) + return Bool(strings.Contains(string(str), string(subStr))) } -func stringEndsWith(s String, suf ref.Val) ref.Val { +// StringEndsWith returns whether the target string contains the input suffix. +func StringEndsWith(s, suf ref.Val) ref.Val { + str, ok := s.(String) + if !ok { + return MaybeNoSuchOverloadErr(s) + } sufStr, ok := suf.(String) if !ok { return MaybeNoSuchOverloadErr(suf) } - return Bool(strings.HasSuffix(string(s), string(sufStr))) + return Bool(strings.HasSuffix(string(str), string(sufStr))) } -func stringStartsWith(s String, pre ref.Val) ref.Val { +// StringStartsWith returns whether the target string contains the input prefix. +func StringStartsWith(s, pre ref.Val) ref.Val { + str, ok := s.(String) + if !ok { + return MaybeNoSuchOverloadErr(s) + } preStr, ok := pre.(String) if !ok { return MaybeNoSuchOverloadErr(pre) } - return Bool(strings.HasPrefix(string(s), string(preStr))) + return Bool(strings.HasPrefix(string(str), string(preStr))) } diff --git a/vendor/github.com/google/cel-go/common/types/timestamp.go b/vendor/github.com/google/cel-go/common/types/timestamp.go index c784f2e54..33acdea8e 100644 --- a/vendor/github.com/google/cel-go/common/types/timestamp.go +++ b/vendor/github.com/google/cel-go/common/types/timestamp.go @@ -23,7 +23,6 @@ import ( "github.com/google/cel-go/common/overloads" "github.com/google/cel-go/common/types/ref" - "github.com/google/cel-go/common/types/traits" anypb "google.golang.org/protobuf/types/known/anypb" structpb "google.golang.org/protobuf/types/known/structpb" @@ -53,15 +52,6 @@ const ( maxUnixTime int64 = 253402300799 ) -var ( - // TimestampType singleton. - TimestampType = NewTypeValue("google.protobuf.Timestamp", - traits.AdderType, - traits.ComparerType, - traits.ReceiverType, - traits.SubtractorType) -) - // Add implements traits.Adder.Add. func (t Timestamp) Add(other ref.Val) ref.Val { switch other.Type() { diff --git a/vendor/github.com/google/cel-go/common/types/type.go b/vendor/github.com/google/cel-go/common/types/type.go deleted file mode 100644 index 164a46050..000000000 --- a/vendor/github.com/google/cel-go/common/types/type.go +++ /dev/null @@ -1,102 +0,0 @@ -// Copyright 2018 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package types - -import ( - "fmt" - "reflect" - - "github.com/google/cel-go/common/types/ref" - "github.com/google/cel-go/common/types/traits" -) - -var ( - // TypeType is the type of a TypeValue. - TypeType = NewTypeValue("type") -) - -// TypeValue is an instance of a Value that describes a value's type. -type TypeValue struct { - name string - traitMask int -} - -// NewTypeValue returns *TypeValue which is both a ref.Type and ref.Val. -func NewTypeValue(name string, traits ...int) *TypeValue { - traitMask := 0 - for _, trait := range traits { - traitMask |= trait - } - return &TypeValue{ - name: name, - traitMask: traitMask} -} - -// NewObjectTypeValue returns a *TypeValue based on the input name, which is -// annotated with the traits relevant to all objects. -func NewObjectTypeValue(name string) *TypeValue { - return NewTypeValue(name, - traits.FieldTesterType, - traits.IndexerType) -} - -// ConvertToNative implements ref.Val.ConvertToNative. -func (t *TypeValue) ConvertToNative(typeDesc reflect.Type) (any, error) { - // TODO: replace the internal type representation with a proto-value. - return nil, fmt.Errorf("type conversion not supported for 'type'") -} - -// ConvertToType implements ref.Val.ConvertToType. -func (t *TypeValue) ConvertToType(typeVal ref.Type) ref.Val { - switch typeVal { - case TypeType: - return TypeType - case StringType: - return String(t.TypeName()) - } - return NewErr("type conversion error from '%s' to '%s'", TypeType, typeVal) -} - -// Equal implements ref.Val.Equal. -func (t *TypeValue) Equal(other ref.Val) ref.Val { - otherType, ok := other.(ref.Type) - return Bool(ok && t.TypeName() == otherType.TypeName()) -} - -// HasTrait indicates whether the type supports the given trait. -// Trait codes are defined in the traits package, e.g. see traits.AdderType. -func (t *TypeValue) HasTrait(trait int) bool { - return trait&t.traitMask == trait -} - -// String implements fmt.Stringer. -func (t *TypeValue) String() string { - return t.name -} - -// Type implements ref.Val.Type. -func (t *TypeValue) Type() ref.Type { - return TypeType -} - -// TypeName gives the type's name as a string. -func (t *TypeValue) TypeName() string { - return t.name -} - -// Value implements ref.Val.Value. -func (t *TypeValue) Value() any { - return t.name -} diff --git a/vendor/github.com/google/cel-go/common/types/types.go b/vendor/github.com/google/cel-go/common/types/types.go new file mode 100644 index 000000000..76624eefd --- /dev/null +++ b/vendor/github.com/google/cel-go/common/types/types.go @@ -0,0 +1,806 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +package types + +import ( + "fmt" + "reflect" + "strings" + + chkdecls "github.com/google/cel-go/checker/decls" + "github.com/google/cel-go/common/types/ref" + "github.com/google/cel-go/common/types/traits" + + exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" +) + +// Kind indicates a CEL type's kind which is used to differentiate quickly between simple +// and complex types. +type Kind uint + +const ( + // UnspecifiedKind is returned when the type is nil or its kind is not specified. + UnspecifiedKind Kind = iota + + // DynKind represents a dynamic type. This kind only exists at type-check time. + DynKind + + // AnyKind represents a google.protobuf.Any type. This kind only exists at type-check time. + // Prefer DynKind to AnyKind as AnyKind has a specific meaning which is based on protobuf + // well-known types. + AnyKind + + // BoolKind represents a boolean type. + BoolKind + + // BytesKind represents a bytes type. + BytesKind + + // DoubleKind represents a double type. + DoubleKind + + // DurationKind represents a CEL duration type. + DurationKind + + // ErrorKind represents a CEL error type. + ErrorKind + + // IntKind represents an integer type. + IntKind + + // ListKind represents a list type. + ListKind + + // MapKind represents a map type. + MapKind + + // NullTypeKind represents a null type. + NullTypeKind + + // OpaqueKind represents an abstract type which has no accessible fields. + OpaqueKind + + // StringKind represents a string type. + StringKind + + // StructKind represents a structured object with typed fields. + StructKind + + // TimestampKind represents a a CEL time type. + TimestampKind + + // TypeKind represents the CEL type. + TypeKind + + // TypeParamKind represents a parameterized type whose type name will be resolved at type-check time, if possible. + TypeParamKind + + // UintKind represents a uint type. + UintKind + + // UnknownKind represents an unknown value type. + UnknownKind +) + +var ( + // AnyType represents the google.protobuf.Any type. + AnyType = &Type{ + kind: AnyKind, + runtimeTypeName: "google.protobuf.Any", + traitMask: traits.FieldTesterType | + traits.IndexerType, + } + // BoolType represents the bool type. + BoolType = &Type{ + kind: BoolKind, + runtimeTypeName: "bool", + traitMask: traits.ComparerType | + traits.NegatorType, + } + // BytesType represents the bytes type. + BytesType = &Type{ + kind: BytesKind, + runtimeTypeName: "bytes", + traitMask: traits.AdderType | + traits.ComparerType | + traits.SizerType, + } + // DoubleType represents the double type. + DoubleType = &Type{ + kind: DoubleKind, + runtimeTypeName: "double", + traitMask: traits.AdderType | + traits.ComparerType | + traits.DividerType | + traits.MultiplierType | + traits.NegatorType | + traits.SubtractorType, + } + // DurationType represents the CEL duration type. + DurationType = &Type{ + kind: DurationKind, + runtimeTypeName: "google.protobuf.Duration", + traitMask: traits.AdderType | + traits.ComparerType | + traits.NegatorType | + traits.ReceiverType | + traits.SubtractorType, + } + // DynType represents a dynamic CEL type whose type will be determined at runtime from context. + DynType = &Type{ + kind: DynKind, + runtimeTypeName: "dyn", + } + // ErrorType represents a CEL error value. + ErrorType = &Type{ + kind: ErrorKind, + runtimeTypeName: "error", + } + // IntType represents the int type. 
+ IntType = &Type{ + kind: IntKind, + runtimeTypeName: "int", + traitMask: traits.AdderType | + traits.ComparerType | + traits.DividerType | + traits.ModderType | + traits.MultiplierType | + traits.NegatorType | + traits.SubtractorType, + } + // ListType represents the runtime list type. + ListType = NewListType(nil) + // MapType represents the runtime map type. + MapType = NewMapType(nil, nil) + // NullType represents the type of a null value. + NullType = &Type{ + kind: NullTypeKind, + runtimeTypeName: "null_type", + } + // StringType represents the string type. + StringType = &Type{ + kind: StringKind, + runtimeTypeName: "string", + traitMask: traits.AdderType | + traits.ComparerType | + traits.MatcherType | + traits.ReceiverType | + traits.SizerType, + } + // TimestampType represents the time type. + TimestampType = &Type{ + kind: TimestampKind, + runtimeTypeName: "google.protobuf.Timestamp", + traitMask: traits.AdderType | + traits.ComparerType | + traits.ReceiverType | + traits.SubtractorType, + } + // TypeType represents a CEL type + TypeType = &Type{ + kind: TypeKind, + runtimeTypeName: "type", + } + // UintType represents a uint type. + UintType = &Type{ + kind: UintKind, + runtimeTypeName: "uint", + traitMask: traits.AdderType | + traits.ComparerType | + traits.DividerType | + traits.ModderType | + traits.MultiplierType | + traits.SubtractorType, + } + // UnknownType represents an unknown value type. + UnknownType = &Type{ + kind: UnknownKind, + runtimeTypeName: "unknown", + } +) + +var _ ref.Type = &Type{} +var _ ref.Val = &Type{} + +// Type holds a reference to a runtime type with an optional type-checked set of type parameters. +type Type struct { + // kind indicates general category of the type. + kind Kind + + // parameters holds the optional type-checked set of type Parameters that are used during static analysis. + parameters []*Type + + // runtimeTypeName indicates the runtime type name of the type. + runtimeTypeName string + + // isAssignableType function determines whether one type is assignable to this type. + // A nil value for the isAssignableType function falls back to equality of kind, runtimeType, and parameters. + isAssignableType func(other *Type) bool + + // isAssignableRuntimeType function determines whether the runtime type (with erasure) is assignable to this type. + // A nil value for the isAssignableRuntimeType function falls back to the equality of the type or type name. + isAssignableRuntimeType func(other ref.Val) bool + + // traitMask is a mask of flags which indicate the capabilities of the type. + traitMask int +} + +// ConvertToNative implements ref.Val.ConvertToNative. +func (t *Type) ConvertToNative(typeDesc reflect.Type) (any, error) { + return nil, fmt.Errorf("type conversion not supported for 'type'") +} + +// ConvertToType implements ref.Val.ConvertToType. +func (t *Type) ConvertToType(typeVal ref.Type) ref.Val { + switch typeVal { + case TypeType: + return TypeType + case StringType: + return String(t.TypeName()) + } + return NewErr("type conversion error from '%s' to '%s'", TypeType, typeVal) +} + +// Equal indicates whether two types have the same runtime type name. +// +// The name Equal is a bit of a misnomer, but for historical reasons, this is the +// runtime behavior. For a more accurate definition see IsType(). +func (t *Type) Equal(other ref.Val) ref.Val { + otherType, ok := other.(ref.Type) + return Bool(ok && t.TypeName() == otherType.TypeName()) +} + +// HasTrait implements the ref.Type interface method. 
+func (t *Type) HasTrait(trait int) bool { + return trait&t.traitMask == trait +} + +// IsExactType indicates whether the two types are exactly the same. This check also verifies type parameter type names. +func (t *Type) IsExactType(other *Type) bool { + return t.isTypeInternal(other, true) +} + +// IsEquivalentType indicates whether two types are equivalent. This check ignores type parameter type names. +func (t *Type) IsEquivalentType(other *Type) bool { + return t.isTypeInternal(other, false) +} + +// Kind indicates general category of the type. +func (t *Type) Kind() Kind { + if t == nil { + return UnspecifiedKind + } + return t.kind +} + +// isTypeInternal checks whether the two types are equivalent or exactly the same based on the checkTypeParamName flag. +func (t *Type) isTypeInternal(other *Type, checkTypeParamName bool) bool { + if t == nil { + return false + } + if t == other { + return true + } + if t.Kind() != other.Kind() || len(t.Parameters()) != len(other.Parameters()) { + return false + } + if (checkTypeParamName || t.Kind() != TypeParamKind) && t.TypeName() != other.TypeName() { + return false + } + for i, p := range t.Parameters() { + if !p.isTypeInternal(other.Parameters()[i], checkTypeParamName) { + return false + } + } + return true +} + +// IsAssignableType determines whether the current type is type-check assignable from the input fromType. +func (t *Type) IsAssignableType(fromType *Type) bool { + if t == nil { + return false + } + if t.isAssignableType != nil { + return t.isAssignableType(fromType) + } + return t.defaultIsAssignableType(fromType) +} + +// IsAssignableRuntimeType determines whether the current type is runtime assignable from the input runtimeType. +// +// At runtime, parameterized types are erased and so a function which type-checks to support a map(string, string) +// will have a runtime assignable type of a map. +func (t *Type) IsAssignableRuntimeType(val ref.Val) bool { + if t == nil { + return false + } + if t.isAssignableRuntimeType != nil { + return t.isAssignableRuntimeType(val) + } + return t.defaultIsAssignableRuntimeType(val) +} + +// Parameters returns the list of type parameters if set. +// +// For ListKind, Parameters()[0] represents the list element type +// For MapKind, Parameters()[0] represents the map key type, and Parameters()[1] represents the map +// value type. +func (t *Type) Parameters() []*Type { + if t == nil { + return emptyParams + } + return t.parameters +} + +// DeclaredTypeName indicates the fully qualified and parameterized type-check type name. +func (t *Type) DeclaredTypeName() string { + // if the type itself is neither null, nor dyn, but is assignable to null, then it's a wrapper type. + if t.Kind() != NullTypeKind && !t.isDyn() && t.IsAssignableType(NullType) { + return fmt.Sprintf("wrapper(%s)", t.TypeName()) + } + return t.TypeName() +} + +// Type implements the ref.Val interface method. +func (t *Type) Type() ref.Type { + return TypeType +} + +// Value implements the ref.Val interface method. +func (t *Type) Value() any { + return t.TypeName() +} + +// TypeName returns the type-erased fully qualified runtime type name. +// +// TypeName implements the ref.Type interface method. +func (t *Type) TypeName() string { + if t == nil { + return "" + } + return t.runtimeTypeName +} + +// String returns a human-readable definition of the type name. 
+func (t *Type) String() string { + if len(t.Parameters()) == 0 { + return t.DeclaredTypeName() + } + params := make([]string, len(t.Parameters())) + for i, p := range t.Parameters() { + params[i] = p.String() + } + return fmt.Sprintf("%s(%s)", t.DeclaredTypeName(), strings.Join(params, ", ")) +} + +// isDyn indicates whether the type is dynamic in any way. +func (t *Type) isDyn() bool { + k := t.Kind() + return k == DynKind || k == AnyKind || k == TypeParamKind +} + +// defaultIsAssignableType provides the standard definition of what it means for one type to be assignable to another +// where any of the following may return a true result: +// - The from types are the same instance +// - The target type is dynamic +// - The fromType has the same kind and type name as the target type, and all parameters of the target type +// +// are IsAssignableType() from the parameters of the fromType. +func (t *Type) defaultIsAssignableType(fromType *Type) bool { + if t == fromType || t.isDyn() { + return true + } + if t.Kind() != fromType.Kind() || + t.TypeName() != fromType.TypeName() || + len(t.Parameters()) != len(fromType.Parameters()) { + return false + } + for i, tp := range t.Parameters() { + fp := fromType.Parameters()[i] + if !tp.IsAssignableType(fp) { + return false + } + } + return true +} + +// defaultIsAssignableRuntimeType inspects the type and in the case of list and map elements, the key and element types +// to determine whether a ref.Val is assignable to the declared type for a function signature. +func (t *Type) defaultIsAssignableRuntimeType(val ref.Val) bool { + valType := val.Type() + // If the current type and value type don't agree, then return + if !(t.isDyn() || t.TypeName() == valType.TypeName()) { + return false + } + switch t.Kind() { + case ListKind: + elemType := t.Parameters()[0] + l := val.(traits.Lister) + if l.Size() == IntZero { + return true + } + it := l.Iterator() + elemVal := it.Next() + return elemType.IsAssignableRuntimeType(elemVal) + case MapKind: + keyType := t.Parameters()[0] + elemType := t.Parameters()[1] + m := val.(traits.Mapper) + if m.Size() == IntZero { + return true + } + it := m.Iterator() + keyVal := it.Next() + elemVal := m.Get(keyVal) + return keyType.IsAssignableRuntimeType(keyVal) && elemType.IsAssignableRuntimeType(elemVal) + } + return true +} + +// NewListType creates an instances of a list type value with the provided element type. +func NewListType(elemType *Type) *Type { + return &Type{ + kind: ListKind, + parameters: []*Type{elemType}, + runtimeTypeName: "list", + traitMask: traits.AdderType | + traits.ContainerType | + traits.IndexerType | + traits.IterableType | + traits.SizerType, + } +} + +// NewMapType creates an instance of a map type value with the provided key and value types. +func NewMapType(keyType, valueType *Type) *Type { + return &Type{ + kind: MapKind, + parameters: []*Type{keyType, valueType}, + runtimeTypeName: "map", + traitMask: traits.ContainerType | + traits.IndexerType | + traits.IterableType | + traits.SizerType, + } +} + +// NewNullableType creates an instance of a nullable type with the provided wrapped type. +// +// Note: only primitive types are supported as wrapped types. 
+func NewNullableType(wrapped *Type) *Type { + return &Type{ + kind: wrapped.Kind(), + parameters: wrapped.Parameters(), + runtimeTypeName: wrapped.TypeName(), + traitMask: wrapped.traitMask, + isAssignableType: func(other *Type) bool { + return NullType.IsAssignableType(other) || wrapped.IsAssignableType(other) + }, + isAssignableRuntimeType: func(other ref.Val) bool { + return NullType.IsAssignableRuntimeType(other) || wrapped.IsAssignableRuntimeType(other) + }, + } +} + +// NewOptionalType creates an abstract parameterized type instance corresponding to CEL's notion of optional. +func NewOptionalType(param *Type) *Type { + return NewOpaqueType("optional", param) +} + +// NewOpaqueType creates an abstract parameterized type with a given name. +func NewOpaqueType(name string, params ...*Type) *Type { + return &Type{ + kind: OpaqueKind, + parameters: params, + runtimeTypeName: name, + } +} + +// NewObjectType creates a type reference to an externally defined type, e.g. a protobuf message type. +// +// An object type is assumed to support field presence testing and field indexing. Additionally, the +// type may also indicate additional traits through the use of the optional traits vararg argument. +func NewObjectType(typeName string, traits ...int) *Type { + // Function sanitizes object types on the fly + if wkt, found := checkedWellKnowns[typeName]; found { + return wkt + } + traitMask := 0 + for _, trait := range traits { + traitMask |= trait + } + return &Type{ + kind: StructKind, + parameters: emptyParams, + runtimeTypeName: typeName, + traitMask: structTypeTraitMask | traitMask, + } +} + +// NewObjectTypeValue creates a type reference to an externally defined type. +// +// Deprecated: use cel.ObjectType(typeName) +func NewObjectTypeValue(typeName string) *Type { + return NewObjectType(typeName) +} + +// NewTypeValue creates an opaque type which has a set of optional type traits as defined in +// the common/types/traits package. +// +// Deprecated: use cel.ObjectType(typeName, traits) +func NewTypeValue(typeName string, traits ...int) *Type { + traitMask := 0 + for _, trait := range traits { + traitMask |= trait + } + return &Type{ + kind: StructKind, + parameters: emptyParams, + runtimeTypeName: typeName, + traitMask: traitMask, + } +} + +// NewTypeParamType creates a parameterized type instance. +func NewTypeParamType(paramName string) *Type { + return &Type{ + kind: TypeParamKind, + runtimeTypeName: paramName, + } +} + +// NewTypeTypeWithParam creates a type with a type parameter. +// Used for type-checking purposes, but equivalent to TypeType otherwise. +func NewTypeTypeWithParam(param *Type) *Type { + return &Type{ + kind: TypeKind, + runtimeTypeName: "type", + parameters: []*Type{param}, + } +} + +// TypeToExprType converts a CEL-native type representation to a protobuf CEL Type representation. 
+func TypeToExprType(t *Type) (*exprpb.Type, error) { + switch t.Kind() { + case AnyKind: + return chkdecls.Any, nil + case BoolKind: + return maybeWrapper(t, chkdecls.Bool), nil + case BytesKind: + return maybeWrapper(t, chkdecls.Bytes), nil + case DoubleKind: + return maybeWrapper(t, chkdecls.Double), nil + case DurationKind: + return chkdecls.Duration, nil + case DynKind: + return chkdecls.Dyn, nil + case ErrorKind: + return chkdecls.Error, nil + case IntKind: + return maybeWrapper(t, chkdecls.Int), nil + case ListKind: + if len(t.Parameters()) != 1 { + return nil, fmt.Errorf("invalid list, got %d parameters, wanted one", len(t.Parameters())) + } + et, err := TypeToExprType(t.Parameters()[0]) + if err != nil { + return nil, err + } + return chkdecls.NewListType(et), nil + case MapKind: + if len(t.Parameters()) != 2 { + return nil, fmt.Errorf("invalid map, got %d parameters, wanted two", len(t.Parameters())) + } + kt, err := TypeToExprType(t.Parameters()[0]) + if err != nil { + return nil, err + } + vt, err := TypeToExprType(t.Parameters()[1]) + if err != nil { + return nil, err + } + return chkdecls.NewMapType(kt, vt), nil + case NullTypeKind: + return chkdecls.Null, nil + case OpaqueKind: + params := make([]*exprpb.Type, len(t.Parameters())) + for i, p := range t.Parameters() { + pt, err := TypeToExprType(p) + if err != nil { + return nil, err + } + params[i] = pt + } + return chkdecls.NewAbstractType(t.TypeName(), params...), nil + case StringKind: + return maybeWrapper(t, chkdecls.String), nil + case StructKind: + return chkdecls.NewObjectType(t.TypeName()), nil + case TimestampKind: + return chkdecls.Timestamp, nil + case TypeParamKind: + return chkdecls.NewTypeParamType(t.TypeName()), nil + case TypeKind: + if len(t.Parameters()) == 1 { + p, err := TypeToExprType(t.Parameters()[0]) + if err != nil { + return nil, err + } + return chkdecls.NewTypeType(p), nil + } + return chkdecls.NewTypeType(nil), nil + case UintKind: + return maybeWrapper(t, chkdecls.Uint), nil + } + return nil, fmt.Errorf("missing type conversion to proto: %v", t) +} + +// ExprTypeToType converts a protobuf CEL type representation to a CEL-native type representation. 
+func ExprTypeToType(t *exprpb.Type) (*Type, error) { + switch t.GetTypeKind().(type) { + case *exprpb.Type_Dyn: + return DynType, nil + case *exprpb.Type_AbstractType_: + paramTypes := make([]*Type, len(t.GetAbstractType().GetParameterTypes())) + for i, p := range t.GetAbstractType().GetParameterTypes() { + pt, err := ExprTypeToType(p) + if err != nil { + return nil, err + } + paramTypes[i] = pt + } + return NewOpaqueType(t.GetAbstractType().GetName(), paramTypes...), nil + case *exprpb.Type_ListType_: + et, err := ExprTypeToType(t.GetListType().GetElemType()) + if err != nil { + return nil, err + } + return NewListType(et), nil + case *exprpb.Type_MapType_: + kt, err := ExprTypeToType(t.GetMapType().GetKeyType()) + if err != nil { + return nil, err + } + vt, err := ExprTypeToType(t.GetMapType().GetValueType()) + if err != nil { + return nil, err + } + return NewMapType(kt, vt), nil + case *exprpb.Type_MessageType: + return NewObjectType(t.GetMessageType()), nil + case *exprpb.Type_Null: + return NullType, nil + case *exprpb.Type_Primitive: + switch t.GetPrimitive() { + case exprpb.Type_BOOL: + return BoolType, nil + case exprpb.Type_BYTES: + return BytesType, nil + case exprpb.Type_DOUBLE: + return DoubleType, nil + case exprpb.Type_INT64: + return IntType, nil + case exprpb.Type_STRING: + return StringType, nil + case exprpb.Type_UINT64: + return UintType, nil + default: + return nil, fmt.Errorf("unsupported primitive type: %v", t) + } + case *exprpb.Type_TypeParam: + return NewTypeParamType(t.GetTypeParam()), nil + case *exprpb.Type_Type: + if t.GetType().GetTypeKind() != nil { + p, err := ExprTypeToType(t.GetType()) + if err != nil { + return nil, err + } + return NewTypeTypeWithParam(p), nil + } + return TypeType, nil + case *exprpb.Type_WellKnown: + switch t.GetWellKnown() { + case exprpb.Type_ANY: + return AnyType, nil + case exprpb.Type_DURATION: + return DurationType, nil + case exprpb.Type_TIMESTAMP: + return TimestampType, nil + default: + return nil, fmt.Errorf("unsupported well-known type: %v", t) + } + case *exprpb.Type_Wrapper: + t, err := ExprTypeToType(&exprpb.Type{TypeKind: &exprpb.Type_Primitive{Primitive: t.GetWrapper()}}) + if err != nil { + return nil, err + } + return NewNullableType(t), nil + case *exprpb.Type_Error: + return ErrorType, nil + default: + return nil, fmt.Errorf("unsupported type: %v", t) + } +} + +func maybeWrapper(t *Type, pbType *exprpb.Type) *exprpb.Type { + if t.IsAssignableType(NullType) { + return chkdecls.NewWrapperType(pbType) + } + return pbType +} + +func maybeForeignType(t ref.Type) *Type { + if celType, ok := t.(*Type); ok { + return celType + } + // Inspect the incoming type to determine its traits. The assumption will be that the incoming + // type does not have any field values; however, if the trait mask indicates that field testing + // and indexing are supported, the foreign type is marked as a struct. + traitMask := 0 + for _, trait := range allTraits { + if t.HasTrait(trait) { + traitMask |= trait + } + } + // Treat the value like a struct. If it has no fields, this is harmless to denote the type + // as such since it basically becomes an opaque type by convention. + return NewObjectType(t.TypeName(), traitMask) +} + +var ( + checkedWellKnowns = map[string]*Type{ + // Wrapper types. 
+ "google.protobuf.BoolValue": NewNullableType(BoolType), + "google.protobuf.BytesValue": NewNullableType(BytesType), + "google.protobuf.DoubleValue": NewNullableType(DoubleType), + "google.protobuf.FloatValue": NewNullableType(DoubleType), + "google.protobuf.Int64Value": NewNullableType(IntType), + "google.protobuf.Int32Value": NewNullableType(IntType), + "google.protobuf.UInt64Value": NewNullableType(UintType), + "google.protobuf.UInt32Value": NewNullableType(UintType), + "google.protobuf.StringValue": NewNullableType(StringType), + // Well-known types. + "google.protobuf.Any": AnyType, + "google.protobuf.Duration": DurationType, + "google.protobuf.Timestamp": TimestampType, + // Json types. + "google.protobuf.ListValue": NewListType(DynType), + "google.protobuf.NullValue": NullType, + "google.protobuf.Struct": NewMapType(StringType, DynType), + "google.protobuf.Value": DynType, + } + + emptyParams = []*Type{} + + allTraits = []int{ + traits.AdderType, + traits.ComparerType, + traits.ContainerType, + traits.DividerType, + traits.FieldTesterType, + traits.IndexerType, + traits.IterableType, + traits.IteratorType, + traits.MatcherType, + traits.ModderType, + traits.MultiplierType, + traits.NegatorType, + traits.ReceiverType, + traits.SizerType, + traits.SubtractorType, + } + + structTypeTraitMask = traits.FieldTesterType | traits.IndexerType +) diff --git a/vendor/github.com/google/cel-go/common/types/uint.go b/vendor/github.com/google/cel-go/common/types/uint.go index 615c7ec52..3257f9ade 100644 --- a/vendor/github.com/google/cel-go/common/types/uint.go +++ b/vendor/github.com/google/cel-go/common/types/uint.go @@ -21,7 +21,6 @@ import ( "strconv" "github.com/google/cel-go/common/types/ref" - "github.com/google/cel-go/common/types/traits" anypb "google.golang.org/protobuf/types/known/anypb" structpb "google.golang.org/protobuf/types/known/structpb" @@ -32,15 +31,6 @@ import ( type Uint uint64 var ( - // UintType singleton. - UintType = NewTypeValue("uint", - traits.AdderType, - traits.ComparerType, - traits.DividerType, - traits.ModderType, - traits.MultiplierType, - traits.SubtractorType) - uint32WrapperType = reflect.TypeOf(&wrapperspb.UInt32Value{}) uint64WrapperType = reflect.TypeOf(&wrapperspb.UInt64Value{}) diff --git a/vendor/github.com/google/cel-go/common/types/unknown.go b/vendor/github.com/google/cel-go/common/types/unknown.go index bc411c15b..9dd2b2579 100644 --- a/vendor/github.com/google/cel-go/common/types/unknown.go +++ b/vendor/github.com/google/cel-go/common/types/unknown.go @@ -15,52 +15,312 @@ package types import ( + "fmt" + "math" "reflect" + "sort" + "strings" + "unicode" "github.com/google/cel-go/common/types/ref" ) -// Unknown type implementation which collects expression ids which caused the -// current value to become unknown. -type Unknown []int64 - var ( - // UnknownType singleton. - UnknownType = NewTypeValue("unknown") + unspecifiedAttribute = &AttributeTrail{qualifierPath: []any{}} ) +// NewAttributeTrail creates a new simple attribute from a variable name. +func NewAttributeTrail(variable string) *AttributeTrail { + if variable == "" { + return unspecifiedAttribute + } + return &AttributeTrail{variable: variable} +} + +// AttributeTrail specifies a variable with an optional qualifier path. An attribute value is expected to +// correspond to an AbsoluteAttribute, meaning a field selection which starts with a top-level variable. +// +// The qualifer path elements adhere to the AttributeQualifier type constraint. 
+type AttributeTrail struct { + variable string + qualifierPath []any +} + +// Equal returns whether two attribute values have the same variable name and qualifier paths. +func (a *AttributeTrail) Equal(other *AttributeTrail) bool { + if a.Variable() != other.Variable() || len(a.QualifierPath()) != len(other.QualifierPath()) { + return false + } + for i, q := range a.QualifierPath() { + qual := other.QualifierPath()[i] + if !qualifiersEqual(q, qual) { + return false + } + } + return true +} + +func qualifiersEqual(a, b any) bool { + if a == b { + return true + } + switch numA := a.(type) { + case int64: + numB, ok := b.(uint64) + if !ok { + return false + } + return intUintEqual(numA, numB) + case uint64: + numB, ok := b.(int64) + if !ok { + return false + } + return intUintEqual(numB, numA) + default: + return false + } +} + +func intUintEqual(i int64, u uint64) bool { + if i < 0 || u > math.MaxInt64 { + return false + } + return i == int64(u) +} + +// Variable returns the variable name associated with the attribute. +func (a *AttributeTrail) Variable() string { + return a.variable +} + +// QualifierPath returns the optional set of qualifying fields or indices applied to the variable. +func (a *AttributeTrail) QualifierPath() []any { + return a.qualifierPath +} + +// String returns the string representation of the Attribute. +func (a *AttributeTrail) String() string { + if a.variable == "" { + return "" + } + var str strings.Builder + str.WriteString(a.variable) + for _, q := range a.qualifierPath { + switch q := q.(type) { + case bool, int64: + str.WriteString(fmt.Sprintf("[%v]", q)) + case uint64: + str.WriteString(fmt.Sprintf("[%vu]", q)) + case string: + if isIdentifierCharacter(q) { + str.WriteString(fmt.Sprintf(".%v", q)) + } else { + str.WriteString(fmt.Sprintf("[%q]", q)) + } + } + } + return str.String() +} + +func isIdentifierCharacter(str string) bool { + for _, c := range str { + if unicode.IsLetter(c) || unicode.IsDigit(c) || string(c) == "_" { + continue + } + return false + } + return true +} + +// AttributeQualifier constrains the possible types which may be used to qualify an attribute. +type AttributeQualifier interface { + bool | int64 | uint64 | string +} + +// QualifyAttribute qualifies an attribute using a valid AttributeQualifier type. +func QualifyAttribute[T AttributeQualifier](attr *AttributeTrail, qualifier T) *AttributeTrail { + attr.qualifierPath = append(attr.qualifierPath, qualifier) + return attr +} + +// Unknown type which collects expression ids which caused the current value to become unknown. +type Unknown struct { + attributeTrails map[int64][]*AttributeTrail +} + +// NewUnknown creates a new unknown at a given expression id for an attribute. +// +// If the attribute is nil, the attribute value will be the `unspecifiedAttribute`. +func NewUnknown(id int64, attr *AttributeTrail) *Unknown { + if attr == nil { + attr = unspecifiedAttribute + } + return &Unknown{ + attributeTrails: map[int64][]*AttributeTrail{id: {attr}}, + } +} + +// IDs returns the set of unknown expression ids contained by this value. +// +// Numeric identifiers are guaranteed to be in sorted order. +func (u *Unknown) IDs() []int64 { + ids := make(int64Slice, len(u.attributeTrails)) + i := 0 + for id := range u.attributeTrails { + ids[i] = id + i++ + } + ids.Sort() + return ids +} + +// GetAttributeTrails returns the attribute trails, if present, missing for a given expression id. 
+func (u *Unknown) GetAttributeTrails(id int64) ([]*AttributeTrail, bool) { + trails, found := u.attributeTrails[id] + return trails, found +} + +// Contains returns true if the input unknown is a subset of the current unknown. +func (u *Unknown) Contains(other *Unknown) bool { + for id, otherTrails := range other.attributeTrails { + trails, found := u.attributeTrails[id] + if !found || len(otherTrails) != len(trails) { + return false + } + for _, ot := range otherTrails { + found := false + for _, t := range trails { + if t.Equal(ot) { + found = true + break + } + } + if !found { + return false + } + } + } + return true +} + // ConvertToNative implements ref.Val.ConvertToNative. -func (u Unknown) ConvertToNative(typeDesc reflect.Type) (any, error) { +func (u *Unknown) ConvertToNative(typeDesc reflect.Type) (any, error) { return u.Value(), nil } // ConvertToType is an identity function since unknown values cannot be modified. -func (u Unknown) ConvertToType(typeVal ref.Type) ref.Val { +func (u *Unknown) ConvertToType(typeVal ref.Type) ref.Val { return u } // Equal is an identity function since unknown values cannot be modified. -func (u Unknown) Equal(other ref.Val) ref.Val { +func (u *Unknown) Equal(other ref.Val) ref.Val { return u } +// String implements the Stringer interface +func (u *Unknown) String() string { + var str strings.Builder + for id, attrs := range u.attributeTrails { + if str.Len() != 0 { + str.WriteString(", ") + } + if len(attrs) == 1 { + str.WriteString(fmt.Sprintf("%v (%d)", attrs[0], id)) + } else { + str.WriteString(fmt.Sprintf("%v (%d)", attrs, id)) + } + } + return str.String() +} + // Type implements ref.Val.Type. -func (u Unknown) Type() ref.Type { +func (u *Unknown) Type() ref.Type { return UnknownType } // Value implements ref.Val.Value. -func (u Unknown) Value() any { - return []int64(u) +func (u *Unknown) Value() any { + return u } -// IsUnknown returns whether the element ref.Type or ref.Val is equal to the -// UnknownType singleton. +// IsUnknown returns whether the element ref.Val is in instance of *types.Unknown func IsUnknown(val ref.Val) bool { switch val.(type) { - case Unknown: + case *Unknown: return true default: return false } } + +// MaybeMergeUnknowns determines whether an input value and another, possibly nil, unknown will produce +// an unknown result. +// +// If the input `val` is another Unknown, then the result will be the merge of the `val` and the input +// `unk`. If the `val` is not unknown, then the result will depend on whether the input `unk` is nil. +// If both values are non-nil and unknown, then the return value will be a merge of both unknowns. +func MaybeMergeUnknowns(val ref.Val, unk *Unknown) (*Unknown, bool) { + src, isUnk := val.(*Unknown) + if !isUnk { + if unk != nil { + return unk, true + } + return unk, false + } + return MergeUnknowns(src, unk), true +} + +// MergeUnknowns combines two unknown values into a new unknown value. 
+func MergeUnknowns(unk1, unk2 *Unknown) *Unknown { + if unk1 == nil { + return unk2 + } + if unk2 == nil { + return unk1 + } + out := &Unknown{ + attributeTrails: make(map[int64][]*AttributeTrail, len(unk1.attributeTrails)+len(unk2.attributeTrails)), + } + for id, ats := range unk1.attributeTrails { + out.attributeTrails[id] = ats + } + for id, ats := range unk2.attributeTrails { + existing, found := out.attributeTrails[id] + if !found { + out.attributeTrails[id] = ats + continue + } + + for _, at := range ats { + found := false + for _, et := range existing { + if at.Equal(et) { + found = true + break + } + } + if !found { + existing = append(existing, at) + } + } + out.attributeTrails[id] = existing + } + return out +} + +// int64Slice is an implementation of the sort.Interface +type int64Slice []int64 + +// Len returns the number of elements in the slice. +func (x int64Slice) Len() int { return len(x) } + +// Less indicates whether the value at index i is less than the value at index j. +func (x int64Slice) Less(i, j int) bool { return x[i] < x[j] } + +// Swap swaps the values at indices i and j in place. +func (x int64Slice) Swap(i, j int) { x[i], x[j] = x[j], x[i] } + +// Sort is a convenience method: x.Sort() calls Sort(x). +func (x int64Slice) Sort() { sort.Sort(x) } diff --git a/vendor/github.com/google/cel-go/common/types/util.go b/vendor/github.com/google/cel-go/common/types/util.go index a8e9afa9e..71662eee3 100644 --- a/vendor/github.com/google/cel-go/common/types/util.go +++ b/vendor/github.com/google/cel-go/common/types/util.go @@ -21,7 +21,7 @@ import ( // IsUnknownOrError returns whether the input element ref.Val is an ErrType or UnknownType. func IsUnknownOrError(val ref.Val) bool { switch val.(type) { - case Unknown, *Err: + case *Unknown, *Err: return true } return false diff --git a/vendor/github.com/google/cel-go/ext/BUILD.bazel b/vendor/github.com/google/cel-go/ext/BUILD.bazel index 4bcf8a283..570130229 100644 --- a/vendor/github.com/google/cel-go/ext/BUILD.bazel +++ b/vendor/github.com/google/cel-go/ext/BUILD.bazel @@ -7,8 +7,11 @@ package( go_library( name = "go_default_library", srcs = [ + "bindings.go", "encoders.go", + "formatting.go", "guards.go", + "lists.go", "math.go", "native.go", "protos.go", @@ -19,15 +22,14 @@ go_library( visibility = ["//visibility:public"], deps = [ "//cel:go_default_library", - "//checker/decls:go_default_library", - "//common:go_default_library", + "//checker:go_default_library", + "//common/ast:go_default_library", "//common/overloads:go_default_library", "//common/types:go_default_library", "//common/types/pb:go_default_library", "//common/types/ref:go_default_library", "//common/types/traits:go_default_library", "//interpreter:go_default_library", - "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library", "@org_golang_google_protobuf//proto:go_default_library", "@org_golang_google_protobuf//reflect/protoreflect:go_default_library", "@org_golang_google_protobuf//types/known/structpb", @@ -41,6 +43,7 @@ go_test( size = "small", srcs = [ "encoders_test.go", + "lists_test.go", "math_test.go", "native_test.go", "protos_test.go", @@ -53,14 +56,12 @@ go_test( deps = [ "//cel:go_default_library", "//checker:go_default_library", - "//common:go_default_library", "//common/types:go_default_library", "//common/types/ref:go_default_library", "//common/types/traits:go_default_library", "//test:go_default_library", "//test/proto2pb:go_default_library", "//test/proto3pb:go_default_library", - 
"@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library", "@org_golang_google_protobuf//proto:go_default_library", "@org_golang_google_protobuf//types/known/wrapperspb:go_default_library", "@org_golang_google_protobuf//encoding/protojson:go_default_library", diff --git a/vendor/github.com/google/cel-go/ext/README.md b/vendor/github.com/google/cel-go/ext/README.md index ef0eb2ab7..2fac0cb22 100644 --- a/vendor/github.com/google/cel-go/ext/README.md +++ b/vendor/github.com/google/cel-go/ext/README.md @@ -149,6 +149,23 @@ Example: proto.hasExt(msg, google.expr.proto2.test.int32_ext) // returns true || false +## Lists + +Extended functions for list manipulation. As a general note, all indices are +zero-based. + +### Slice + + +Returns a new sub-list using the indexes provided. + + .slice(, ) -> + +Examples: + + [1,2,3,4].slice(1, 3) // return [2, 3] + [1,2,3,4].slice(2, 4) // return [3 ,4] + ## Sets Sets provides set relationship tests. @@ -397,3 +414,17 @@ Examples: 'TacoCat'.upperAscii() // returns 'TACOCAT' 'TacoCÆt Xii'.upperAscii() // returns 'TACOCÆT XII' + +### Reverse + +Returns a new string whose characters are the same as the target string, only formatted in +reverse order. +This function relies on converting strings to rune arrays in order to reverse. +It can be located in Version 3 of strings. + + .reverse() -> + +Examples: + + 'gums'.reverse() // returns 'smug' + 'John Smith'.reverse() // returns 'htimS nhoJ' \ No newline at end of file diff --git a/vendor/github.com/google/cel-go/ext/bindings.go b/vendor/github.com/google/cel-go/ext/bindings.go index 9cc3c3efe..2c6cc627f 100644 --- a/vendor/github.com/google/cel-go/ext/bindings.go +++ b/vendor/github.com/google/cel-go/ext/bindings.go @@ -16,9 +16,8 @@ package ext import ( "github.com/google/cel-go/cel" - "github.com/google/cel-go/common" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" + "github.com/google/cel-go/common/ast" + "github.com/google/cel-go/common/types" ) // Bindings returns a cel.EnvOption to configure support for local variable @@ -62,7 +61,7 @@ func (celBindings) CompileOptions() []cel.EnvOption { return []cel.EnvOption{ cel.Macros( // cel.bind(var, , ) - cel.NewReceiverMacro(bindMacro, 3, celBind), + cel.ReceiverMacro(bindMacro, 3, celBind), ), } } @@ -71,30 +70,27 @@ func (celBindings) ProgramOptions() []cel.ProgramOption { return []cel.ProgramOption{} } -func celBind(meh cel.MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) { +func celBind(mef cel.MacroExprFactory, target ast.Expr, args []ast.Expr) (ast.Expr, *cel.Error) { if !macroTargetMatchesNamespace(celNamespace, target) { return nil, nil } varIdent := args[0] varName := "" - switch varIdent.GetExprKind().(type) { - case *exprpb.Expr_IdentExpr: - varName = varIdent.GetIdentExpr().GetName() + switch varIdent.Kind() { + case ast.IdentKind: + varName = varIdent.AsIdent() default: - return nil, &common.Error{ - Message: "cel.bind() variable names must be simple identifers", - Location: meh.OffsetLocation(varIdent.GetId()), - } + return nil, mef.NewError(varIdent.ID(), "cel.bind() variable names must be simple identifiers") } varInit := args[1] resultExpr := args[2] - return meh.Fold( + return mef.NewComprehension( + mef.NewList(), unusedIterVar, - meh.NewList(), varName, varInit, - meh.LiteralBool(false), - meh.Ident(varName), + mef.NewLiteral(types.False), + mef.NewIdent(varName), resultExpr, ), nil } diff --git a/vendor/github.com/google/cel-go/ext/encoders.go 
b/vendor/github.com/google/cel-go/ext/encoders.go index d9f9cb515..61ac0b777 100644 --- a/vendor/github.com/google/cel-go/ext/encoders.go +++ b/vendor/github.com/google/cel-go/ext/encoders.go @@ -16,7 +16,6 @@ package ext import ( "encoding/base64" - "reflect" "github.com/google/cel-go/cel" "github.com/google/cel-go/common/types" @@ -86,7 +85,3 @@ func base64DecodeString(str string) ([]byte, error) { func base64EncodeBytes(bytes []byte) (string, error) { return base64.StdEncoding.EncodeToString(bytes), nil } - -var ( - bytesListType = reflect.TypeOf([]byte{}) -) diff --git a/vendor/github.com/google/cel-go/ext/formatting.go b/vendor/github.com/google/cel-go/ext/formatting.go new file mode 100644 index 000000000..2f35b996c --- /dev/null +++ b/vendor/github.com/google/cel-go/ext/formatting.go @@ -0,0 +1,904 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ext + +import ( + "errors" + "fmt" + "math" + "sort" + "strconv" + "strings" + "unicode" + + "golang.org/x/text/language" + "golang.org/x/text/message" + + "github.com/google/cel-go/cel" + "github.com/google/cel-go/common/ast" + "github.com/google/cel-go/common/overloads" + "github.com/google/cel-go/common/types" + "github.com/google/cel-go/common/types/ref" + "github.com/google/cel-go/common/types/traits" +) + +type clauseImpl func(ref.Val, string) (string, error) + +func clauseForType(argType ref.Type) (clauseImpl, error) { + switch argType { + case types.IntType, types.UintType: + return formatDecimal, nil + case types.StringType, types.BytesType, types.BoolType, types.NullType, types.TypeType: + return FormatString, nil + case types.TimestampType, types.DurationType: + // special case to ensure timestamps/durations get printed as CEL literals + return func(arg ref.Val, locale string) (string, error) { + argStrVal := arg.ConvertToType(types.StringType) + argStr := argStrVal.Value().(string) + if arg.Type() == types.TimestampType { + return fmt.Sprintf("timestamp(%q)", argStr), nil + } + if arg.Type() == types.DurationType { + return fmt.Sprintf("duration(%q)", argStr), nil + } + return "", fmt.Errorf("cannot convert argument of type %s to timestamp/duration", arg.Type().TypeName()) + }, nil + case types.ListType: + return formatList, nil + case types.MapType: + return formatMap, nil + case types.DoubleType: + // avoid formatFixed so we can output a period as the decimal separator in order + // to always be a valid CEL literal + return func(arg ref.Val, locale string) (string, error) { + argDouble, ok := arg.Value().(float64) + if !ok { + return "", fmt.Errorf("couldn't convert %s to float64", arg.Type().TypeName()) + } + fmtStr := fmt.Sprintf("%%.%df", defaultPrecision) + return fmt.Sprintf(fmtStr, argDouble), nil + }, nil + case types.TypeType: + return func(arg ref.Val, locale string) (string, error) { + return fmt.Sprintf("type(%s)", arg.Value().(string)), nil + }, nil + default: + return nil, fmt.Errorf("no formatting function for %s", argType.TypeName()) + } +} + +func 
formatList(arg ref.Val, locale string) (string, error) { + argList := arg.(traits.Lister) + argIterator := argList.Iterator() + var listStrBuilder strings.Builder + _, err := listStrBuilder.WriteRune('[') + if err != nil { + return "", fmt.Errorf("error writing to list string: %w", err) + } + for argIterator.HasNext() == types.True { + member := argIterator.Next() + memberFormat, err := clauseForType(member.Type()) + if err != nil { + return "", err + } + unquotedStr, err := memberFormat(member, locale) + if err != nil { + return "", err + } + str := quoteForCEL(member, unquotedStr) + _, err = listStrBuilder.WriteString(str) + if err != nil { + return "", fmt.Errorf("error writing to list string: %w", err) + } + if argIterator.HasNext() == types.True { + _, err = listStrBuilder.WriteString(", ") + if err != nil { + return "", fmt.Errorf("error writing to list string: %w", err) + } + } + } + _, err = listStrBuilder.WriteRune(']') + if err != nil { + return "", fmt.Errorf("error writing to list string: %w", err) + } + return listStrBuilder.String(), nil +} + +func formatMap(arg ref.Val, locale string) (string, error) { + argMap := arg.(traits.Mapper) + argIterator := argMap.Iterator() + type mapPair struct { + key string + value string + } + argPairs := make([]mapPair, argMap.Size().Value().(int64)) + i := 0 + for argIterator.HasNext() == types.True { + key := argIterator.Next() + var keyFormat clauseImpl + switch key.Type() { + case types.StringType, types.BoolType: + keyFormat = FormatString + case types.IntType, types.UintType: + keyFormat = formatDecimal + default: + return "", fmt.Errorf("no formatting function for map key of type %s", key.Type().TypeName()) + } + unquotedKeyStr, err := keyFormat(key, locale) + if err != nil { + return "", err + } + keyStr := quoteForCEL(key, unquotedKeyStr) + value, found := argMap.Find(key) + if !found { + return "", fmt.Errorf("could not find key: %q", key) + } + valueFormat, err := clauseForType(value.Type()) + if err != nil { + return "", err + } + unquotedValueStr, err := valueFormat(value, locale) + if err != nil { + return "", err + } + valueStr := quoteForCEL(value, unquotedValueStr) + argPairs[i] = mapPair{keyStr, valueStr} + i++ + } + sort.SliceStable(argPairs, func(x, y int) bool { + return argPairs[x].key < argPairs[y].key + }) + var mapStrBuilder strings.Builder + _, err := mapStrBuilder.WriteRune('{') + if err != nil { + return "", fmt.Errorf("error writing to map string: %w", err) + } + for i, entry := range argPairs { + _, err = mapStrBuilder.WriteString(fmt.Sprintf("%s:%s", entry.key, entry.value)) + if err != nil { + return "", fmt.Errorf("error writing to map string: %w", err) + } + if i < len(argPairs)-1 { + _, err = mapStrBuilder.WriteString(", ") + if err != nil { + return "", fmt.Errorf("error writing to map string: %w", err) + } + } + } + _, err = mapStrBuilder.WriteRune('}') + if err != nil { + return "", fmt.Errorf("error writing to map string: %w", err) + } + return mapStrBuilder.String(), nil +} + +// quoteForCEL takes a formatted, unquoted value and quotes it in a manner suitable +// for embedding directly in CEL. 
+func quoteForCEL(refVal ref.Val, unquotedValue string) string { + switch refVal.Type() { + case types.StringType: + return fmt.Sprintf("%q", unquotedValue) + case types.BytesType: + return fmt.Sprintf("b%q", unquotedValue) + case types.DoubleType: + // special case to handle infinity/NaN + num := refVal.Value().(float64) + if math.IsInf(num, 1) || math.IsInf(num, -1) || math.IsNaN(num) { + return fmt.Sprintf("%q", unquotedValue) + } + return unquotedValue + default: + return unquotedValue + } +} + +// FormatString returns the string representation of a CEL value. +// +// It is used to implement the %s specifier in the (string).format() extension function. +func FormatString(arg ref.Val, locale string) (string, error) { + switch arg.Type() { + case types.ListType: + return formatList(arg, locale) + case types.MapType: + return formatMap(arg, locale) + case types.IntType, types.UintType, types.DoubleType, + types.BoolType, types.StringType, types.TimestampType, types.BytesType, types.DurationType, types.TypeType: + argStrVal := arg.ConvertToType(types.StringType) + argStr, ok := argStrVal.Value().(string) + if !ok { + return "", fmt.Errorf("could not convert argument %q to string", argStrVal) + } + return argStr, nil + case types.NullType: + return "null", nil + default: + return "", stringFormatError(runtimeID, arg.Type().TypeName()) + } +} + +func formatDecimal(arg ref.Val, locale string) (string, error) { + switch arg.Type() { + case types.IntType: + argInt, ok := arg.ConvertToType(types.IntType).Value().(int64) + if !ok { + return "", fmt.Errorf("could not convert \"%s\" to int64", arg.Value()) + } + return fmt.Sprintf("%d", argInt), nil + case types.UintType: + argInt, ok := arg.ConvertToType(types.UintType).Value().(uint64) + if !ok { + return "", fmt.Errorf("could not convert \"%s\" to uint64", arg.Value()) + } + return fmt.Sprintf("%d", argInt), nil + default: + return "", decimalFormatError(runtimeID, arg.Type().TypeName()) + } +} + +func matchLanguage(locale string) (language.Tag, error) { + matcher, err := makeMatcher(locale) + if err != nil { + return language.Und, err + } + tag, _ := language.MatchStrings(matcher, locale) + return tag, nil +} + +func makeMatcher(locale string) (language.Matcher, error) { + tags := make([]language.Tag, 0) + tag, err := language.Parse(locale) + if err != nil { + return nil, err + } + tags = append(tags, tag) + return language.NewMatcher(tags), nil +} + +type stringFormatter struct{} + +func (c *stringFormatter) String(arg ref.Val, locale string) (string, error) { + return FormatString(arg, locale) +} + +func (c *stringFormatter) Decimal(arg ref.Val, locale string) (string, error) { + return formatDecimal(arg, locale) +} + +func (c *stringFormatter) Fixed(precision *int) func(ref.Val, string) (string, error) { + if precision == nil { + precision = new(int) + *precision = defaultPrecision + } + return func(arg ref.Val, locale string) (string, error) { + strException := false + if arg.Type() == types.StringType { + argStr := arg.Value().(string) + if argStr == "NaN" || argStr == "Infinity" || argStr == "-Infinity" { + strException = true + } + } + if arg.Type() != types.DoubleType && !strException { + return "", fixedPointFormatError(runtimeID, arg.Type().TypeName()) + } + argFloatVal := arg.ConvertToType(types.DoubleType) + argFloat, ok := argFloatVal.Value().(float64) + if !ok { + return "", fmt.Errorf("could not convert \"%s\" to float64", argFloatVal.Value()) + } + fmtStr := fmt.Sprintf("%%.%df", *precision) + + matchedLocale, err := 
matchLanguage(locale) + if err != nil { + return "", fmt.Errorf("error matching locale: %w", err) + } + return message.NewPrinter(matchedLocale).Sprintf(fmtStr, argFloat), nil + } +} + +func (c *stringFormatter) Scientific(precision *int) func(ref.Val, string) (string, error) { + if precision == nil { + precision = new(int) + *precision = defaultPrecision + } + return func(arg ref.Val, locale string) (string, error) { + strException := false + if arg.Type() == types.StringType { + argStr := arg.Value().(string) + if argStr == "NaN" || argStr == "Infinity" || argStr == "-Infinity" { + strException = true + } + } + if arg.Type() != types.DoubleType && !strException { + return "", scientificFormatError(runtimeID, arg.Type().TypeName()) + } + argFloatVal := arg.ConvertToType(types.DoubleType) + argFloat, ok := argFloatVal.Value().(float64) + if !ok { + return "", fmt.Errorf("could not convert \"%v\" to float64", argFloatVal.Value()) + } + matchedLocale, err := matchLanguage(locale) + if err != nil { + return "", fmt.Errorf("error matching locale: %w", err) + } + fmtStr := fmt.Sprintf("%%%de", *precision) + return message.NewPrinter(matchedLocale).Sprintf(fmtStr, argFloat), nil + } +} + +func (c *stringFormatter) Binary(arg ref.Val, locale string) (string, error) { + switch arg.Type() { + case types.IntType: + argInt := arg.Value().(int64) + // locale is intentionally unused as integers formatted as binary + // strings are locale-independent + return fmt.Sprintf("%b", argInt), nil + case types.UintType: + argInt := arg.Value().(uint64) + return fmt.Sprintf("%b", argInt), nil + case types.BoolType: + argBool := arg.Value().(bool) + if argBool { + return "1", nil + } + return "0", nil + default: + return "", binaryFormatError(runtimeID, arg.Type().TypeName()) + } +} + +func (c *stringFormatter) Hex(useUpper bool) func(ref.Val, string) (string, error) { + return func(arg ref.Val, locale string) (string, error) { + fmtStr := "%x" + if useUpper { + fmtStr = "%X" + } + switch arg.Type() { + case types.StringType, types.BytesType: + if arg.Type() == types.BytesType { + return fmt.Sprintf(fmtStr, arg.Value().([]byte)), nil + } + return fmt.Sprintf(fmtStr, arg.Value().(string)), nil + case types.IntType: + argInt, ok := arg.Value().(int64) + if !ok { + return "", fmt.Errorf("could not convert \"%s\" to int64", arg.Value()) + } + return fmt.Sprintf(fmtStr, argInt), nil + case types.UintType: + argInt, ok := arg.Value().(uint64) + if !ok { + return "", fmt.Errorf("could not convert \"%s\" to uint64", arg.Value()) + } + return fmt.Sprintf(fmtStr, argInt), nil + default: + return "", hexFormatError(runtimeID, arg.Type().TypeName()) + } + } +} + +func (c *stringFormatter) Octal(arg ref.Val, locale string) (string, error) { + switch arg.Type() { + case types.IntType: + argInt := arg.Value().(int64) + return fmt.Sprintf("%o", argInt), nil + case types.UintType: + argInt := arg.Value().(uint64) + return fmt.Sprintf("%o", argInt), nil + default: + return "", octalFormatError(runtimeID, arg.Type().TypeName()) + } +} + +// stringFormatValidator implements the cel.ASTValidator interface allowing for static validation +// of string.format calls. +type stringFormatValidator struct{} + +// Name returns the name of the validator. +func (stringFormatValidator) Name() string { + return "cel.lib.ext.validate.functions.string.format" +} + +// Configure implements the ASTValidatorConfigurer interface and augments the list of functions to skip +// during homogeneous aggregate literal type-checks. 
+func (stringFormatValidator) Configure(config cel.MutableValidatorConfig) error { + functions := config.GetOrDefault(cel.HomogeneousAggregateLiteralExemptFunctions, []string{}).([]string) + functions = append(functions, "format") + return config.Set(cel.HomogeneousAggregateLiteralExemptFunctions, functions) +} + +// Validate parses all literal format strings and type checks the format clause against the argument +// at the corresponding ordinal within the list literal argument to the function, if one is specified. +func (stringFormatValidator) Validate(env *cel.Env, _ cel.ValidatorConfig, a *ast.AST, iss *cel.Issues) { + root := ast.NavigateAST(a) + formatCallExprs := ast.MatchDescendants(root, matchConstantFormatStringWithListLiteralArgs(a)) + for _, e := range formatCallExprs { + call := e.AsCall() + formatStr := call.Target().AsLiteral().Value().(string) + args := call.Args()[0].AsList().Elements() + formatCheck := &stringFormatChecker{ + args: args, + ast: a, + } + // use a placeholder locale, since locale doesn't affect syntax + _, err := parseFormatString(formatStr, formatCheck, formatCheck, "en_US") + if err != nil { + iss.ReportErrorAtID(getErrorExprID(e.ID(), err), err.Error()) + continue + } + seenArgs := formatCheck.argsRequested + if len(args) > seenArgs { + iss.ReportErrorAtID(e.ID(), + "too many arguments supplied to string.format (expected %d, got %d)", seenArgs, len(args)) + } + } +} + +// getErrorExprID determines which list literal argument triggered a type-disagreement for the +// purposes of more accurate error message reports. +func getErrorExprID(id int64, err error) int64 { + fmtErr, ok := err.(formatError) + if ok { + return fmtErr.id + } + wrapped := errors.Unwrap(err) + if wrapped != nil { + return getErrorExprID(id, wrapped) + } + return id +} + +// matchConstantFormatStringWithListLiteralArgs matches all valid expression nodes for string +// format checking. 
+func matchConstantFormatStringWithListLiteralArgs(a *ast.AST) ast.ExprMatcher { + return func(e ast.NavigableExpr) bool { + if e.Kind() != ast.CallKind { + return false + } + call := e.AsCall() + if !call.IsMemberFunction() || call.FunctionName() != "format" { + return false + } + overloadIDs := a.GetOverloadIDs(e.ID()) + if len(overloadIDs) != 0 { + found := false + for _, overload := range overloadIDs { + if overload == overloads.ExtFormatString { + found = true + break + } + } + if !found { + return false + } + } + formatString := call.Target() + if formatString.Kind() != ast.LiteralKind && formatString.AsLiteral().Type() != cel.StringType { + return false + } + args := call.Args() + if len(args) != 1 { + return false + } + formatArgs := args[0] + return formatArgs.Kind() == ast.ListKind + } +} + +// stringFormatChecker implements the formatStringInterpolater interface +type stringFormatChecker struct { + args []ast.Expr + argsRequested int + currArgIndex int64 + ast *ast.AST +} + +func (c *stringFormatChecker) String(arg ref.Val, locale string) (string, error) { + formatArg := c.args[c.currArgIndex] + valid, badID := c.verifyString(formatArg) + if !valid { + return "", stringFormatError(badID, c.typeOf(badID).TypeName()) + } + return "", nil +} + +func (c *stringFormatChecker) Decimal(arg ref.Val, locale string) (string, error) { + id := c.args[c.currArgIndex].ID() + valid := c.verifyTypeOneOf(id, types.IntType, types.UintType) + if !valid { + return "", decimalFormatError(id, c.typeOf(id).TypeName()) + } + return "", nil +} + +func (c *stringFormatChecker) Fixed(precision *int) func(ref.Val, string) (string, error) { + return func(arg ref.Val, locale string) (string, error) { + id := c.args[c.currArgIndex].ID() + // we allow StringType since "NaN", "Infinity", and "-Infinity" are also valid values + valid := c.verifyTypeOneOf(id, types.DoubleType, types.StringType) + if !valid { + return "", fixedPointFormatError(id, c.typeOf(id).TypeName()) + } + return "", nil + } +} + +func (c *stringFormatChecker) Scientific(precision *int) func(ref.Val, string) (string, error) { + return func(arg ref.Val, locale string) (string, error) { + id := c.args[c.currArgIndex].ID() + valid := c.verifyTypeOneOf(id, types.DoubleType, types.StringType) + if !valid { + return "", scientificFormatError(id, c.typeOf(id).TypeName()) + } + return "", nil + } +} + +func (c *stringFormatChecker) Binary(arg ref.Val, locale string) (string, error) { + id := c.args[c.currArgIndex].ID() + valid := c.verifyTypeOneOf(id, types.IntType, types.UintType, types.BoolType) + if !valid { + return "", binaryFormatError(id, c.typeOf(id).TypeName()) + } + return "", nil +} + +func (c *stringFormatChecker) Hex(useUpper bool) func(ref.Val, string) (string, error) { + return func(arg ref.Val, locale string) (string, error) { + id := c.args[c.currArgIndex].ID() + valid := c.verifyTypeOneOf(id, types.IntType, types.UintType, types.StringType, types.BytesType) + if !valid { + return "", hexFormatError(id, c.typeOf(id).TypeName()) + } + return "", nil + } +} + +func (c *stringFormatChecker) Octal(arg ref.Val, locale string) (string, error) { + id := c.args[c.currArgIndex].ID() + valid := c.verifyTypeOneOf(id, types.IntType, types.UintType) + if !valid { + return "", octalFormatError(id, c.typeOf(id).TypeName()) + } + return "", nil +} + +func (c *stringFormatChecker) Arg(index int64) (ref.Val, error) { + c.argsRequested++ + c.currArgIndex = index + // return a dummy value - this is immediately passed to back to us + // through one of 
the FormatCallback functions, so anything will do + return types.Int(0), nil +} + +func (c *stringFormatChecker) Size() int64 { + return int64(len(c.args)) +} + +func (c *stringFormatChecker) typeOf(id int64) *cel.Type { + return c.ast.GetType(id) +} + +func (c *stringFormatChecker) verifyTypeOneOf(id int64, validTypes ...*cel.Type) bool { + t := c.typeOf(id) + if t == cel.DynType { + return true + } + for _, vt := range validTypes { + // Only check runtime type compatibility without delving deeper into parameterized types + if t.Kind() == vt.Kind() { + return true + } + } + return false +} + +func (c *stringFormatChecker) verifyString(sub ast.Expr) (bool, int64) { + paramA := cel.TypeParamType("A") + paramB := cel.TypeParamType("B") + subVerified := c.verifyTypeOneOf(sub.ID(), + cel.ListType(paramA), cel.MapType(paramA, paramB), + cel.IntType, cel.UintType, cel.DoubleType, cel.BoolType, cel.StringType, + cel.TimestampType, cel.BytesType, cel.DurationType, cel.TypeType, cel.NullType) + if !subVerified { + return false, sub.ID() + } + switch sub.Kind() { + case ast.ListKind: + for _, e := range sub.AsList().Elements() { + // recursively verify if we're dealing with a list/map + verified, id := c.verifyString(e) + if !verified { + return false, id + } + } + return true, sub.ID() + case ast.MapKind: + for _, e := range sub.AsMap().Entries() { + // recursively verify if we're dealing with a list/map + entry := e.AsMapEntry() + verified, id := c.verifyString(entry.Key()) + if !verified { + return false, id + } + verified, id = c.verifyString(entry.Value()) + if !verified { + return false, id + } + } + return true, sub.ID() + default: + return true, sub.ID() + } +} + +// helper routines for reporting common errors during string formatting static validation and +// runtime execution. + +func binaryFormatError(id int64, badType string) error { + return newFormatError(id, "only integers and bools can be formatted as binary, was given %s", badType) +} + +func decimalFormatError(id int64, badType string) error { + return newFormatError(id, "decimal clause can only be used on integers, was given %s", badType) +} + +func fixedPointFormatError(id int64, badType string) error { + return newFormatError(id, "fixed-point clause can only be used on doubles, was given %s", badType) +} + +func hexFormatError(id int64, badType string) error { + return newFormatError(id, "only integers, byte buffers, and strings can be formatted as hex, was given %s", badType) +} + +func octalFormatError(id int64, badType string) error { + return newFormatError(id, "octal clause can only be used on integers, was given %s", badType) +} + +func scientificFormatError(id int64, badType string) error { + return newFormatError(id, "scientific clause can only be used on doubles, was given %s", badType) +} + +func stringFormatError(id int64, badType string) error { + return newFormatError(id, "string clause can only be used on strings, bools, bytes, ints, doubles, maps, lists, types, durations, and timestamps, was given %s", badType) +} + +type formatError struct { + id int64 + msg string +} + +func newFormatError(id int64, msg string, args ...any) error { + return formatError{ + id: id, + msg: fmt.Sprintf(msg, args...), + } +} + +func (e formatError) Error() string { + return e.msg +} + +func (e formatError) Is(target error) bool { + return e.msg == target.Error() +} + +// stringArgList implements the formatListArgs interface. 
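The recursive verifyString check above mirrors the runtime behavior of the %s clause, which also accepts lists and maps; rough sketches of the expected output, assuming elements are rendered as CEL literals as in the formatting helpers elsewhere in this package:

//	'%s'.format([[1, 2, 3]])      // returns '[1, 2, 3]'
//	'%s'.format([{'key': true}])  // returns '{"key":true}'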
+type stringArgList struct { + args traits.Lister +} + +func (c *stringArgList) Arg(index int64) (ref.Val, error) { + if index >= c.args.Size().Value().(int64) { + return nil, fmt.Errorf("index %d out of range", index) + } + return c.args.Get(types.Int(index)), nil +} + +func (c *stringArgList) Size() int64 { + return c.args.Size().Value().(int64) +} + +// formatStringInterpolator is an interface that allows user-defined behavior +// for formatting clause implementations, as well as argument retrieval. +// Each function is expected to support the appropriate types as laid out in +// the string.format documentation, and to return an error if given an inappropriate type. +type formatStringInterpolator interface { + // String takes a ref.Val and a string representing the current locale identifier + // and returns the Val formatted as a string, or an error if one occurred. + String(ref.Val, string) (string, error) + + // Decimal takes a ref.Val and a string representing the current locale identifier + // and returns the Val formatted as a decimal integer, or an error if one occurred. + Decimal(ref.Val, string) (string, error) + + // Fixed takes an int pointer representing precision (or nil if none was given) and + // returns a function operating in a similar manner to String and Decimal, taking a + // ref.Val and locale and returning the appropriate string. A closure is returned + // so precision can be set without needing an additional function call/configuration. + Fixed(*int) func(ref.Val, string) (string, error) + + // Scientific functions identically to Fixed, except the string returned from the closure + // is expected to be in scientific notation. + Scientific(*int) func(ref.Val, string) (string, error) + + // Binary takes a ref.Val and a string representing the current locale identifier + // and returns the Val formatted as a binary integer, or an error if one occurred. + Binary(ref.Val, string) (string, error) + + // Hex takes a boolean that, if true, indicates the hex string output by the returned + // closure should use uppercase letters for A-F. + Hex(bool) func(ref.Val, string) (string, error) + + // Octal takes a ref.Val and a string representing the current locale identifier and + // returns the Val formatted in octal, or an error if one occurred. + Octal(ref.Val, string) (string, error) +} + +// formatListArgs is an interface that allows user-defined list-like datatypes to be used +// for formatting clause implementations. +type formatListArgs interface { + // Arg returns the ref.Val at the given index, or an error if one occurred. + Arg(int64) (ref.Val, error) + + // Size returns the length of the argument list. + Size() int64 +} + +// parseFormatString formats a string according to the string.format syntax, taking the clause implementations +// from the provided FormatCallback and the args from the given FormatList. 
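The clause grammar accepted by the parser below is small: %s, %d, %f, %e, %b, %x, %X, %o, an optional '.<digits>' precision for the floating-point clauses, and %% for a literal percent sign. A couple of sketch evaluations under the default en_US locale:

//	'%.3f'.format([2.0 / 3.0])                // returns '0.667'
//	'%s has %d item(s)'.format(['cart', 2])   // returns 'cart has 2 item(s)'
//	'100%% done'.format([])                   // returns '100% done'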
+func parseFormatString(formatStr string, callback formatStringInterpolator, list formatListArgs, locale string) (string, error) { + i := 0 + argIndex := 0 + var builtStr strings.Builder + for i < len(formatStr) { + if formatStr[i] == '%' { + if i+1 < len(formatStr) && formatStr[i+1] == '%' { + err := builtStr.WriteByte('%') + if err != nil { + return "", fmt.Errorf("error writing format string: %w", err) + } + i += 2 + continue + } else { + argAny, err := list.Arg(int64(argIndex)) + if err != nil { + return "", err + } + if i+1 >= len(formatStr) { + return "", errors.New("unexpected end of string") + } + if int64(argIndex) >= list.Size() { + return "", fmt.Errorf("index %d out of range", argIndex) + } + numRead, val, refErr := parseAndFormatClause(formatStr[i:], argAny, callback, list, locale) + if refErr != nil { + return "", refErr + } + _, err = builtStr.WriteString(val) + if err != nil { + return "", fmt.Errorf("error writing format string: %w", err) + } + i += numRead + argIndex++ + } + } else { + err := builtStr.WriteByte(formatStr[i]) + if err != nil { + return "", fmt.Errorf("error writing format string: %w", err) + } + i++ + } + } + return builtStr.String(), nil +} + +// parseAndFormatClause parses the format clause at the start of the given string with val, and returns +// how many characters were consumed and the substituted string form of val, or an error if one occurred. +func parseAndFormatClause(formatStr string, val ref.Val, callback formatStringInterpolator, list formatListArgs, locale string) (int, string, error) { + i := 1 + read, formatter, err := parseFormattingClause(formatStr[i:], callback) + i += read + if err != nil { + return -1, "", newParseFormatError("could not parse formatting clause", err) + } + + valStr, err := formatter(val, locale) + if err != nil { + return -1, "", newParseFormatError("error during formatting", err) + } + return i, valStr, nil +} + +func parseFormattingClause(formatStr string, callback formatStringInterpolator) (int, clauseImpl, error) { + i := 0 + read, precision, err := parsePrecision(formatStr[i:]) + i += read + if err != nil { + return -1, nil, fmt.Errorf("error while parsing precision: %w", err) + } + r := rune(formatStr[i]) + i++ + switch r { + case 's': + return i, callback.String, nil + case 'd': + return i, callback.Decimal, nil + case 'f': + return i, callback.Fixed(precision), nil + case 'e': + return i, callback.Scientific(precision), nil + case 'b': + return i, callback.Binary, nil + case 'x', 'X': + return i, callback.Hex(unicode.IsUpper(r)), nil + case 'o': + return i, callback.Octal, nil + default: + return -1, nil, fmt.Errorf("unrecognized formatting clause \"%c\"", r) + } +} + +func parsePrecision(formatStr string) (int, *int, error) { + i := 0 + if formatStr[i] != '.' 
{ + return i, nil, nil + } + i++ + var buffer strings.Builder + for { + if i >= len(formatStr) { + return -1, nil, errors.New("could not find end of precision specifier") + } + if !isASCIIDigit(rune(formatStr[i])) { + break + } + buffer.WriteByte(formatStr[i]) + i++ + } + precision, err := strconv.Atoi(buffer.String()) + if err != nil { + return -1, nil, fmt.Errorf("error while converting precision to integer: %w", err) + } + return i, &precision, nil +} + +func isASCIIDigit(r rune) bool { + return r <= unicode.MaxASCII && unicode.IsDigit(r) +} + +type parseFormatError struct { + msg string + wrapped error +} + +func newParseFormatError(msg string, wrapped error) error { + return parseFormatError{msg: msg, wrapped: wrapped} +} + +func (e parseFormatError) Error() string { + return fmt.Sprintf("%s: %s", e.msg, e.wrapped.Error()) +} + +func (e parseFormatError) Is(target error) bool { + return e.Error() == target.Error() +} + +func (e parseFormatError) Unwrap() error { + return e.wrapped +} + +const ( + runtimeID = int64(-1) +) diff --git a/vendor/github.com/google/cel-go/ext/guards.go b/vendor/github.com/google/cel-go/ext/guards.go index 4c7786a69..2c00bfe3a 100644 --- a/vendor/github.com/google/cel-go/ext/guards.go +++ b/vendor/github.com/google/cel-go/ext/guards.go @@ -15,9 +15,9 @@ package ext import ( + "github.com/google/cel-go/common/ast" "github.com/google/cel-go/common/types" "github.com/google/cel-go/common/types/ref" - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" ) // function invocation guards for common call signatures within extension functions. @@ -50,10 +50,10 @@ func listStringOrError(strs []string, err error) ref.Val { return types.DefaultTypeAdapter.NativeToValue(strs) } -func macroTargetMatchesNamespace(ns string, target *exprpb.Expr) bool { - switch target.GetExprKind().(type) { - case *exprpb.Expr_IdentExpr: - if target.GetIdentExpr().GetName() != ns { +func macroTargetMatchesNamespace(ns string, target ast.Expr) bool { + switch target.Kind() { + case ast.IdentKind: + if target.AsIdent() != ns { return false } return true diff --git a/vendor/github.com/google/cel-go/ext/lists.go b/vendor/github.com/google/cel-go/ext/lists.go new file mode 100644 index 000000000..08751d08a --- /dev/null +++ b/vendor/github.com/google/cel-go/ext/lists.go @@ -0,0 +1,94 @@ +// Copyright 2023 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package ext + +import ( + "fmt" + + "github.com/google/cel-go/cel" + "github.com/google/cel-go/common/types" + "github.com/google/cel-go/common/types/ref" + "github.com/google/cel-go/common/types/traits" +) + +// Lists returns a cel.EnvOption to configure extended functions for list manipulation. +// As a general note, all indices are zero-based. +// # Slice +// +// Returns a new sub-list using the indexes provided. 
+// +// .slice(, ) -> +// +// Examples: +// +// [1,2,3,4].slice(1, 3) // return [2, 3] +// [1,2,3,4].slice(2, 4) // return [3 ,4] +func Lists() cel.EnvOption { + return cel.Lib(listsLib{}) +} + +type listsLib struct{} + +// LibraryName implements the SingletonLibrary interface method. +func (listsLib) LibraryName() string { + return "cel.lib.ext.lists" +} + +// CompileOptions implements the Library interface method. +func (listsLib) CompileOptions() []cel.EnvOption { + listType := cel.ListType(cel.TypeParamType("T")) + return []cel.EnvOption{ + cel.Function("slice", + cel.MemberOverload("list_slice", + []*cel.Type{listType, cel.IntType, cel.IntType}, listType, + cel.FunctionBinding(func(args ...ref.Val) ref.Val { + list := args[0].(traits.Lister) + start := args[1].(types.Int) + end := args[2].(types.Int) + result, err := slice(list, start, end) + if err != nil { + return types.WrapErr(err) + } + return result + }), + ), + ), + } +} + +// ProgramOptions implements the Library interface method. +func (listsLib) ProgramOptions() []cel.ProgramOption { + return []cel.ProgramOption{} +} + +func slice(list traits.Lister, start, end types.Int) (ref.Val, error) { + listLength := list.Size().(types.Int) + if start < 0 || end < 0 { + return nil, fmt.Errorf("cannot slice(%d, %d), negative indexes not supported", start, end) + } + if start > end { + return nil, fmt.Errorf("cannot slice(%d, %d), start index must be less than or equal to end index", start, end) + } + if listLength < end { + return nil, fmt.Errorf("cannot slice(%d, %d), list is length %d", start, end, listLength) + } + + var newList []ref.Val + for i := types.Int(start); i < end; i++ { + val := list.Get(i) + newList = append(newList, val) + } + return types.DefaultTypeAdapter.NativeToValue(newList), nil +} diff --git a/vendor/github.com/google/cel-go/ext/math.go b/vendor/github.com/google/cel-go/ext/math.go index 1c8ad585a..65d7e2eb0 100644 --- a/vendor/github.com/google/cel-go/ext/math.go +++ b/vendor/github.com/google/cel-go/ext/math.go @@ -19,11 +19,10 @@ import ( "strings" "github.com/google/cel-go/cel" - "github.com/google/cel-go/common" + "github.com/google/cel-go/common/ast" "github.com/google/cel-go/common/types" "github.com/google/cel-go/common/types/ref" "github.com/google/cel-go/common/types/traits" - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" ) // Math returns a cel.EnvOption to configure namespaced math helper macros and @@ -111,9 +110,9 @@ func (mathLib) CompileOptions() []cel.EnvOption { return []cel.EnvOption{ cel.Macros( // math.least(num, ...) - cel.NewReceiverVarArgMacro(leastMacro, mathLeast), + cel.ReceiverVarArgMacro(leastMacro, mathLeast), // math.greatest(num, ...) 
- cel.NewReceiverVarArgMacro(greatestMacro, mathGreatest), + cel.ReceiverVarArgMacro(greatestMacro, mathGreatest), ), cel.Function(minFunc, cel.Overload("math_@min_double", []*cel.Type{cel.DoubleType}, cel.DoubleType, @@ -187,69 +186,57 @@ func (mathLib) ProgramOptions() []cel.ProgramOption { return []cel.ProgramOption{} } -func mathLeast(meh cel.MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) { +func mathLeast(meh cel.MacroExprFactory, target ast.Expr, args []ast.Expr) (ast.Expr, *cel.Error) { if !macroTargetMatchesNamespace(mathNamespace, target) { return nil, nil } switch len(args) { case 0: - return nil, &common.Error{ - Message: "math.least() requires at least one argument", - Location: meh.OffsetLocation(target.GetId()), - } + return nil, meh.NewError(target.ID(), "math.least() requires at least one argument") case 1: if isListLiteralWithValidArgs(args[0]) || isValidArgType(args[0]) { - return meh.GlobalCall(minFunc, args[0]), nil - } - return nil, &common.Error{ - Message: "math.least() invalid single argument value", - Location: meh.OffsetLocation(args[0].GetId()), + return meh.NewCall(minFunc, args[0]), nil } + return nil, meh.NewError(args[0].ID(), "math.least() invalid single argument value") case 2: err := checkInvalidArgs(meh, "math.least()", args) if err != nil { return nil, err } - return meh.GlobalCall(minFunc, args...), nil + return meh.NewCall(minFunc, args...), nil default: err := checkInvalidArgs(meh, "math.least()", args) if err != nil { return nil, err } - return meh.GlobalCall(minFunc, meh.NewList(args...)), nil + return meh.NewCall(minFunc, meh.NewList(args...)), nil } } -func mathGreatest(meh cel.MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) { +func mathGreatest(mef cel.MacroExprFactory, target ast.Expr, args []ast.Expr) (ast.Expr, *cel.Error) { if !macroTargetMatchesNamespace(mathNamespace, target) { return nil, nil } switch len(args) { case 0: - return nil, &common.Error{ - Message: "math.greatest() requires at least one argument", - Location: meh.OffsetLocation(target.GetId()), - } + return nil, mef.NewError(target.ID(), "math.greatest() requires at least one argument") case 1: if isListLiteralWithValidArgs(args[0]) || isValidArgType(args[0]) { - return meh.GlobalCall(maxFunc, args[0]), nil - } - return nil, &common.Error{ - Message: "math.greatest() invalid single argument value", - Location: meh.OffsetLocation(args[0].GetId()), + return mef.NewCall(maxFunc, args[0]), nil } + return nil, mef.NewError(args[0].ID(), "math.greatest() invalid single argument value") case 2: - err := checkInvalidArgs(meh, "math.greatest()", args) + err := checkInvalidArgs(mef, "math.greatest()", args) if err != nil { return nil, err } - return meh.GlobalCall(maxFunc, args...), nil + return mef.NewCall(maxFunc, args...), nil default: - err := checkInvalidArgs(meh, "math.greatest()", args) + err := checkInvalidArgs(mef, "math.greatest()", args) if err != nil { return nil, err } - return meh.GlobalCall(maxFunc, meh.NewList(args...)), nil + return mef.NewCall(maxFunc, mef.NewList(args...)), nil } } @@ -323,51 +310,48 @@ func maxList(numList ref.Val) ref.Val { } } -func checkInvalidArgs(meh cel.MacroExprHelper, funcName string, args []*exprpb.Expr) *common.Error { +func checkInvalidArgs(meh cel.MacroExprFactory, funcName string, args []ast.Expr) *cel.Error { for _, arg := range args { err := checkInvalidArgLiteral(funcName, arg) if err != nil { - return &common.Error{ - Message: err.Error(), - Location: 
meh.OffsetLocation(arg.GetId()), - } + return meh.NewError(arg.ID(), err.Error()) } } return nil } -func checkInvalidArgLiteral(funcName string, arg *exprpb.Expr) error { +func checkInvalidArgLiteral(funcName string, arg ast.Expr) error { if !isValidArgType(arg) { return fmt.Errorf("%s simple literal arguments must be numeric", funcName) } return nil } -func isValidArgType(arg *exprpb.Expr) bool { - switch arg.GetExprKind().(type) { - case *exprpb.Expr_ConstExpr: - c := arg.GetConstExpr() - switch c.GetConstantKind().(type) { - case *exprpb.Constant_DoubleValue, *exprpb.Constant_Int64Value, *exprpb.Constant_Uint64Value: +func isValidArgType(arg ast.Expr) bool { + switch arg.Kind() { + case ast.LiteralKind: + c := ref.Val(arg.AsLiteral()) + switch c.(type) { + case types.Double, types.Int, types.Uint: return true default: return false } - case *exprpb.Expr_ListExpr, *exprpb.Expr_StructExpr: + case ast.ListKind, ast.MapKind, ast.StructKind: return false default: return true } } -func isListLiteralWithValidArgs(arg *exprpb.Expr) bool { - switch arg.GetExprKind().(type) { - case *exprpb.Expr_ListExpr: - list := arg.GetListExpr() - if len(list.GetElements()) == 0 { +func isListLiteralWithValidArgs(arg ast.Expr) bool { + switch arg.Kind() { + case ast.ListKind: + list := arg.AsList() + if list.Size() == 0 { return false } - for _, e := range list.GetElements() { + for _, e := range list.Elements() { if !isValidArgType(e) { return false } diff --git a/vendor/github.com/google/cel-go/ext/native.go b/vendor/github.com/google/cel-go/ext/native.go index acbc44b6d..0c2cd52f9 100644 --- a/vendor/github.com/google/cel-go/ext/native.go +++ b/vendor/github.com/google/cel-go/ext/native.go @@ -24,13 +24,11 @@ import ( "google.golang.org/protobuf/reflect/protoreflect" "github.com/google/cel-go/cel" - "github.com/google/cel-go/checker/decls" "github.com/google/cel-go/common/types" "github.com/google/cel-go/common/types/pb" "github.com/google/cel-go/common/types/ref" "github.com/google/cel-go/common/types/traits" - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" structpb "google.golang.org/protobuf/types/known/structpb" ) @@ -81,7 +79,7 @@ var ( // the time that it is invoked. func NativeTypes(refTypes ...any) cel.EnvOption { return func(env *cel.Env) (*cel.Env, error) { - tp, err := newNativeTypeProvider(env.TypeAdapter(), env.TypeProvider(), refTypes...) + tp, err := newNativeTypeProvider(env.CELTypeAdapter(), env.CELTypeProvider(), refTypes...) if err != nil { return nil, err } @@ -93,7 +91,7 @@ func NativeTypes(refTypes ...any) cel.EnvOption { } } -func newNativeTypeProvider(adapter ref.TypeAdapter, provider ref.TypeProvider, refTypes ...any) (*nativeTypeProvider, error) { +func newNativeTypeProvider(adapter types.Adapter, provider types.Provider, refTypes ...any) (*nativeTypeProvider, error) { nativeTypes := make(map[string]*nativeType, len(refTypes)) for _, refType := range refTypes { switch rt := refType.(type) { @@ -122,18 +120,18 @@ func newNativeTypeProvider(adapter ref.TypeAdapter, provider ref.TypeProvider, r type nativeTypeProvider struct { nativeTypes map[string]*nativeType - baseAdapter ref.TypeAdapter - baseProvider ref.TypeProvider + baseAdapter types.Adapter + baseProvider types.Provider } -// EnumValue proxies to the ref.TypeProvider configured at the times the NativeTypes +// EnumValue proxies to the types.Provider configured at the times the NativeTypes // option was configured. 
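As a rough usage sketch of the NativeTypes option above: a hypothetical Person struct is exposed to CEL, with the object type name assumed to resolve to "main.Person" via the provider's package-alias naming.

package main

import (
	"fmt"
	"reflect"

	"github.com/google/cel-go/cel"
	"github.com/google/cel-go/ext"
)

// Person is a hypothetical native Go struct used only for illustration.
type Person struct {
	Name string
	Age  int64
}

func main() {
	env, err := cel.NewEnv(
		ext.NativeTypes(reflect.TypeOf(Person{})),
		// Assumes the provider names the type as package alias + struct name.
		cel.Variable("p", cel.ObjectType("main.Person")),
	)
	if err != nil {
		panic(err)
	}
	ast, iss := env.Compile(`p.Age >= 18 && p.Name != ''`)
	if iss.Err() != nil {
		panic(iss.Err())
	}
	prg, err := env.Program(ast)
	if err != nil {
		panic(err)
	}
	out, _, err := prg.Eval(map[string]any{"p": Person{Name: "Ada", Age: 36}})
	if err != nil {
		panic(err)
	}
	fmt.Println(out) // true
}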
func (tp *nativeTypeProvider) EnumValue(enumName string) ref.Val { return tp.baseProvider.EnumValue(enumName) } // FindIdent looks up natives type instances by qualified identifier, and if not found -// proxies to the composed ref.TypeProvider. +// proxies to the composed types.Provider. func (tp *nativeTypeProvider) FindIdent(typeName string) (ref.Val, bool) { if t, found := tp.nativeTypes[typeName]; found { return t, true @@ -141,32 +139,53 @@ func (tp *nativeTypeProvider) FindIdent(typeName string) (ref.Val, bool) { return tp.baseProvider.FindIdent(typeName) } -// FindType looks up CEL type-checker type definition by qualified identifier, and if not found -// proxies to the composed ref.TypeProvider. -func (tp *nativeTypeProvider) FindType(typeName string) (*exprpb.Type, bool) { +// FindStructType looks up the CEL type definition by qualified identifier, and if not found +// proxies to the composed types.Provider. +func (tp *nativeTypeProvider) FindStructType(typeName string) (*types.Type, bool) { if _, found := tp.nativeTypes[typeName]; found { - return decls.NewTypeType(decls.NewObjectType(typeName)), true + return types.NewTypeTypeWithParam(types.NewObjectType(typeName)), true } - return tp.baseProvider.FindType(typeName) + if celType, found := tp.baseProvider.FindStructType(typeName); found { + return celType, true + } + return tp.baseProvider.FindStructType(typeName) +} + +// FindStructFieldNames looks up the type definition first from the native types, then from +// the backing provider type set. If found, a set of field names corresponding to the type +// will be returned. +func (tp *nativeTypeProvider) FindStructFieldNames(typeName string) ([]string, bool) { + if t, found := tp.nativeTypes[typeName]; found { + fieldCount := t.refType.NumField() + fields := make([]string, fieldCount) + for i := 0; i < fieldCount; i++ { + fields[i] = t.refType.Field(i).Name + } + return fields, true + } + if celTypeFields, found := tp.baseProvider.FindStructFieldNames(typeName); found { + return celTypeFields, true + } + return tp.baseProvider.FindStructFieldNames(typeName) } -// FindFieldType looks up a native type's field definition, and if the type name is not a native -// type then proxies to the composed ref.TypeProvider -func (tp *nativeTypeProvider) FindFieldType(typeName, fieldName string) (*ref.FieldType, bool) { +// FindStructFieldType looks up a native type's field definition, and if the type name is not a native +// type then proxies to the composed types.Provider +func (tp *nativeTypeProvider) FindStructFieldType(typeName, fieldName string) (*types.FieldType, bool) { t, found := tp.nativeTypes[typeName] if !found { - return tp.baseProvider.FindFieldType(typeName, fieldName) + return tp.baseProvider.FindStructFieldType(typeName, fieldName) } refField, isDefined := t.hasField(fieldName) if !found || !isDefined { return nil, false } - exprType, ok := convertToExprType(refField.Type) + celType, ok := convertToCelType(refField.Type) if !ok { return nil, false } - return &ref.FieldType{ - Type: exprType, + return &types.FieldType{ + Type: celType, IsSet: func(obj any) bool { refVal := reflect.Indirect(reflect.ValueOf(obj)) refField := refVal.FieldByName(fieldName) @@ -243,75 +262,74 @@ func (tp *nativeTypeProvider) NativeToValue(val any) ref.Val { } } -// convertToExprType converts the Golang reflect.Type to a protobuf exprpb.Type. 
-func convertToExprType(refType reflect.Type) (*exprpb.Type, bool) { +func convertToCelType(refType reflect.Type) (*cel.Type, bool) { switch refType.Kind() { case reflect.Bool: - return decls.Bool, true + return cel.BoolType, true case reflect.Float32, reflect.Float64: - return decls.Double, true + return cel.DoubleType, true case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: if refType == durationType { - return decls.Duration, true + return cel.DurationType, true } - return decls.Int, true + return cel.IntType, true case reflect.String: - return decls.String, true + return cel.StringType, true case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: - return decls.Uint, true + return cel.UintType, true case reflect.Array, reflect.Slice: refElem := refType.Elem() if refElem == reflect.TypeOf(byte(0)) { - return decls.Bytes, true + return cel.BytesType, true } - elemType, ok := convertToExprType(refElem) + elemType, ok := convertToCelType(refElem) if !ok { return nil, false } - return decls.NewListType(elemType), true + return cel.ListType(elemType), true case reflect.Map: - keyType, ok := convertToExprType(refType.Key()) + keyType, ok := convertToCelType(refType.Key()) if !ok { return nil, false } // Ensure the key type is a int, bool, uint, string - elemType, ok := convertToExprType(refType.Elem()) + elemType, ok := convertToCelType(refType.Elem()) if !ok { return nil, false } - return decls.NewMapType(keyType, elemType), true + return cel.MapType(keyType, elemType), true case reflect.Struct: if refType == timestampType { - return decls.Timestamp, true + return cel.TimestampType, true } - return decls.NewObjectType( + return cel.ObjectType( fmt.Sprintf("%s.%s", simplePkgAlias(refType.PkgPath()), refType.Name()), ), true case reflect.Pointer: if refType.Implements(pbMsgInterfaceType) { pbMsg := reflect.New(refType.Elem()).Interface().(protoreflect.ProtoMessage) - return decls.NewObjectType(string(pbMsg.ProtoReflect().Descriptor().FullName())), true + return cel.ObjectType(string(pbMsg.ProtoReflect().Descriptor().FullName())), true } - return convertToExprType(refType.Elem()) + return convertToCelType(refType.Elem()) } return nil, false } -func newNativeObject(adapter ref.TypeAdapter, val any, refValue reflect.Value) ref.Val { +func newNativeObject(adapter types.Adapter, val any, refValue reflect.Value) ref.Val { valType, err := newNativeType(refValue.Type()) if err != nil { return types.NewErr(err.Error()) } return &nativeObj{ - TypeAdapter: adapter, - val: val, - valType: valType, - refValue: refValue, + Adapter: adapter, + val: val, + valType: valType, + refValue: refValue, } } type nativeObj struct { - ref.TypeAdapter + types.Adapter val any valType *nativeType refValue reflect.Value @@ -520,11 +538,11 @@ func (t *nativeType) hasField(fieldName string) (reflect.StructField, bool) { return f, true } -func adaptFieldValue(adapter ref.TypeAdapter, refField reflect.Value) ref.Val { +func adaptFieldValue(adapter types.Adapter, refField reflect.Value) ref.Val { return adapter.NativeToValue(getFieldValue(adapter, refField)) } -func getFieldValue(adapter ref.TypeAdapter, refField reflect.Value) any { +func getFieldValue(adapter types.Adapter, refField reflect.Value) any { if refField.IsZero() { switch refField.Kind() { case reflect.Array, reflect.Slice: diff --git a/vendor/github.com/google/cel-go/ext/protos.go b/vendor/github.com/google/cel-go/ext/protos.go index b905e710c..68796f60a 100644 --- a/vendor/github.com/google/cel-go/ext/protos.go 
+++ b/vendor/github.com/google/cel-go/ext/protos.go @@ -16,9 +16,7 @@ package ext import ( "github.com/google/cel-go/cel" - "github.com/google/cel-go/common" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" + "github.com/google/cel-go/common/ast" ) // Protos returns a cel.EnvOption to configure extended macros and functions for @@ -73,9 +71,9 @@ func (protoLib) CompileOptions() []cel.EnvOption { return []cel.EnvOption{ cel.Macros( // proto.getExt(msg, select_expression) - cel.NewReceiverMacro(getExtension, 2, getProtoExt), + cel.ReceiverMacro(getExtension, 2, getProtoExt), // proto.hasExt(msg, select_expression) - cel.NewReceiverMacro(hasExtension, 2, hasProtoExt), + cel.ReceiverMacro(hasExtension, 2, hasProtoExt), ), } } @@ -86,59 +84,56 @@ func (protoLib) ProgramOptions() []cel.ProgramOption { } // hasProtoExt generates a test-only select expression for a fully-qualified extension name on a protobuf message. -func hasProtoExt(meh cel.MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) { +func hasProtoExt(mef cel.MacroExprFactory, target ast.Expr, args []ast.Expr) (ast.Expr, *cel.Error) { if !macroTargetMatchesNamespace(protoNamespace, target) { return nil, nil } - extensionField, err := getExtFieldName(meh, args[1]) + extensionField, err := getExtFieldName(mef, args[1]) if err != nil { return nil, err } - return meh.PresenceTest(args[0], extensionField), nil + return mef.NewPresenceTest(args[0], extensionField), nil } // getProtoExt generates a select expression for a fully-qualified extension name on a protobuf message. -func getProtoExt(meh cel.MacroExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) { +func getProtoExt(mef cel.MacroExprFactory, target ast.Expr, args []ast.Expr) (ast.Expr, *cel.Error) { if !macroTargetMatchesNamespace(protoNamespace, target) { return nil, nil } - extFieldName, err := getExtFieldName(meh, args[1]) + extFieldName, err := getExtFieldName(mef, args[1]) if err != nil { return nil, err } - return meh.Select(args[0], extFieldName), nil + return mef.NewSelect(args[0], extFieldName), nil } -func getExtFieldName(meh cel.MacroExprHelper, expr *exprpb.Expr) (string, *common.Error) { +func getExtFieldName(mef cel.MacroExprFactory, expr ast.Expr) (string, *cel.Error) { isValid := false extensionField := "" - switch expr.GetExprKind().(type) { - case *exprpb.Expr_SelectExpr: + switch expr.Kind() { + case ast.SelectKind: extensionField, isValid = validateIdentifier(expr) } if !isValid { - return "", &common.Error{ - Message: "invalid extension field", - Location: meh.OffsetLocation(expr.GetId()), - } + return "", mef.NewError(expr.ID(), "invalid extension field") } return extensionField, nil } -func validateIdentifier(expr *exprpb.Expr) (string, bool) { - switch expr.GetExprKind().(type) { - case *exprpb.Expr_IdentExpr: - return expr.GetIdentExpr().GetName(), true - case *exprpb.Expr_SelectExpr: - sel := expr.GetSelectExpr() - if sel.GetTestOnly() { +func validateIdentifier(expr ast.Expr) (string, bool) { + switch expr.Kind() { + case ast.IdentKind: + return expr.AsIdent(), true + case ast.SelectKind: + sel := expr.AsSelect() + if sel.IsTestOnly() { return "", false } - opStr, isIdent := validateIdentifier(sel.GetOperand()) + opStr, isIdent := validateIdentifier(sel.Operand()) if !isIdent { return "", false } - return opStr + "." + sel.GetField(), true + return opStr + "." 
+ sel.FieldName(), true default: return "", false } diff --git a/vendor/github.com/google/cel-go/ext/sets.go b/vendor/github.com/google/cel-go/ext/sets.go index 4820d6199..833c15f61 100644 --- a/vendor/github.com/google/cel-go/ext/sets.go +++ b/vendor/github.com/google/cel-go/ext/sets.go @@ -15,10 +15,14 @@ package ext import ( + "math" + "github.com/google/cel-go/cel" + "github.com/google/cel-go/checker" "github.com/google/cel-go/common/types" "github.com/google/cel-go/common/types/ref" "github.com/google/cel-go/common/types/traits" + "github.com/google/cel-go/interpreter" ) // Sets returns a cel.EnvOption to configure namespaced set relationship @@ -95,12 +99,24 @@ func (setsLib) CompileOptions() []cel.EnvOption { cel.Function("sets.intersects", cel.Overload("list_sets_intersects_list", []*cel.Type{listType, listType}, cel.BoolType, cel.BinaryBinding(setsIntersects))), + cel.CostEstimatorOptions( + checker.OverloadCostEstimate("list_sets_contains_list", estimateSetsCost(1)), + checker.OverloadCostEstimate("list_sets_intersects_list", estimateSetsCost(1)), + // equivalence requires potentially two m*n comparisons to ensure each list is contained by the other + checker.OverloadCostEstimate("list_sets_equivalent_list", estimateSetsCost(2)), + ), } } // ProgramOptions implements the Library interface method. func (setsLib) ProgramOptions() []cel.ProgramOption { - return []cel.ProgramOption{} + return []cel.ProgramOption{ + cel.CostTrackerOptions( + interpreter.OverloadCostTracker("list_sets_contains_list", trackSetsCost(1)), + interpreter.OverloadCostTracker("list_sets_intersects_list", trackSetsCost(1)), + interpreter.OverloadCostTracker("list_sets_equivalent_list", trackSetsCost(2)), + ), + } } func setsIntersects(listA, listB ref.Val) ref.Val { @@ -136,3 +152,46 @@ func setsEquivalent(listA, listB ref.Val) ref.Val { } return setsContains(listB, listA) } + +func estimateSetsCost(costFactor float64) checker.FunctionEstimator { + return func(estimator checker.CostEstimator, target *checker.AstNode, args []checker.AstNode) *checker.CallEstimate { + if len(args) == 2 { + arg0Size := estimateSize(estimator, args[0]) + arg1Size := estimateSize(estimator, args[1]) + costEstimate := arg0Size.Multiply(arg1Size).MultiplyByCostFactor(costFactor).Add(callCostEstimate) + return &checker.CallEstimate{CostEstimate: costEstimate} + } + return nil + } +} + +func estimateSize(estimator checker.CostEstimator, node checker.AstNode) checker.SizeEstimate { + if l := node.ComputedSize(); l != nil { + return *l + } + if l := estimator.EstimateSize(node); l != nil { + return *l + } + return checker.SizeEstimate{Min: 0, Max: math.MaxUint64} +} + +func trackSetsCost(costFactor float64) interpreter.FunctionTracker { + return func(args []ref.Val, _ ref.Val) *uint64 { + lhsSize := actualSize(args[0]) + rhsSize := actualSize(args[1]) + cost := callCost + uint64(float64(lhsSize*rhsSize)*costFactor) + return &cost + } +} + +func actualSize(value ref.Val) uint64 { + if sz, ok := value.(traits.Sizer); ok { + return uint64(sz.Size().(types.Int)) + } + return 1 +} + +var ( + callCostEstimate = checker.CostEstimate{Min: 1, Max: 1} + callCost = uint64(1) +) diff --git a/vendor/github.com/google/cel-go/ext/strings.go b/vendor/github.com/google/cel-go/ext/strings.go index 8455d5829..1faa6ed7d 100644 --- a/vendor/github.com/google/cel-go/ext/strings.go +++ b/vendor/github.com/google/cel-go/ext/strings.go @@ -21,19 +21,16 @@ import ( "fmt" "math" "reflect" - "sort" "strings" "unicode" "unicode/utf8" "golang.org/x/text/language" - 
"golang.org/x/text/message" "github.com/google/cel-go/cel" "github.com/google/cel-go/common/types" "github.com/google/cel-go/common/types/ref" "github.com/google/cel-go/common/types/traits" - "github.com/google/cel-go/interpreter" ) const ( @@ -173,7 +170,7 @@ const ( // 'TacoCat'.lowerAscii() // returns 'tacocat' // 'TacoCÆt Xii'.lowerAscii() // returns 'tacocÆt xii' // -// # Quote +// # Strings.Quote // // Introduced in version: 1 // @@ -205,6 +202,8 @@ const ( // 'hello hello'.replace('he', 'we', -1) // returns 'wello wello' // 'hello hello'.replace('he', 'we', 1) // returns 'wello hello' // 'hello hello'.replace('he', 'we', 0) // returns 'hello hello' +// 'hello hello'.replace('', '_') // returns '_h_e_l_l_o_ _h_e_l_l_o_' +// 'hello hello'.replace('h', '') // returns 'ello ello' // // # Split // @@ -270,8 +269,26 @@ const ( // // 'TacoCat'.upperAscii() // returns 'TACOCAT' // 'TacoCÆt Xii'.upperAscii() // returns 'TACOCÆT XII' +// +// # Reverse +// +// Introduced at version: 3 +// +// Returns a new string whose characters are the same as the target string, only formatted in +// reverse order. +// This function relies on converting strings to rune arrays in order to reverse +// +// .reverse() -> +// +// Examples: +// +// 'gums'.reverse() // returns 'smug' +// 'John Smith'.reverse() // returns 'htimS nhoJ' func Strings(options ...StringsOption) cel.EnvOption { - s := &stringLib{version: math.MaxUint32} + s := &stringLib{ + version: math.MaxUint32, + validateFormat: true, + } for _, o := range options { s = o(s) } @@ -279,8 +296,9 @@ func Strings(options ...StringsOption) cel.EnvOption { } type stringLib struct { - locale string - version uint32 + locale string + version uint32 + validateFormat bool } // LibraryName implements the SingletonLibrary interface method. @@ -301,26 +319,39 @@ func StringsLocale(locale string) StringsOption { } } -// StringsVersion configures the version of the string library. The version limits which -// functions are available. Only functions introduced below or equal to the given -// version included in the library. See the library documentation to determine -// which version a function was introduced at. If the documentation does not -// state which version a function was introduced at, it can be assumed to be -// introduced at version 0, when the library was first created. -// If this option is not set, all functions are available. -func StringsVersion(version uint32) func(lib *stringLib) *stringLib { - return func(sl *stringLib) *stringLib { - sl.version = version - return sl +// StringsVersion configures the version of the string library. +// +// The version limits which functions are available. Only functions introduced +// below or equal to the given version included in the library. If this option +// is not set, all functions are available. +// +// See the library documentation to determine which version a function was introduced. +// If the documentation does not state which version a function was introduced, it can +// be assumed to be introduced at version 0, when the library was first created. +func StringsVersion(version uint32) StringsOption { + return func(lib *stringLib) *stringLib { + lib.version = version + return lib + } +} + +// StringsValidateFormatCalls validates type-checked ASTs to ensure that string.format() calls have +// valid formatting clauses and valid argument types for each clause. +// +// Enabled by default. 
+func StringsValidateFormatCalls(value bool) StringsOption { + return func(s *stringLib) *stringLib { + s.validateFormat = value + return s } } // CompileOptions implements the Library interface method. -func (sl *stringLib) CompileOptions() []cel.EnvOption { +func (lib *stringLib) CompileOptions() []cel.EnvOption { formatLocale := "en_US" - if sl.locale != "" { + if lib.locale != "" { // ensure locale is properly-formed if set - _, err := language.Parse(sl.locale) + _, err := language.Parse(lib.locale) if err != nil { return []cel.EnvOption{ func(e *cel.Env) (*cel.Env, error) { @@ -328,7 +359,7 @@ func (sl *stringLib) CompileOptions() []cel.EnvOption { }, } } - formatLocale = sl.locale + formatLocale = lib.locale } opts := []cel.EnvOption{ @@ -432,22 +463,24 @@ func (sl *stringLib) CompileOptions() []cel.EnvOption { return stringOrError(upperASCII(string(s))) }))), } - if sl.version >= 1 { + if lib.version >= 1 { opts = append(opts, cel.Function("format", cel.MemberOverload("string_format", []*cel.Type{cel.StringType, cel.ListType(cel.DynType)}, cel.StringType, cel.FunctionBinding(func(args ...ref.Val) ref.Val { s := string(args[0].(types.String)) formatArgs := args[1].(traits.Lister) - return stringOrError(interpreter.ParseFormatString(s, &stringFormatter{}, &stringArgList{formatArgs}, formatLocale)) + return stringOrError(parseFormatString(s, &stringFormatter{}, &stringArgList{formatArgs}, formatLocale)) }))), cel.Function("strings.quote", cel.Overload("strings_quote", []*cel.Type{cel.StringType}, cel.StringType, cel.UnaryBinding(func(str ref.Val) ref.Val { s := str.(types.String) return stringOrError(quote(string(s))) - })))) + }))), + + cel.ASTValidators(stringFormatValidator{})) } - if sl.version >= 2 { + if lib.version >= 2 { opts = append(opts, cel.Function("join", cel.MemberOverload("list_join", []*cel.Type{cel.ListType(cel.StringType)}, cel.StringType, @@ -469,7 +502,7 @@ func (sl *stringLib) CompileOptions() []cel.EnvOption { cel.UnaryBinding(func(list ref.Val) ref.Val { l, err := list.ConvertToNative(stringListType) if err != nil { - return types.NewErr(err.Error()) + return types.WrapErr(err) } return stringOrError(join(l.([]string))) })), @@ -477,13 +510,26 @@ func (sl *stringLib) CompileOptions() []cel.EnvOption { cel.BinaryBinding(func(list, delim ref.Val) ref.Val { l, err := list.ConvertToNative(stringListType) if err != nil { - return types.NewErr(err.Error()) + return types.WrapErr(err) } d := delim.(types.String) return stringOrError(joinSeparator(l.([]string), string(d))) }))), ) } + if lib.version >= 3 { + opts = append(opts, + cel.Function("reverse", + cel.MemberOverload("reverse", []*cel.Type{cel.StringType}, cel.StringType, + cel.UnaryBinding(func(str ref.Val) ref.Val { + s := str.(types.String) + return stringOrError(reverse(string(s))) + }))), + ) + } + if lib.validateFormat { + opts = append(opts, cel.ASTValidators(stringFormatValidator{})) + } return opts } @@ -634,6 +680,14 @@ func upperASCII(str string) (string, error) { return string(runes), nil } +func reverse(str string) (string, error) { + chars := []rune(str) + for i, j := 0, len(chars)-1; i < j; i, j = i+1, j-1 { + chars[i], chars[j] = chars[j], chars[i] + } + return string(chars), nil +} + func joinSeparator(strs []string, separator string) (string, error) { return strings.Join(strs, separator), nil } @@ -659,238 +713,6 @@ func joinValSeparator(strs traits.Lister, separator string) (string, error) { return sb.String(), nil } -type clauseImpl func(ref.Val, string) (string, error) - -func 
clauseForType(argType ref.Type) (clauseImpl, error) { - switch argType { - case types.IntType, types.UintType: - return formatDecimal, nil - case types.StringType, types.BytesType, types.BoolType, types.NullType, types.TypeType: - return FormatString, nil - case types.TimestampType, types.DurationType: - // special case to ensure timestamps/durations get printed as CEL literals - return func(arg ref.Val, locale string) (string, error) { - argStrVal := arg.ConvertToType(types.StringType) - argStr := argStrVal.Value().(string) - if arg.Type() == types.TimestampType { - return fmt.Sprintf("timestamp(%q)", argStr), nil - } - if arg.Type() == types.DurationType { - return fmt.Sprintf("duration(%q)", argStr), nil - } - return "", fmt.Errorf("cannot convert argument of type %s to timestamp/duration", arg.Type().TypeName()) - }, nil - case types.ListType: - return formatList, nil - case types.MapType: - return formatMap, nil - case types.DoubleType: - // avoid formatFixed so we can output a period as the decimal separator in order - // to always be a valid CEL literal - return func(arg ref.Val, locale string) (string, error) { - argDouble, ok := arg.Value().(float64) - if !ok { - return "", fmt.Errorf("couldn't convert %s to float64", arg.Type().TypeName()) - } - fmtStr := fmt.Sprintf("%%.%df", defaultPrecision) - return fmt.Sprintf(fmtStr, argDouble), nil - }, nil - case types.TypeType: - return func(arg ref.Val, locale string) (string, error) { - return fmt.Sprintf("type(%s)", arg.Value().(string)), nil - }, nil - default: - return nil, fmt.Errorf("no formatting function for %s", argType.TypeName()) - } -} - -func formatList(arg ref.Val, locale string) (string, error) { - argList := arg.(traits.Lister) - argIterator := argList.Iterator() - var listStrBuilder strings.Builder - _, err := listStrBuilder.WriteRune('[') - if err != nil { - return "", fmt.Errorf("error writing to list string: %w", err) - } - for argIterator.HasNext() == types.True { - member := argIterator.Next() - memberFormat, err := clauseForType(member.Type()) - if err != nil { - return "", err - } - unquotedStr, err := memberFormat(member, locale) - if err != nil { - return "", err - } - str := quoteForCEL(member, unquotedStr) - _, err = listStrBuilder.WriteString(str) - if err != nil { - return "", fmt.Errorf("error writing to list string: %w", err) - } - if argIterator.HasNext() == types.True { - _, err = listStrBuilder.WriteString(", ") - if err != nil { - return "", fmt.Errorf("error writing to list string: %w", err) - } - } - } - _, err = listStrBuilder.WriteRune(']') - if err != nil { - return "", fmt.Errorf("error writing to list string: %w", err) - } - return listStrBuilder.String(), nil -} - -func formatMap(arg ref.Val, locale string) (string, error) { - argMap := arg.(traits.Mapper) - argIterator := argMap.Iterator() - type mapPair struct { - key string - value string - } - argPairs := make([]mapPair, argMap.Size().Value().(int64)) - i := 0 - for argIterator.HasNext() == types.True { - key := argIterator.Next() - var keyFormat clauseImpl - switch key.Type() { - case types.StringType, types.BoolType: - keyFormat = FormatString - case types.IntType, types.UintType: - keyFormat = formatDecimal - default: - return "", fmt.Errorf("no formatting function for map key of type %s", key.Type().TypeName()) - } - unquotedKeyStr, err := keyFormat(key, locale) - if err != nil { - return "", err - } - keyStr := quoteForCEL(key, unquotedKeyStr) - value, found := argMap.Find(key) - if !found { - return "", fmt.Errorf("could not find 
key: %q", key) - } - valueFormat, err := clauseForType(value.Type()) - if err != nil { - return "", err - } - unquotedValueStr, err := valueFormat(value, locale) - if err != nil { - return "", err - } - valueStr := quoteForCEL(value, unquotedValueStr) - argPairs[i] = mapPair{keyStr, valueStr} - i++ - } - sort.SliceStable(argPairs, func(x, y int) bool { - return argPairs[x].key < argPairs[y].key - }) - var mapStrBuilder strings.Builder - _, err := mapStrBuilder.WriteRune('{') - if err != nil { - return "", fmt.Errorf("error writing to map string: %w", err) - } - for i, entry := range argPairs { - _, err = mapStrBuilder.WriteString(fmt.Sprintf("%s:%s", entry.key, entry.value)) - if err != nil { - return "", fmt.Errorf("error writing to map string: %w", err) - } - if i < len(argPairs)-1 { - _, err = mapStrBuilder.WriteString(", ") - if err != nil { - return "", fmt.Errorf("error writing to map string: %w", err) - } - } - } - _, err = mapStrBuilder.WriteRune('}') - if err != nil { - return "", fmt.Errorf("error writing to map string: %w", err) - } - return mapStrBuilder.String(), nil -} - -// quoteForCEL takes a formatted, unquoted value and quotes it in a manner -// suitable for embedding directly in CEL. -func quoteForCEL(refVal ref.Val, unquotedValue string) string { - switch refVal.Type() { - case types.StringType: - return fmt.Sprintf("%q", unquotedValue) - case types.BytesType: - return fmt.Sprintf("b%q", unquotedValue) - case types.DoubleType: - // special case to handle infinity/NaN - num := refVal.Value().(float64) - if math.IsInf(num, 1) || math.IsInf(num, -1) || math.IsNaN(num) { - return fmt.Sprintf("%q", unquotedValue) - } - return unquotedValue - default: - return unquotedValue - } -} - -// FormatString returns the string representation of a CEL value. -// It is used to implement the %s specifier in the (string).format() extension -// function. 
-func FormatString(arg ref.Val, locale string) (string, error) { - switch arg.Type() { - case types.ListType: - return formatList(arg, locale) - case types.MapType: - return formatMap(arg, locale) - case types.IntType, types.UintType, types.DoubleType, - types.BoolType, types.StringType, types.TimestampType, types.BytesType, types.DurationType, types.TypeType: - argStrVal := arg.ConvertToType(types.StringType) - argStr, ok := argStrVal.Value().(string) - if !ok { - return "", fmt.Errorf("could not convert argument %q to string", argStrVal) - } - return argStr, nil - case types.NullType: - return "null", nil - default: - return "", fmt.Errorf("string clause can only be used on strings, bools, bytes, ints, doubles, maps, lists, types, durations, and timestamps, was given %s", arg.Type().TypeName()) - } -} - -func formatDecimal(arg ref.Val, locale string) (string, error) { - switch arg.Type() { - case types.IntType: - argInt, ok := arg.ConvertToType(types.IntType).Value().(int64) - if !ok { - return "", fmt.Errorf("could not convert \"%s\" to int64", arg.Value()) - } - return fmt.Sprintf("%d", argInt), nil - case types.UintType: - argInt, ok := arg.ConvertToType(types.UintType).Value().(uint64) - if !ok { - return "", fmt.Errorf("could not convert \"%s\" to uint64", arg.Value()) - } - return fmt.Sprintf("%d", argInt), nil - default: - return "", fmt.Errorf("decimal clause can only be used on integers, was given %s", arg.Type().TypeName()) - } -} - -func matchLanguage(locale string) (language.Tag, error) { - matcher, err := makeMatcher(locale) - if err != nil { - return language.Und, err - } - tag, _ := language.MatchStrings(matcher, locale) - return tag, nil -} - -func makeMatcher(locale string) (language.Matcher, error) { - tags := make([]language.Tag, 0) - tag, err := language.Parse(locale) - if err != nil { - return nil, err - } - tags = append(tags, tag) - return language.NewMatcher(tags), nil -} - // quote implements a string quoting function. The string will be wrapped in // double quotes, and all valid CEL escape sequences will be escaped to show up // literally if printed. 
If the input contains any invalid UTF-8, the invalid runes @@ -938,156 +760,6 @@ func sanitize(s string) string { return sanitizedStringBuilder.String() } -type stringFormatter struct{} - -func (c *stringFormatter) String(arg ref.Val, locale string) (string, error) { - return FormatString(arg, locale) -} - -func (c *stringFormatter) Decimal(arg ref.Val, locale string) (string, error) { - return formatDecimal(arg, locale) -} - -func (c *stringFormatter) Fixed(precision *int) func(ref.Val, string) (string, error) { - if precision == nil { - precision = new(int) - *precision = defaultPrecision - } - return func(arg ref.Val, locale string) (string, error) { - strException := false - if arg.Type() == types.StringType { - argStr := arg.Value().(string) - if argStr == "NaN" || argStr == "Infinity" || argStr == "-Infinity" { - strException = true - } - } - if arg.Type() != types.DoubleType && !strException { - return "", fmt.Errorf("fixed-point clause can only be used on doubles, was given %s", arg.Type().TypeName()) - } - argFloatVal := arg.ConvertToType(types.DoubleType) - argFloat, ok := argFloatVal.Value().(float64) - if !ok { - return "", fmt.Errorf("could not convert \"%s\" to float64", argFloatVal.Value()) - } - fmtStr := fmt.Sprintf("%%.%df", *precision) - - matchedLocale, err := matchLanguage(locale) - if err != nil { - return "", fmt.Errorf("error matching locale: %w", err) - } - return message.NewPrinter(matchedLocale).Sprintf(fmtStr, argFloat), nil - } -} - -func (c *stringFormatter) Scientific(precision *int) func(ref.Val, string) (string, error) { - if precision == nil { - precision = new(int) - *precision = defaultPrecision - } - return func(arg ref.Val, locale string) (string, error) { - strException := false - if arg.Type() == types.StringType { - argStr := arg.Value().(string) - if argStr == "NaN" || argStr == "Infinity" || argStr == "-Infinity" { - strException = true - } - } - if arg.Type() != types.DoubleType && !strException { - return "", fmt.Errorf("scientific clause can only be used on doubles, was given %s", arg.Type().TypeName()) - } - argFloatVal := arg.ConvertToType(types.DoubleType) - argFloat, ok := argFloatVal.Value().(float64) - if !ok { - return "", fmt.Errorf("could not convert \"%s\" to float64", argFloatVal.Value()) - } - matchedLocale, err := matchLanguage(locale) - if err != nil { - return "", fmt.Errorf("error matching locale: %w", err) - } - fmtStr := fmt.Sprintf("%%%de", *precision) - return message.NewPrinter(matchedLocale).Sprintf(fmtStr, argFloat), nil - } -} - -func (c *stringFormatter) Binary(arg ref.Val, locale string) (string, error) { - switch arg.Type() { - case types.IntType: - argInt := arg.Value().(int64) - // locale is intentionally unused as integers formatted as binary - // strings are locale-independent - return fmt.Sprintf("%b", argInt), nil - case types.UintType: - argInt := arg.Value().(uint64) - return fmt.Sprintf("%b", argInt), nil - case types.BoolType: - argBool := arg.Value().(bool) - if argBool { - return "1", nil - } - return "0", nil - default: - return "", fmt.Errorf("only integers and bools can be formatted as binary, was given %s", arg.Type().TypeName()) - } -} - -func (c *stringFormatter) Hex(useUpper bool) func(ref.Val, string) (string, error) { - return func(arg ref.Val, locale string) (string, error) { - fmtStr := "%x" - if useUpper { - fmtStr = "%X" - } - switch arg.Type() { - case types.StringType, types.BytesType: - if arg.Type() == types.BytesType { - return fmt.Sprintf(fmtStr, arg.Value().([]byte)), nil - } - return 
fmt.Sprintf(fmtStr, arg.Value().(string)), nil - case types.IntType: - argInt, ok := arg.Value().(int64) - if !ok { - return "", fmt.Errorf("could not convert \"%s\" to int64", arg.Value()) - } - return fmt.Sprintf(fmtStr, argInt), nil - case types.UintType: - argInt, ok := arg.Value().(uint64) - if !ok { - return "", fmt.Errorf("could not convert \"%s\" to uint64", arg.Value()) - } - return fmt.Sprintf(fmtStr, argInt), nil - default: - return "", fmt.Errorf("only integers, byte buffers, and strings can be formatted as hex, was given %s", arg.Type().TypeName()) - } - } -} - -func (c *stringFormatter) Octal(arg ref.Val, locale string) (string, error) { - switch arg.Type() { - case types.IntType: - argInt := arg.Value().(int64) - return fmt.Sprintf("%o", argInt), nil - case types.UintType: - argInt := arg.Value().(uint64) - return fmt.Sprintf("%o", argInt), nil - default: - return "", fmt.Errorf("octal clause can only be used on integers, was given %s", arg.Type().TypeName()) - } -} - -type stringArgList struct { - args traits.Lister -} - -func (c *stringArgList) Arg(index int64) (ref.Val, error) { - if index >= c.args.Size().Value().(int64) { - return nil, fmt.Errorf("index %d out of range", index) - } - return c.args.Get(types.Int(index)), nil -} - -func (c *stringArgList) ArgSize() int64 { - return c.args.Size().Value().(int64) -} - var ( stringListType = reflect.TypeOf([]string{}) ) diff --git a/vendor/github.com/google/cel-go/interpreter/BUILD.bazel b/vendor/github.com/google/cel-go/interpreter/BUILD.bazel index b6d04e000..220e23d47 100644 --- a/vendor/github.com/google/cel-go/interpreter/BUILD.bazel +++ b/vendor/github.com/google/cel-go/interpreter/BUILD.bazel @@ -14,7 +14,6 @@ go_library( "decorators.go", "dispatcher.go", "evalstate.go", - "formatting.go", "interpretable.go", "interpreter.go", "optimizations.go", @@ -25,13 +24,14 @@ go_library( importpath = "github.com/google/cel-go/interpreter", deps = [ "//common:go_default_library", + "//common/ast:go_default_library", "//common/containers:go_default_library", + "//common/functions:go_default_library", "//common/operators:go_default_library", "//common/overloads:go_default_library", "//common/types:go_default_library", "//common/types/ref:go_default_library", "//common/types/traits:go_default_library", - "//interpreter/functions:go_default_library", "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library", "@org_golang_google_protobuf//proto:go_default_library", "@org_golang_google_protobuf//types/known/durationpb:go_default_library", @@ -56,12 +56,13 @@ go_test( ], deps = [ "//checker:go_default_library", - "//checker/decls:go_default_library", "//common/containers:go_default_library", "//common/debug:go_default_library", + "//common/decls:go_default_library", + "//common/functions:go_default_library", "//common/operators:go_default_library", + "//common/stdlib:go_default_library", "//common/types:go_default_library", - "//interpreter/functions:go_default_library", "//parser:go_default_library", "//test:go_default_library", "//test/proto2pb:go_default_library", diff --git a/vendor/github.com/google/cel-go/interpreter/activation.go b/vendor/github.com/google/cel-go/interpreter/activation.go index f82e4e903..a80264451 100644 --- a/vendor/github.com/google/cel-go/interpreter/activation.go +++ b/vendor/github.com/google/cel-go/interpreter/activation.go @@ -58,7 +58,7 @@ func (emptyActivation) Parent() Activation { return nil } // The output of the lazy binding will overwrite the variable reference in the internal 
map. // // Values which are not represented as ref.Val types on input may be adapted to a ref.Val using -// the ref.TypeAdapter configured in the environment. +// the types.Adapter configured in the environment. func NewActivation(bindings any) (Activation, error) { if bindings == nil { return nil, errors.New("bindings must be non-nil") diff --git a/vendor/github.com/google/cel-go/interpreter/attribute_patterns.go b/vendor/github.com/google/cel-go/interpreter/attribute_patterns.go index afb7c8d5b..1fbaaf17e 100644 --- a/vendor/github.com/google/cel-go/interpreter/attribute_patterns.go +++ b/vendor/github.com/google/cel-go/interpreter/attribute_patterns.go @@ -15,6 +15,8 @@ package interpreter import ( + "fmt" + "github.com/google/cel-go/common/containers" "github.com/google/cel-go/common/types" "github.com/google/cel-go/common/types/ref" @@ -177,8 +179,8 @@ func numericValueEquals(value any, celValue ref.Val) bool { // NewPartialAttributeFactory returns an AttributeFactory implementation capable of performing // AttributePattern matches with PartialActivation inputs. func NewPartialAttributeFactory(container *containers.Container, - adapter ref.TypeAdapter, - provider ref.TypeProvider) AttributeFactory { + adapter types.Adapter, + provider types.Provider) AttributeFactory { fac := NewAttributeFactory(container, adapter, provider) return &partialAttributeFactory{ AttributeFactory: fac, @@ -191,8 +193,8 @@ func NewPartialAttributeFactory(container *containers.Container, type partialAttributeFactory struct { AttributeFactory container *containers.Container - adapter ref.TypeAdapter - provider ref.TypeProvider + adapter types.Adapter + provider types.Provider } // AbsoluteAttribute implementation of the AttributeFactory interface which wraps the @@ -241,12 +243,15 @@ func (fac *partialAttributeFactory) matchesUnknownPatterns( vars PartialActivation, attrID int64, variableNames []string, - qualifiers []Qualifier) (types.Unknown, error) { + qualifiers []Qualifier) (*types.Unknown, error) { patterns := vars.UnknownAttributePatterns() candidateIndices := map[int]struct{}{} for _, variable := range variableNames { for i, pat := range patterns { if pat.VariableMatches(variable) { + if len(qualifiers) == 0 { + return types.NewUnknown(attrID, types.NewAttributeTrail(variable)), nil + } candidateIndices[i] = struct{}{} } } @@ -255,10 +260,6 @@ func (fac *partialAttributeFactory) matchesUnknownPatterns( if len(candidateIndices) == 0 { return nil, nil } - // Determine whether to return early if there are no qualifiers. - if len(qualifiers) == 0 { - return types.Unknown{attrID}, nil - } // Resolve the attribute qualifiers into a static set. This prevents more dynamic // Attribute resolutions than necessary when there are multiple unknown patterns // that traverse the same Attribute-based qualifier field. 
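The hunks above replace the old unknown representation (constructed as a bare slice literal of expression ids, e.g. `types.Unknown{attrID}`) with `*types.Unknown` values that carry an attribute trail describing which variable and qualifier path could not be resolved. A minimal sketch of building such a value by hand, using only the `types.NewAttributeTrail`, `types.QualifyAttribute`, and `types.NewUnknown` helpers that appear in this patch; the expression id and the `request.auth[0]` path are illustrative, not taken from the vendored code:

package main

import (
	"fmt"

	"github.com/google/cel-go/common/types"
)

func main() {
	// Record that the variable "request", qualified by .auth[0], was unknown.
	// The variable name and the expression id (1) are placeholders.
	attr := types.NewAttributeTrail("request")
	types.QualifyAttribute[string](attr, "auth")
	types.QualifyAttribute[int64](attr, 0)
	unk := types.NewUnknown(1, attr)
	// Prints the recorded expression id together with its attribute trail.
	fmt.Println(unk)
}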
@@ -300,7 +301,28 @@ func (fac *partialAttributeFactory) matchesUnknownPatterns( } } if isUnk { - return types.Unknown{matchExprID}, nil + attr := types.NewAttributeTrail(pat.variable) + for i := 0; i < len(qualPats) && i < len(newQuals); i++ { + if qual, ok := newQuals[i].(ConstantQualifier); ok { + switch v := qual.Value().Value().(type) { + case bool: + types.QualifyAttribute[bool](attr, v) + case float64: + types.QualifyAttribute[int64](attr, int64(v)) + case int64: + types.QualifyAttribute[int64](attr, v) + case string: + types.QualifyAttribute[string](attr, v) + case uint64: + types.QualifyAttribute[uint64](attr, v) + default: + types.QualifyAttribute[string](attr, fmt.Sprintf("%v", v)) + } + } else { + types.QualifyAttribute[string](attr, "*") + } + } + return types.NewUnknown(matchExprID, attr), nil } } return nil, nil diff --git a/vendor/github.com/google/cel-go/interpreter/attributes.go b/vendor/github.com/google/cel-go/interpreter/attributes.go index 1b19dc2b5..ca97bdfcf 100644 --- a/vendor/github.com/google/cel-go/interpreter/attributes.go +++ b/vendor/github.com/google/cel-go/interpreter/attributes.go @@ -22,8 +22,6 @@ import ( "github.com/google/cel-go/common/types" "github.com/google/cel-go/common/types/ref" "github.com/google/cel-go/common/types/traits" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" ) // AttributeFactory provides methods creating Attribute and Qualifier values. @@ -61,7 +59,7 @@ type AttributeFactory interface { // The qualifier may consider the object type being qualified, if present. If absent, the // qualification should be considered dynamic and the qualification should still work, though // it may be sub-optimal. - NewQualifier(objType *exprpb.Type, qualID int64, val any, opt bool) (Qualifier, error) + NewQualifier(objType *types.Type, qualID int64, val any, opt bool) (Qualifier, error) } // Qualifier marker interface for designating different qualifier values and where they appear @@ -131,7 +129,7 @@ type NamespacedAttribute interface { // NewAttributeFactory returns a default AttributeFactory which is produces Attribute values // capable of resolving types by simple names and qualify the values using the supported qualifier // types: bool, int, string, and uint. -func NewAttributeFactory(cont *containers.Container, a ref.TypeAdapter, p ref.TypeProvider) AttributeFactory { +func NewAttributeFactory(cont *containers.Container, a types.Adapter, p types.Provider) AttributeFactory { return &attrFactory{ container: cont, adapter: a, @@ -141,8 +139,8 @@ func NewAttributeFactory(cont *containers.Container, a ref.TypeAdapter, p ref.Ty type attrFactory struct { container *containers.Container - adapter ref.TypeAdapter - provider ref.TypeProvider + adapter types.Adapter + provider types.Provider } // AbsoluteAttribute refers to a variable value and an optional qualifier path. @@ -199,13 +197,13 @@ func (r *attrFactory) RelativeAttribute(id int64, operand Interpretable) Attribu } // NewQualifier is an implementation of the AttributeFactory interface. -func (r *attrFactory) NewQualifier(objType *exprpb.Type, qualID int64, val any, opt bool) (Qualifier, error) { +func (r *attrFactory) NewQualifier(objType *types.Type, qualID int64, val any, opt bool) (Qualifier, error) { // Before creating a new qualifier check to see if this is a protobuf message field access. // If so, use the precomputed GetFrom qualification method rather than the standard // stringQualifier. 
str, isStr := val.(string) - if isStr && objType != nil && objType.GetMessageType() != "" { - ft, found := r.provider.FindFieldType(objType.GetMessageType(), str) + if isStr && objType != nil && objType.Kind() == types.StructKind { + ft, found := r.provider.FindStructFieldType(objType.TypeName(), str) if found && ft.IsSet != nil && ft.GetFrom != nil { return &fieldQualifier{ id: qualID, @@ -225,8 +223,8 @@ type absoluteAttribute struct { // (package) of the expression. namespaceNames []string qualifiers []Qualifier - adapter ref.TypeAdapter - provider ref.TypeProvider + adapter types.Adapter + provider types.Provider fac AttributeFactory } @@ -325,7 +323,7 @@ type conditionalAttribute struct { expr Interpretable truthy Attribute falsy Attribute - adapter ref.TypeAdapter + adapter types.Adapter fac AttributeFactory } @@ -393,8 +391,8 @@ func (a *conditionalAttribute) String() string { type maybeAttribute struct { id int64 attrs []NamespacedAttribute - adapter ref.TypeAdapter - provider ref.TypeProvider + adapter types.Adapter + provider types.Provider fac AttributeFactory } @@ -511,7 +509,7 @@ type relativeAttribute struct { id int64 operand Interpretable qualifiers []Qualifier - adapter ref.TypeAdapter + adapter types.Adapter fac AttributeFactory } @@ -576,7 +574,7 @@ func (a *relativeAttribute) String() string { return fmt.Sprintf("id: %v, operand: %v", a.id, a.operand) } -func newQualifier(adapter ref.TypeAdapter, id int64, v any, opt bool) (Qualifier, error) { +func newQualifier(adapter types.Adapter, id int64, v any, opt bool) (Qualifier, error) { var qual Qualifier switch val := v.(type) { case Attribute: @@ -657,7 +655,7 @@ func newQualifier(adapter ref.TypeAdapter, id int64, v any, opt bool) (Qualifier qual = &doubleQualifier{ id: id, value: float64(val), celValue: val, adapter: adapter, optional: opt, } - case types.Unknown: + case *types.Unknown: qual = &unknownQualifier{id: id, value: val} default: if q, ok := v.(Qualifier); ok { @@ -689,7 +687,7 @@ type stringQualifier struct { id int64 value string celValue ref.Val - adapter ref.TypeAdapter + adapter types.Adapter optional bool } @@ -790,7 +788,7 @@ type intQualifier struct { id int64 value int64 celValue ref.Val - adapter ref.TypeAdapter + adapter types.Adapter optional bool } @@ -917,7 +915,7 @@ type uintQualifier struct { id int64 value uint64 celValue ref.Val - adapter ref.TypeAdapter + adapter types.Adapter optional bool } @@ -982,7 +980,7 @@ type boolQualifier struct { id int64 value bool celValue ref.Val - adapter ref.TypeAdapter + adapter types.Adapter optional bool } @@ -1035,8 +1033,8 @@ func (q *boolQualifier) Value() ref.Val { type fieldQualifier struct { id int64 Name string - FieldType *ref.FieldType - adapter ref.TypeAdapter + FieldType *types.FieldType + adapter types.Adapter optional bool } @@ -1094,7 +1092,7 @@ type doubleQualifier struct { id int64 value float64 celValue ref.Val - adapter ref.TypeAdapter + adapter types.Adapter optional bool } @@ -1131,7 +1129,7 @@ func (q *doubleQualifier) Value() ref.Val { // for any value subject to qualification. This is consistent with CEL's unknown handling elsewhere. type unknownQualifier struct { id int64 - value types.Unknown + value *types.Unknown } // ID is an implementation of the Qualifier interface method. 
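The attributes.go hunks above swap every `ref.TypeAdapter`/`ref.TypeProvider` field and parameter for the `types.Adapter`/`types.Provider` interfaces, and `NewQualifier` now inspects a `*types.Type` via `Kind()`/`TypeName()` rather than the proto `*exprpb.Type`. A `*types.Registry` satisfies both interfaces, so a caller wiring the factory up directly might look like the sketch below (assuming the cel-go version vendored by this patch; most users go through the top-level `cel` package instead):

package main

import (
	"github.com/google/cel-go/common/containers"
	"github.com/google/cel-go/common/types"
	"github.com/google/cel-go/interpreter"
)

func main() {
	// A Registry acts as both the types.Adapter and the types.Provider
	// expected by the updated AttributeFactory constructor.
	reg, err := types.NewRegistry()
	if err != nil {
		panic(err)
	}
	fac := interpreter.NewAttributeFactory(containers.DefaultContainer, reg, reg)
	_ = fac
}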
@@ -1225,10 +1223,10 @@ func attrQualifyIfPresent(fac AttributeFactory, vars Activation, obj any, qualAt // refQualify attempts to convert the value to a CEL value and then uses reflection methods to try and // apply the qualifier with the option to presence test field accesses before retrieving field values. -func refQualify(adapter ref.TypeAdapter, obj any, idx ref.Val, presenceTest, presenceOnly bool) (ref.Val, bool, error) { +func refQualify(adapter types.Adapter, obj any, idx ref.Val, presenceTest, presenceOnly bool) (ref.Val, bool, error) { celVal := adapter.NativeToValue(obj) switch v := celVal.(type) { - case types.Unknown: + case *types.Unknown: return v, true, nil case *types.Err: return nil, false, v diff --git a/vendor/github.com/google/cel-go/interpreter/decorators.go b/vendor/github.com/google/cel-go/interpreter/decorators.go index 208487b7d..502db35fc 100644 --- a/vendor/github.com/google/cel-go/interpreter/decorators.go +++ b/vendor/github.com/google/cel-go/interpreter/decorators.go @@ -75,15 +75,13 @@ func decDisableShortcircuits() InterpretableDecorator { switch expr := i.(type) { case *evalOr: return &evalExhaustiveOr{ - id: expr.id, - lhs: expr.lhs, - rhs: expr.rhs, + id: expr.id, + terms: expr.terms, }, nil case *evalAnd: return &evalExhaustiveAnd{ - id: expr.id, - lhs: expr.lhs, - rhs: expr.rhs, + id: expr.id, + terms: expr.terms, }, nil case *evalFold: expr.exhaustive = true diff --git a/vendor/github.com/google/cel-go/interpreter/dispatcher.go b/vendor/github.com/google/cel-go/interpreter/dispatcher.go index febf9d8a8..8f0bdb7b8 100644 --- a/vendor/github.com/google/cel-go/interpreter/dispatcher.go +++ b/vendor/github.com/google/cel-go/interpreter/dispatcher.go @@ -17,7 +17,7 @@ package interpreter import ( "fmt" - "github.com/google/cel-go/interpreter/functions" + "github.com/google/cel-go/common/functions" ) // Dispatcher resolves function calls to their appropriate overload. diff --git a/vendor/github.com/google/cel-go/interpreter/evalstate.go b/vendor/github.com/google/cel-go/interpreter/evalstate.go index cc0d3e6f9..4bdd1fdc7 100644 --- a/vendor/github.com/google/cel-go/interpreter/evalstate.go +++ b/vendor/github.com/google/cel-go/interpreter/evalstate.go @@ -66,7 +66,11 @@ func (s *evalState) Value(exprID int64) (ref.Val, bool) { // SetValue is an implementation of the EvalState interface method. func (s *evalState) SetValue(exprID int64, val ref.Val) { - s.values[exprID] = val + if val == nil { + delete(s.values, exprID) + } else { + s.values[exprID] = val + } } // Reset implements the EvalState interface method. diff --git a/vendor/github.com/google/cel-go/interpreter/formatting.go b/vendor/github.com/google/cel-go/interpreter/formatting.go deleted file mode 100644 index 6a98f6fa5..000000000 --- a/vendor/github.com/google/cel-go/interpreter/formatting.go +++ /dev/null @@ -1,383 +0,0 @@ -// Copyright 2023 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -package interpreter - -import ( - "errors" - "fmt" - "strconv" - "strings" - "unicode" - - "github.com/google/cel-go/common/types" - "github.com/google/cel-go/common/types/ref" -) - -type typeVerifier func(int64, ...*types.TypeValue) (bool, error) - -// InterpolateFormattedString checks the syntax and cardinality of any string.format calls present in the expression and reports -// any errors at compile time. -func InterpolateFormattedString(verifier typeVerifier) InterpretableDecorator { - return func(inter Interpretable) (Interpretable, error) { - call, ok := inter.(InterpretableCall) - if !ok { - return inter, nil - } - if call.OverloadID() != "string_format" { - return inter, nil - } - args := call.Args() - if len(args) != 2 { - return nil, fmt.Errorf("wrong number of arguments to string.format (expected 2, got %d)", len(args)) - } - fmtStrInter, ok := args[0].(InterpretableConst) - if !ok { - return inter, nil - } - var fmtArgsInter InterpretableConstructor - fmtArgsInter, ok = args[1].(InterpretableConstructor) - if !ok { - return inter, nil - } - if fmtArgsInter.Type() != types.ListType { - // don't necessarily return an error since the list may be DynType - return inter, nil - } - formatStr := fmtStrInter.Value().Value().(string) - initVals := fmtArgsInter.InitVals() - - formatCheck := &formatCheck{ - args: initVals, - verifier: verifier, - } - // use a placeholder locale, since locale doesn't affect syntax - _, err := ParseFormatString(formatStr, formatCheck, formatCheck, "en_US") - if err != nil { - return nil, err - } - seenArgs := formatCheck.argsRequested - if len(initVals) > seenArgs { - return nil, fmt.Errorf("too many arguments supplied to string.format (expected %d, got %d)", seenArgs, len(initVals)) - } - return inter, nil - } -} - -type formatCheck struct { - args []Interpretable - argsRequested int - curArgIndex int64 - enableCheckArgTypes bool - verifier typeVerifier -} - -func (c *formatCheck) String(arg ref.Val, locale string) (string, error) { - valid, err := verifyString(c.args[c.curArgIndex], c.verifier) - if err != nil { - return "", err - } - if !valid { - return "", errors.New("string clause can only be used on strings, bools, bytes, ints, doubles, maps, lists, types, durations, and timestamps") - } - return "", nil -} - -func (c *formatCheck) Decimal(arg ref.Val, locale string) (string, error) { - id := c.args[c.curArgIndex].ID() - valid, err := c.verifier(id, types.IntType, types.UintType) - if err != nil { - return "", err - } - if !valid { - return "", errors.New("integer clause can only be used on integers") - } - return "", nil -} - -func (c *formatCheck) Fixed(precision *int) func(ref.Val, string) (string, error) { - return func(arg ref.Val, locale string) (string, error) { - id := c.args[c.curArgIndex].ID() - // we allow StringType since "NaN", "Infinity", and "-Infinity" are also valid values - valid, err := c.verifier(id, types.DoubleType, types.StringType) - if err != nil { - return "", err - } - if !valid { - return "", errors.New("fixed-point clause can only be used on doubles") - } - return "", nil - } -} - -func (c *formatCheck) Scientific(precision *int) func(ref.Val, string) (string, error) { - return func(arg ref.Val, locale string) (string, error) { - id := c.args[c.curArgIndex].ID() - valid, err := c.verifier(id, types.DoubleType, types.StringType) - if err != nil { - return "", err - } - if !valid { - return "", errors.New("scientific clause can only be used on doubles") - } - return "", nil - } -} - -func (c *formatCheck) Binary(arg 
ref.Val, locale string) (string, error) { - id := c.args[c.curArgIndex].ID() - valid, err := c.verifier(id, types.IntType, types.UintType, types.BoolType) - if err != nil { - return "", err - } - if !valid { - return "", errors.New("only integers and bools can be formatted as binary") - } - return "", nil -} - -func (c *formatCheck) Hex(useUpper bool) func(ref.Val, string) (string, error) { - return func(arg ref.Val, locale string) (string, error) { - id := c.args[c.curArgIndex].ID() - valid, err := c.verifier(id, types.IntType, types.UintType, types.StringType, types.BytesType) - if err != nil { - return "", err - } - if !valid { - return "", errors.New("only integers, byte buffers, and strings can be formatted as hex") - } - return "", nil - } -} - -func (c *formatCheck) Octal(arg ref.Val, locale string) (string, error) { - id := c.args[c.curArgIndex].ID() - valid, err := c.verifier(id, types.IntType, types.UintType) - if err != nil { - return "", err - } - if !valid { - return "", errors.New("octal clause can only be used on integers") - } - return "", nil -} - -func (c *formatCheck) Arg(index int64) (ref.Val, error) { - c.argsRequested++ - c.curArgIndex = index - // return a dummy value - this is immediately passed to back to us - // through one of the FormatCallback functions, so anything will do - return types.Int(0), nil -} - -func (c *formatCheck) ArgSize() int64 { - return int64(len(c.args)) -} - -func verifyString(sub Interpretable, verifier typeVerifier) (bool, error) { - subVerified, err := verifier(sub.ID(), - types.ListType, types.MapType, types.IntType, types.UintType, types.DoubleType, - types.BoolType, types.StringType, types.TimestampType, types.BytesType, types.DurationType, types.TypeType, types.NullType) - if err != nil { - return false, err - } - if !subVerified { - return false, nil - } - con, ok := sub.(InterpretableConstructor) - if ok { - members := con.InitVals() - for _, m := range members { - // recursively verify if we're dealing with a list/map - verified, err := verifyString(m, verifier) - if err != nil { - return false, err - } - if !verified { - return false, nil - } - } - } - return true, nil - -} - -// FormatStringInterpolator is an interface that allows user-defined behavior -// for formatting clause implementations, as well as argument retrieval. -// Each function is expected to support the appropriate types as laid out in -// the string.format documentation, and to return an error if given an inappropriate type. -type FormatStringInterpolator interface { - // String takes a ref.Val and a string representing the current locale identifier - // and returns the Val formatted as a string, or an error if one occurred. - String(ref.Val, string) (string, error) - - // Decimal takes a ref.Val and a string representing the current locale identifier - // and returns the Val formatted as a decimal integer, or an error if one occurred. - Decimal(ref.Val, string) (string, error) - - // Fixed takes an int pointer representing precision (or nil if none was given) and - // returns a function operating in a similar manner to String and Decimal, taking a - // ref.Val and locale and returning the appropriate string. A closure is returned - // so precision can be set without needing an additional function call/configuration. - Fixed(*int) func(ref.Val, string) (string, error) - - // Scientific functions identically to Fixed, except the string returned from the closure - // is expected to be in scientific notation. 
- Scientific(*int) func(ref.Val, string) (string, error) - - // Binary takes a ref.Val and a string representing the current locale identifier - // and returns the Val formatted as a binary integer, or an error if one occurred. - Binary(ref.Val, string) (string, error) - - // Hex takes a boolean that, if true, indicates the hex string output by the returned - // closure should use uppercase letters for A-F. - Hex(bool) func(ref.Val, string) (string, error) - - // Octal takes a ref.Val and a string representing the current locale identifier and - // returns the Val formatted in octal, or an error if one occurred. - Octal(ref.Val, string) (string, error) -} - -// FormatList is an interface that allows user-defined list-like datatypes to be used -// for formatting clause implementations. -type FormatList interface { - // Arg returns the ref.Val at the given index, or an error if one occurred. - Arg(int64) (ref.Val, error) - // ArgSize returns the length of the argument list. - ArgSize() int64 -} - -type clauseImpl func(ref.Val, string) (string, error) - -// ParseFormatString formats a string according to the string.format syntax, taking the clause implementations -// from the provided FormatCallback and the args from the given FormatList. -func ParseFormatString(formatStr string, callback FormatStringInterpolator, list FormatList, locale string) (string, error) { - i := 0 - argIndex := 0 - var builtStr strings.Builder - for i < len(formatStr) { - if formatStr[i] == '%' { - if i+1 < len(formatStr) && formatStr[i+1] == '%' { - err := builtStr.WriteByte('%') - if err != nil { - return "", fmt.Errorf("error writing format string: %w", err) - } - i += 2 - continue - } else { - argAny, err := list.Arg(int64(argIndex)) - if err != nil { - return "", err - } - if i+1 >= len(formatStr) { - return "", errors.New("unexpected end of string") - } - if int64(argIndex) >= list.ArgSize() { - return "", fmt.Errorf("index %d out of range", argIndex) - } - numRead, val, refErr := parseAndFormatClause(formatStr[i:], argAny, callback, list, locale) - if refErr != nil { - return "", refErr - } - _, err = builtStr.WriteString(val) - if err != nil { - return "", fmt.Errorf("error writing format string: %w", err) - } - i += numRead - argIndex++ - } - } else { - err := builtStr.WriteByte(formatStr[i]) - if err != nil { - return "", fmt.Errorf("error writing format string: %w", err) - } - i++ - } - } - return builtStr.String(), nil -} - -// parseAndFormatClause parses the format clause at the start of the given string with val, and returns -// how many characters were consumed and the substituted string form of val, or an error if one occurred. 
-func parseAndFormatClause(formatStr string, val ref.Val, callback FormatStringInterpolator, list FormatList, locale string) (int, string, error) { - i := 1 - read, formatter, err := parseFormattingClause(formatStr[i:], callback) - i += read - if err != nil { - return -1, "", fmt.Errorf("could not parse formatting clause: %s", err) - } - - valStr, err := formatter(val, locale) - if err != nil { - return -1, "", fmt.Errorf("error during formatting: %s", err) - } - return i, valStr, nil -} - -func parseFormattingClause(formatStr string, callback FormatStringInterpolator) (int, clauseImpl, error) { - i := 0 - read, precision, err := parsePrecision(formatStr[i:]) - i += read - if err != nil { - return -1, nil, fmt.Errorf("error while parsing precision: %w", err) - } - r := rune(formatStr[i]) - i++ - switch r { - case 's': - return i, callback.String, nil - case 'd': - return i, callback.Decimal, nil - case 'f': - return i, callback.Fixed(precision), nil - case 'e': - return i, callback.Scientific(precision), nil - case 'b': - return i, callback.Binary, nil - case 'x', 'X': - return i, callback.Hex(unicode.IsUpper(r)), nil - case 'o': - return i, callback.Octal, nil - default: - return -1, nil, fmt.Errorf("unrecognized formatting clause \"%c\"", r) - } -} - -func parsePrecision(formatStr string) (int, *int, error) { - i := 0 - if formatStr[i] != '.' { - return i, nil, nil - } - i++ - var buffer strings.Builder - for { - if i >= len(formatStr) { - return -1, nil, errors.New("could not find end of precision specifier") - } - if !isASCIIDigit(rune(formatStr[i])) { - break - } - buffer.WriteByte(formatStr[i]) - i++ - } - precision, err := strconv.Atoi(buffer.String()) - if err != nil { - return -1, nil, fmt.Errorf("error while converting precision to integer: %w", err) - } - return i, &precision, nil -} - -func isASCIIDigit(r rune) bool { - return r <= unicode.MaxASCII && unicode.IsDigit(r) -} diff --git a/vendor/github.com/google/cel-go/interpreter/functions/BUILD.bazel b/vendor/github.com/google/cel-go/interpreter/functions/BUILD.bazel index 846d11bf4..4a80c3ea0 100644 --- a/vendor/github.com/google/cel-go/interpreter/functions/BUILD.bazel +++ b/vendor/github.com/google/cel-go/interpreter/functions/BUILD.bazel @@ -7,16 +7,11 @@ package( go_library( name = "go_default_library", - srcs = [ + srcs = [ "functions.go", - "standard.go", ], importpath = "github.com/google/cel-go/interpreter/functions", deps = [ - "//common/operators:go_default_library", - "//common/overloads:go_default_library", - "//common/types:go_default_library", - "//common/types/ref:go_default_library", - "//common/types/traits:go_default_library", + "//common/functions:go_default_library", ], ) diff --git a/vendor/github.com/google/cel-go/interpreter/functions/functions.go b/vendor/github.com/google/cel-go/interpreter/functions/functions.go index 981601752..21ffb6924 100644 --- a/vendor/github.com/google/cel-go/interpreter/functions/functions.go +++ b/vendor/github.com/google/cel-go/interpreter/functions/functions.go @@ -16,7 +16,7 @@ // interpreter and as declared within the checker#StandardDeclarations. 
package functions -import "github.com/google/cel-go/common/types/ref" +import fn "github.com/google/cel-go/common/functions" // Overload defines a named overload of a function, indicating an operand trait // which must be present on the first argument to the overload as well as one @@ -26,37 +26,14 @@ import "github.com/google/cel-go/common/types/ref" // and the specializations simplify the call contract for implementers of // types with operator overloads. Any added complexity is assumed to be handled // by the generic FunctionOp. -type Overload struct { - // Operator name as written in an expression or defined within - // operators.go. - Operator string - - // Operand trait used to dispatch the call. The zero-value indicates a - // global function overload or that one of the Unary / Binary / Function - // definitions should be used to execute the call. - OperandTrait int - - // Unary defines the overload with a UnaryOp implementation. May be nil. - Unary UnaryOp - - // Binary defines the overload with a BinaryOp implementation. May be nil. - Binary BinaryOp - - // Function defines the overload with a FunctionOp implementation. May be - // nil. - Function FunctionOp - - // NonStrict specifies whether the Overload will tolerate arguments that - // are types.Err or types.Unknown. - NonStrict bool -} +type Overload = fn.Overload // UnaryOp is a function that takes a single value and produces an output. -type UnaryOp func(value ref.Val) ref.Val +type UnaryOp = fn.UnaryOp // BinaryOp is a function that takes two values and produces an output. -type BinaryOp func(lhs ref.Val, rhs ref.Val) ref.Val +type BinaryOp = fn.BinaryOp // FunctionOp is a function with accepts zero or more arguments and produces // a value or error as a result. -type FunctionOp func(values ...ref.Val) ref.Val +type FunctionOp = fn.FunctionOp diff --git a/vendor/github.com/google/cel-go/interpreter/functions/standard.go b/vendor/github.com/google/cel-go/interpreter/functions/standard.go deleted file mode 100644 index 73e936114..000000000 --- a/vendor/github.com/google/cel-go/interpreter/functions/standard.go +++ /dev/null @@ -1,270 +0,0 @@ -// Copyright 2018 Google LLC -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package functions - -import ( - "github.com/google/cel-go/common/operators" - "github.com/google/cel-go/common/overloads" - "github.com/google/cel-go/common/types" - "github.com/google/cel-go/common/types/ref" - "github.com/google/cel-go/common/types/traits" -) - -// StandardOverloads returns the definitions of the built-in overloads. -func StandardOverloads() []*Overload { - return []*Overload{ - // Logical not (!a) - { - Operator: operators.LogicalNot, - OperandTrait: traits.NegatorType, - Unary: func(value ref.Val) ref.Val { - if !types.IsBool(value) { - return types.ValOrErr(value, "no such overload") - } - return value.(traits.Negater).Negate() - }}, - // Not strictly false: IsBool(a) ? 
a : true - { - Operator: operators.NotStrictlyFalse, - Unary: notStrictlyFalse}, - // Deprecated: not strictly false, may be overridden in the environment. - { - Operator: operators.OldNotStrictlyFalse, - Unary: notStrictlyFalse}, - - // Less than operator - {Operator: operators.Less, - OperandTrait: traits.ComparerType, - Binary: func(lhs ref.Val, rhs ref.Val) ref.Val { - cmp := lhs.(traits.Comparer).Compare(rhs) - if cmp == types.IntNegOne { - return types.True - } - if cmp == types.IntOne || cmp == types.IntZero { - return types.False - } - return cmp - }}, - - // Less than or equal operator - {Operator: operators.LessEquals, - OperandTrait: traits.ComparerType, - Binary: func(lhs ref.Val, rhs ref.Val) ref.Val { - cmp := lhs.(traits.Comparer).Compare(rhs) - if cmp == types.IntNegOne || cmp == types.IntZero { - return types.True - } - if cmp == types.IntOne { - return types.False - } - return cmp - }}, - - // Greater than operator - {Operator: operators.Greater, - OperandTrait: traits.ComparerType, - Binary: func(lhs ref.Val, rhs ref.Val) ref.Val { - cmp := lhs.(traits.Comparer).Compare(rhs) - if cmp == types.IntOne { - return types.True - } - if cmp == types.IntNegOne || cmp == types.IntZero { - return types.False - } - return cmp - }}, - - // Greater than equal operators - {Operator: operators.GreaterEquals, - OperandTrait: traits.ComparerType, - Binary: func(lhs ref.Val, rhs ref.Val) ref.Val { - cmp := lhs.(traits.Comparer).Compare(rhs) - if cmp == types.IntOne || cmp == types.IntZero { - return types.True - } - if cmp == types.IntNegOne { - return types.False - } - return cmp - }}, - - // Add operator - {Operator: operators.Add, - OperandTrait: traits.AdderType, - Binary: func(lhs ref.Val, rhs ref.Val) ref.Val { - return lhs.(traits.Adder).Add(rhs) - }}, - - // Subtract operators - {Operator: operators.Subtract, - OperandTrait: traits.SubtractorType, - Binary: func(lhs ref.Val, rhs ref.Val) ref.Val { - return lhs.(traits.Subtractor).Subtract(rhs) - }}, - - // Multiply operator - {Operator: operators.Multiply, - OperandTrait: traits.MultiplierType, - Binary: func(lhs ref.Val, rhs ref.Val) ref.Val { - return lhs.(traits.Multiplier).Multiply(rhs) - }}, - - // Divide operator - {Operator: operators.Divide, - OperandTrait: traits.DividerType, - Binary: func(lhs ref.Val, rhs ref.Val) ref.Val { - return lhs.(traits.Divider).Divide(rhs) - }}, - - // Modulo operator - {Operator: operators.Modulo, - OperandTrait: traits.ModderType, - Binary: func(lhs ref.Val, rhs ref.Val) ref.Val { - return lhs.(traits.Modder).Modulo(rhs) - }}, - - // Negate operator - {Operator: operators.Negate, - OperandTrait: traits.NegatorType, - Unary: func(value ref.Val) ref.Val { - if types.IsBool(value) { - return types.ValOrErr(value, "no such overload") - } - return value.(traits.Negater).Negate() - }}, - - // Index operator - {Operator: operators.Index, - OperandTrait: traits.IndexerType, - Binary: func(lhs ref.Val, rhs ref.Val) ref.Val { - return lhs.(traits.Indexer).Get(rhs) - }}, - - // Size function - {Operator: overloads.Size, - OperandTrait: traits.SizerType, - Unary: func(value ref.Val) ref.Val { - return value.(traits.Sizer).Size() - }}, - - // In operator - {Operator: operators.In, Binary: inAggregate}, - // Deprecated: in operator, may be overridden in the environment. 
- {Operator: operators.OldIn, Binary: inAggregate}, - - // Matches function - {Operator: overloads.Matches, - OperandTrait: traits.MatcherType, - Binary: func(lhs ref.Val, rhs ref.Val) ref.Val { - return lhs.(traits.Matcher).Match(rhs) - }}, - - // Type conversion functions - // TODO: verify type conversion safety of numeric values. - - // Int conversions. - {Operator: overloads.TypeConvertInt, - Unary: func(value ref.Val) ref.Val { - return value.ConvertToType(types.IntType) - }}, - - // Uint conversions. - {Operator: overloads.TypeConvertUint, - Unary: func(value ref.Val) ref.Val { - return value.ConvertToType(types.UintType) - }}, - - // Double conversions. - {Operator: overloads.TypeConvertDouble, - Unary: func(value ref.Val) ref.Val { - return value.ConvertToType(types.DoubleType) - }}, - - // Bool conversions. - {Operator: overloads.TypeConvertBool, - Unary: func(value ref.Val) ref.Val { - return value.ConvertToType(types.BoolType) - }}, - - // Bytes conversions. - {Operator: overloads.TypeConvertBytes, - Unary: func(value ref.Val) ref.Val { - return value.ConvertToType(types.BytesType) - }}, - - // String conversions. - {Operator: overloads.TypeConvertString, - Unary: func(value ref.Val) ref.Val { - return value.ConvertToType(types.StringType) - }}, - - // Timestamp conversions. - {Operator: overloads.TypeConvertTimestamp, - Unary: func(value ref.Val) ref.Val { - return value.ConvertToType(types.TimestampType) - }}, - - // Duration conversions. - {Operator: overloads.TypeConvertDuration, - Unary: func(value ref.Val) ref.Val { - return value.ConvertToType(types.DurationType) - }}, - - // Type operations. - {Operator: overloads.TypeConvertType, - Unary: func(value ref.Val) ref.Val { - return value.ConvertToType(types.TypeType) - }}, - - // Dyn conversion (identity function). 
- {Operator: overloads.TypeConvertDyn, - Unary: func(value ref.Val) ref.Val { - return value - }}, - - {Operator: overloads.Iterator, - OperandTrait: traits.IterableType, - Unary: func(value ref.Val) ref.Val { - return value.(traits.Iterable).Iterator() - }}, - - {Operator: overloads.HasNext, - OperandTrait: traits.IteratorType, - Unary: func(value ref.Val) ref.Val { - return value.(traits.Iterator).HasNext() - }}, - - {Operator: overloads.Next, - OperandTrait: traits.IteratorType, - Unary: func(value ref.Val) ref.Val { - return value.(traits.Iterator).Next() - }}, - } - -} - -func notStrictlyFalse(value ref.Val) ref.Val { - if types.IsBool(value) { - return value - } - return types.True -} - -func inAggregate(lhs ref.Val, rhs ref.Val) ref.Val { - if rhs.Type().HasTrait(traits.ContainerType) { - return rhs.(traits.Container).Contains(lhs) - } - return types.ValOrErr(rhs, "no such overload") -} diff --git a/vendor/github.com/google/cel-go/interpreter/interpretable.go b/vendor/github.com/google/cel-go/interpreter/interpretable.go index 32e2bcb7d..c4598dfa7 100644 --- a/vendor/github.com/google/cel-go/interpreter/interpretable.go +++ b/vendor/github.com/google/cel-go/interpreter/interpretable.go @@ -17,12 +17,12 @@ package interpreter import ( "fmt" + "github.com/google/cel-go/common/functions" "github.com/google/cel-go/common/operators" "github.com/google/cel-go/common/overloads" "github.com/google/cel-go/common/types" "github.com/google/cel-go/common/types/ref" "github.com/google/cel-go/common/types/traits" - "github.com/google/cel-go/interpreter/functions" ) // Interpretable can accept a given Activation and produce a value along with @@ -52,7 +52,7 @@ type InterpretableAttribute interface { Attr() Attribute // Adapter returns the type adapter to be used for adapting resolved Attribute values. - Adapter() ref.TypeAdapter + Adapter() types.Adapter // AddQualifier proxies the Attribute.AddQualifier method. // @@ -202,9 +202,8 @@ func (cons *evalConst) Value() ref.Val { } type evalOr struct { - id int64 - lhs Interpretable - rhs Interpretable + id int64 + terms []Interpretable } // ID implements the Interpretable interface method. @@ -214,41 +213,39 @@ func (or *evalOr) ID() int64 { // Eval implements the Interpretable interface method. func (or *evalOr) Eval(ctx Activation) ref.Val { - // short-circuit lhs. - lVal := or.lhs.Eval(ctx) - lBool, lok := lVal.(types.Bool) - if lok && lBool == types.True { - return types.True - } - // short-circuit on rhs. - rVal := or.rhs.Eval(ctx) - rBool, rok := rVal.(types.Bool) - if rok && rBool == types.True { - return types.True - } - // return if both sides are bool false. - if lok && rok { - return types.False + var err ref.Val = nil + var unk *types.Unknown + for _, term := range or.terms { + val := term.Eval(ctx) + boolVal, ok := val.(types.Bool) + // short-circuit on true. + if ok && boolVal == types.True { + return types.True + } + if !ok { + isUnk := false + unk, isUnk = types.MaybeMergeUnknowns(val, unk) + if !isUnk && err == nil { + if types.IsError(val) { + err = val + } else { + err = types.MaybeNoSuchOverloadErr(val) + } + } + } } - // TODO: return both values as a set if both are unknown or error. - // prefer left unknown to right unknown. - if types.IsUnknown(lVal) { - return lVal + if unk != nil { + return unk } - if types.IsUnknown(rVal) { - return rVal - } - // If the left-hand side is non-boolean return it as the error. 
- if types.IsError(lVal) { - return lVal + if err != nil { + return err } - return types.ValOrErr(rVal, "no such overload") + return types.False } type evalAnd struct { - id int64 - lhs Interpretable - rhs Interpretable + id int64 + terms []Interpretable } // ID implements the Interpretable interface method. @@ -258,35 +255,34 @@ func (and *evalAnd) ID() int64 { // Eval implements the Interpretable interface method. func (and *evalAnd) Eval(ctx Activation) ref.Val { - // short-circuit lhs. - lVal := and.lhs.Eval(ctx) - lBool, lok := lVal.(types.Bool) - if lok && lBool == types.False { - return types.False - } - // short-circuit on rhs. - rVal := and.rhs.Eval(ctx) - rBool, rok := rVal.(types.Bool) - if rok && rBool == types.False { - return types.False - } - // return if both sides are bool true. - if lok && rok { - return types.True - } - // TODO: return both values as a set if both are unknown or error. - // prefer left unknown to right unknown. - if types.IsUnknown(lVal) { - return lVal + var err ref.Val = nil + var unk *types.Unknown + for _, term := range and.terms { + val := term.Eval(ctx) + boolVal, ok := val.(types.Bool) + // short-circuit on false. + if ok && boolVal == types.False { + return types.False + } + if !ok { + isUnk := false + unk, isUnk = types.MaybeMergeUnknowns(val, unk) + if !isUnk && err == nil { + if types.IsError(val) { + err = val + } else { + err = types.MaybeNoSuchOverloadErr(val) + } + } + } } - if types.IsUnknown(rVal) { - return rVal + if unk != nil { + return unk } - // If the left-hand side is non-boolean return it as the error. - if types.IsError(lVal) { - return lVal + if err != nil { + return err } - return types.ValOrErr(rVal, "no such overload") + return types.True } type evalEq struct { @@ -579,7 +575,7 @@ type evalList struct { elems []Interpretable optionals []bool hasOptionals bool - adapter ref.TypeAdapter + adapter types.Adapter } // ID implements the Interpretable interface method. @@ -625,7 +621,7 @@ type evalMap struct { vals []Interpretable optionals []bool hasOptionals bool - adapter ref.TypeAdapter + adapter types.Adapter } // ID implements the Interpretable interface method. @@ -689,7 +685,7 @@ type evalObj struct { vals []Interpretable optionals []bool hasOptionals bool - provider ref.TypeProvider + provider types.Provider } // ID implements the Interpretable interface method. @@ -739,7 +735,7 @@ type evalFold struct { cond Interpretable step Interpretable result Interpretable - adapter ref.TypeAdapter + adapter types.Adapter exhaustive bool interruptable bool } @@ -865,18 +861,40 @@ type evalWatchAttr struct { // AddQualifier creates a wrapper over the incoming qualifier which observes the qualification // result. func (e *evalWatchAttr) AddQualifier(q Qualifier) (Attribute, error) { - cq, isConst := q.(ConstantQualifier) - if isConst { + switch qual := q.(type) { + // By default, the qualifier is either a constant or an attribute + // There may be some custom cases where the attribute is neither. + case ConstantQualifier: + // Expose a method to test whether the qualifier matches the input pattern. q = &evalWatchConstQual{ - ConstantQualifier: cq, + ConstantQualifier: qual, observer: e.observer, - adapter: e.InterpretableAttribute.Adapter(), + adapter: e.Adapter(), } - } else { + case *evalWatchAttr: + // Unwrap the evalWatchAttr since the observation will be applied during Qualify or + // QualifyIfPresent rather than Eval. 
+ q = &evalWatchAttrQual{ + Attribute: qual.InterpretableAttribute, + observer: e.observer, + adapter: e.Adapter(), + } + case Attribute: + // Expose methods which intercept the qualification prior to being applied as a qualifier. + // Using this interface ensures that the qualifier is converted to a constant value one + // time during attribute pattern matching as the method embeds the Attribute interface + // needed to trip the conversion to a constant. + q = &evalWatchAttrQual{ + Attribute: qual, + observer: e.observer, + adapter: e.Adapter(), + } + default: + // This is likely a custom qualifier type. q = &evalWatchQual{ - Qualifier: q, + Qualifier: qual, observer: e.observer, - adapter: e.InterpretableAttribute.Adapter(), + adapter: e.Adapter(), } } _, err := e.InterpretableAttribute.AddQualifier(q) @@ -895,7 +913,7 @@ func (e *evalWatchAttr) Eval(vars Activation) ref.Val { type evalWatchConstQual struct { ConstantQualifier observer EvalObserver - adapter ref.TypeAdapter + adapter types.Adapter } // Qualify observes the qualification of a object via a constant boolean, int, string, or uint. @@ -934,11 +952,48 @@ func (e *evalWatchConstQual) QualifierValueEquals(value any) bool { return ok && qve.QualifierValueEquals(value) } +// evalWatchAttrQual observes the qualification of an object by a value computed at runtime. +type evalWatchAttrQual struct { + Attribute + observer EvalObserver + adapter ref.TypeAdapter +} + +// Qualify observes the qualification of a object via a value computed at runtime. +func (e *evalWatchAttrQual) Qualify(vars Activation, obj any) (any, error) { + out, err := e.Attribute.Qualify(vars, obj) + var val ref.Val + if err != nil { + val = types.WrapErr(err) + } else { + val = e.adapter.NativeToValue(out) + } + e.observer(e.ID(), e.Attribute, val) + return out, err +} + +// QualifyIfPresent conditionally qualifies the variable and only records a value if one is present. +func (e *evalWatchAttrQual) QualifyIfPresent(vars Activation, obj any, presenceOnly bool) (any, bool, error) { + out, present, err := e.Attribute.QualifyIfPresent(vars, obj, presenceOnly) + var val ref.Val + if err != nil { + val = types.WrapErr(err) + } else if out != nil { + val = e.adapter.NativeToValue(out) + } else if presenceOnly { + val = types.Bool(present) + } + if present || presenceOnly { + e.observer(e.ID(), e.Attribute, val) + } + return out, present, err +} + // evalWatchQual observes the qualification of an object by a value computed at runtime. type evalWatchQual struct { Qualifier observer EvalObserver - adapter ref.TypeAdapter + adapter types.Adapter } // Qualify observes the qualification of a object via a value computed at runtime. @@ -986,9 +1041,8 @@ func (e *evalWatchConst) Eval(vars Activation) ref.Val { // evalExhaustiveOr is just like evalOr, but does not short-circuit argument evaluation. type evalExhaustiveOr struct { - id int64 - lhs Interpretable - rhs Interpretable + id int64 + terms []Interpretable } // ID implements the Interpretable interface method. @@ -998,38 +1052,44 @@ func (or *evalExhaustiveOr) ID() int64 { // Eval implements the Interpretable interface method. 
func (or *evalExhaustiveOr) Eval(ctx Activation) ref.Val { - lVal := or.lhs.Eval(ctx) - rVal := or.rhs.Eval(ctx) - lBool, lok := lVal.(types.Bool) - if lok && lBool == types.True { - return types.True + var err ref.Val = nil + var unk *types.Unknown + isTrue := false + for _, term := range or.terms { + val := term.Eval(ctx) + boolVal, ok := val.(types.Bool) + // flag the result as true + if ok && boolVal == types.True { + isTrue = true + } + if !ok && !isTrue { + isUnk := false + unk, isUnk = types.MaybeMergeUnknowns(val, unk) + if !isUnk && err == nil { + if types.IsError(val) { + err = val + } else { + err = types.MaybeNoSuchOverloadErr(val) + } + } + } } - rBool, rok := rVal.(types.Bool) - if rok && rBool == types.True { + if isTrue { return types.True } - if lok && rok { - return types.False - } - if types.IsUnknown(lVal) { - return lVal - } - if types.IsUnknown(rVal) { - return rVal + if unk != nil { + return unk } - // TODO: Combine the errors into a set in the future. - // If the left-hand side is non-boolean return it as the error. - if types.IsError(lVal) { - return lVal + if err != nil { + return err } - return types.MaybeNoSuchOverloadErr(rVal) + return types.False } // evalExhaustiveAnd is just like evalAnd, but does not short-circuit argument evaluation. type evalExhaustiveAnd struct { - id int64 - lhs Interpretable - rhs Interpretable + id int64 + terms []Interpretable } // ID implements the Interpretable interface method. @@ -1039,38 +1099,45 @@ func (and *evalExhaustiveAnd) ID() int64 { // Eval implements the Interpretable interface method. func (and *evalExhaustiveAnd) Eval(ctx Activation) ref.Val { - lVal := and.lhs.Eval(ctx) - rVal := and.rhs.Eval(ctx) - lBool, lok := lVal.(types.Bool) - if lok && lBool == types.False { - return types.False + var err ref.Val = nil + var unk *types.Unknown + isFalse := false + for _, term := range and.terms { + val := term.Eval(ctx) + boolVal, ok := val.(types.Bool) + // short-circuit on false. + if ok && boolVal == types.False { + isFalse = true + } + if !ok && !isFalse { + isUnk := false + unk, isUnk = types.MaybeMergeUnknowns(val, unk) + if !isUnk && err == nil { + if types.IsError(val) { + err = val + } else { + err = types.MaybeNoSuchOverloadErr(val) + } + } + } } - rBool, rok := rVal.(types.Bool) - if rok && rBool == types.False { + if isFalse { return types.False } - if lok && rok { - return types.True - } - if types.IsUnknown(lVal) { - return lVal - } - if types.IsUnknown(rVal) { - return rVal + if unk != nil { + return unk } - // TODO: Combine the errors into a set in the future. - // If the left-hand side is non-boolean return it as the error. - if types.IsError(lVal) { - return lVal + if err != nil { + return err } - return types.MaybeNoSuchOverloadErr(rVal) + return types.True } // evalExhaustiveConditional is like evalConditional, but does not short-circuit argument // evaluation. type evalExhaustiveConditional struct { id int64 - adapter ref.TypeAdapter + adapter types.Adapter attr *conditionalAttribute } @@ -1102,7 +1169,7 @@ func (cond *evalExhaustiveConditional) Eval(ctx Activation) ref.Val { // evalAttr evaluates an Attribute value. type evalAttr struct { - adapter ref.TypeAdapter + adapter types.Adapter attr Attribute optional bool } @@ -1127,7 +1194,7 @@ func (a *evalAttr) Attr() Attribute { } // Adapter implements the InterpretableAttribute interface method. 
-func (a *evalAttr) Adapter() ref.TypeAdapter { +func (a *evalAttr) Adapter() types.Adapter { return a.adapter } diff --git a/vendor/github.com/google/cel-go/interpreter/interpreter.go b/vendor/github.com/google/cel-go/interpreter/interpreter.go index 707a6105a..0aca74d88 100644 --- a/vendor/github.com/google/cel-go/interpreter/interpreter.go +++ b/vendor/github.com/google/cel-go/interpreter/interpreter.go @@ -18,22 +18,17 @@ package interpreter import ( + "github.com/google/cel-go/common/ast" "github.com/google/cel-go/common/containers" + "github.com/google/cel-go/common/types" "github.com/google/cel-go/common/types/ref" - "github.com/google/cel-go/interpreter/functions" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" ) // Interpreter generates a new Interpretable from a checked or unchecked expression. type Interpreter interface { // NewInterpretable creates an Interpretable from a checked expression and an // optional list of InterpretableDecorator values. - NewInterpretable(checked *exprpb.CheckedExpr, decorators ...InterpretableDecorator) (Interpretable, error) - - // NewUncheckedInterpretable returns an Interpretable from a parsed expression - // and an optional list of InterpretableDecorator values. - NewUncheckedInterpretable(expr *exprpb.Expr, decorators ...InterpretableDecorator) (Interpretable, error) + NewInterpretable(exprAST *ast.AST, decorators ...InterpretableDecorator) (Interpretable, error) } // EvalObserver is a functional interface that accepts an expression id and an observed value. @@ -154,8 +149,8 @@ func CompileRegexConstants(regexOptimizations ...*RegexOptimization) Interpretab type exprInterpreter struct { dispatcher Dispatcher container *containers.Container - provider ref.TypeProvider - adapter ref.TypeAdapter + provider types.Provider + adapter types.Adapter attrFactory AttributeFactory } @@ -163,8 +158,8 @@ type exprInterpreter struct { // throughout the Eval of all Interpretable instances generated from it. func NewInterpreter(dispatcher Dispatcher, container *containers.Container, - provider ref.TypeProvider, - adapter ref.TypeAdapter, + provider types.Provider, + adapter types.Adapter, attrFactory AttributeFactory) Interpreter { return &exprInterpreter{ dispatcher: dispatcher, @@ -174,20 +169,9 @@ func NewInterpreter(dispatcher Dispatcher, attrFactory: attrFactory} } -// NewStandardInterpreter builds a Dispatcher and TypeProvider with support for all of the CEL -// builtins defined in the language definition. -func NewStandardInterpreter(container *containers.Container, - provider ref.TypeProvider, - adapter ref.TypeAdapter, - resolver AttributeFactory) Interpreter { - dispatcher := NewDispatcher() - dispatcher.Add(functions.StandardOverloads()...) - return NewInterpreter(dispatcher, container, provider, adapter, resolver) -} - // NewIntepretable implements the Interpreter interface method. func (i *exprInterpreter) NewInterpretable( - checked *exprpb.CheckedExpr, + checked *ast.AST, decorators ...InterpretableDecorator) (Interpretable, error) { p := newPlanner( i.dispatcher, @@ -197,19 +181,5 @@ func (i *exprInterpreter) NewInterpretable( i.container, checked, decorators...) - return p.Plan(checked.GetExpr()) -} - -// NewUncheckedIntepretable implements the Interpreter interface method. 
-func (i *exprInterpreter) NewUncheckedInterpretable( - expr *exprpb.Expr, - decorators ...InterpretableDecorator) (Interpretable, error) { - p := newUncheckedPlanner( - i.dispatcher, - i.provider, - i.adapter, - i.attrFactory, - i.container, - decorators...) - return p.Plan(expr) + return p.Plan(checked.Expr()) } diff --git a/vendor/github.com/google/cel-go/interpreter/planner.go b/vendor/github.com/google/cel-go/interpreter/planner.go index 0b65d0fa9..cf371f95d 100644 --- a/vendor/github.com/google/cel-go/interpreter/planner.go +++ b/vendor/github.com/google/cel-go/interpreter/planner.go @@ -18,18 +18,17 @@ import ( "fmt" "strings" + "github.com/google/cel-go/common/ast" "github.com/google/cel-go/common/containers" + "github.com/google/cel-go/common/functions" "github.com/google/cel-go/common/operators" - "github.com/google/cel-go/common/types/ref" - "github.com/google/cel-go/interpreter/functions" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" + "github.com/google/cel-go/common/types" ) // interpretablePlanner creates an Interpretable evaluation plan from a proto Expr value. type interpretablePlanner interface { // Plan generates an Interpretable value (or error) from the input proto Expr. - Plan(expr *exprpb.Expr) (Interpretable, error) + Plan(expr ast.Expr) (Interpretable, error) } // newPlanner creates an interpretablePlanner which references a Dispatcher, TypeProvider, @@ -37,11 +36,11 @@ type interpretablePlanner interface { // functions, types, and namespaced identifiers at plan time rather than at runtime since // it only needs to be done once and may be semi-expensive to compute. func newPlanner(disp Dispatcher, - provider ref.TypeProvider, - adapter ref.TypeAdapter, + provider types.Provider, + adapter types.Adapter, attrFactory AttributeFactory, cont *containers.Container, - checked *exprpb.CheckedExpr, + exprAST *ast.AST, decorators ...InterpretableDecorator) interpretablePlanner { return &planner{ disp: disp, @@ -49,29 +48,8 @@ func newPlanner(disp Dispatcher, adapter: adapter, attrFactory: attrFactory, container: cont, - refMap: checked.GetReferenceMap(), - typeMap: checked.GetTypeMap(), - decorators: decorators, - } -} - -// newUncheckedPlanner creates an interpretablePlanner which references a Dispatcher, TypeProvider, -// TypeAdapter, and Container to resolve functions and types at plan time. Namespaces present in -// Select expressions are resolved lazily at evaluation time. -func newUncheckedPlanner(disp Dispatcher, - provider ref.TypeProvider, - adapter ref.TypeAdapter, - attrFactory AttributeFactory, - cont *containers.Container, - decorators ...InterpretableDecorator) interpretablePlanner { - return &planner{ - disp: disp, - provider: provider, - adapter: adapter, - attrFactory: attrFactory, - container: cont, - refMap: make(map[int64]*exprpb.Reference), - typeMap: make(map[int64]*exprpb.Type), + refMap: exprAST.ReferenceMap(), + typeMap: exprAST.TypeMap(), decorators: decorators, } } @@ -79,12 +57,12 @@ func newUncheckedPlanner(disp Dispatcher, // planner is an implementation of the interpretablePlanner interface. 
type planner struct { disp Dispatcher - provider ref.TypeProvider - adapter ref.TypeAdapter + provider types.Provider + adapter types.Adapter attrFactory AttributeFactory container *containers.Container - refMap map[int64]*exprpb.Reference - typeMap map[int64]*exprpb.Type + refMap map[int64]*ast.ReferenceInfo + typeMap map[int64]*types.Type decorators []InterpretableDecorator } @@ -93,22 +71,24 @@ type planner struct { // useful for layering functionality into the evaluation that is not natively understood by CEL, // such as state-tracking, expression re-write, and possibly efficient thread-safe memoization of // repeated expressions. -func (p *planner) Plan(expr *exprpb.Expr) (Interpretable, error) { - switch expr.GetExprKind().(type) { - case *exprpb.Expr_CallExpr: +func (p *planner) Plan(expr ast.Expr) (Interpretable, error) { + switch expr.Kind() { + case ast.CallKind: return p.decorate(p.planCall(expr)) - case *exprpb.Expr_IdentExpr: + case ast.IdentKind: return p.decorate(p.planIdent(expr)) - case *exprpb.Expr_SelectExpr: + case ast.LiteralKind: + return p.decorate(p.planConst(expr)) + case ast.SelectKind: return p.decorate(p.planSelect(expr)) - case *exprpb.Expr_ListExpr: + case ast.ListKind: return p.decorate(p.planCreateList(expr)) - case *exprpb.Expr_StructExpr: + case ast.MapKind: + return p.decorate(p.planCreateMap(expr)) + case ast.StructKind: return p.decorate(p.planCreateStruct(expr)) - case *exprpb.Expr_ComprehensionExpr: + case ast.ComprehensionKind: return p.decorate(p.planComprehension(expr)) - case *exprpb.Expr_ConstExpr: - return p.decorate(p.planConst(expr)) } return nil, fmt.Errorf("unsupported expr: %v", expr) } @@ -130,35 +110,32 @@ func (p *planner) decorate(i Interpretable, err error) (Interpretable, error) { } // planIdent creates an Interpretable that resolves an identifier from an Activation. -func (p *planner) planIdent(expr *exprpb.Expr) (Interpretable, error) { +func (p *planner) planIdent(expr ast.Expr) (Interpretable, error) { // Establish whether the identifier is in the reference map. - if identRef, found := p.refMap[expr.GetId()]; found { - return p.planCheckedIdent(expr.GetId(), identRef) + if identRef, found := p.refMap[expr.ID()]; found { + return p.planCheckedIdent(expr.ID(), identRef) } // Create the possible attribute list for the unresolved reference. - ident := expr.GetIdentExpr() + ident := expr.AsIdent() return &evalAttr{ adapter: p.adapter, - attr: p.attrFactory.MaybeAttribute(expr.GetId(), ident.Name), + attr: p.attrFactory.MaybeAttribute(expr.ID(), ident), }, nil } -func (p *planner) planCheckedIdent(id int64, identRef *exprpb.Reference) (Interpretable, error) { +func (p *planner) planCheckedIdent(id int64, identRef *ast.ReferenceInfo) (Interpretable, error) { // Plan a constant reference if this is the case for this simple identifier. - if identRef.GetValue() != nil { - return p.Plan(&exprpb.Expr{Id: id, - ExprKind: &exprpb.Expr_ConstExpr{ - ConstExpr: identRef.GetValue(), - }}) + if identRef.Value != nil { + return NewConstValue(id, identRef.Value), nil } // Check to see whether the type map indicates this is a type name. All types should be // registered with the provider. 
cType := p.typeMap[id] - if cType.GetType() != nil { - cVal, found := p.provider.FindIdent(identRef.GetName()) + if cType.Kind() == types.TypeKind { + cVal, found := p.provider.FindIdent(identRef.Name) if !found { - return nil, fmt.Errorf("reference to undefined type: %s", identRef.GetName()) + return nil, fmt.Errorf("reference to undefined type: %s", identRef.Name) } return NewConstValue(id, cVal), nil } @@ -166,7 +143,7 @@ func (p *planner) planCheckedIdent(id int64, identRef *exprpb.Reference) (Interp // Otherwise, return the attribute for the resolved identifier name. return &evalAttr{ adapter: p.adapter, - attr: p.attrFactory.AbsoluteAttribute(id, identRef.GetName()), + attr: p.attrFactory.AbsoluteAttribute(id, identRef.Name), }, nil } @@ -175,20 +152,20 @@ func (p *planner) planCheckedIdent(id int64, identRef *exprpb.Reference) (Interp // a) selects a field from a map or proto. // b) creates a field presence test for a select within a has() macro. // c) resolves the select expression to a namespaced identifier. -func (p *planner) planSelect(expr *exprpb.Expr) (Interpretable, error) { +func (p *planner) planSelect(expr ast.Expr) (Interpretable, error) { // If the Select id appears in the reference map from the CheckedExpr proto then it is either // a namespaced identifier or enum value. - if identRef, found := p.refMap[expr.GetId()]; found { - return p.planCheckedIdent(expr.GetId(), identRef) + if identRef, found := p.refMap[expr.ID()]; found { + return p.planCheckedIdent(expr.ID(), identRef) } - sel := expr.GetSelectExpr() + sel := expr.AsSelect() // Plan the operand evaluation. - op, err := p.Plan(sel.GetOperand()) + op, err := p.Plan(sel.Operand()) if err != nil { return nil, err } - opType := p.typeMap[sel.GetOperand().GetId()] + opType := p.typeMap[sel.Operand().ID()] // If the Select was marked TestOnly, this is a presence test. // @@ -212,14 +189,14 @@ func (p *planner) planSelect(expr *exprpb.Expr) (Interpretable, error) { } // Build a qualifier for the attribute. - qual, err := p.attrFactory.NewQualifier(opType, expr.GetId(), sel.GetField(), false) + qual, err := p.attrFactory.NewQualifier(opType, expr.ID(), sel.FieldName(), false) if err != nil { return nil, err } // Modify the attribute to be test-only. - if sel.GetTestOnly() { + if sel.IsTestOnly() { attr = &evalTestOnly{ - id: expr.GetId(), + id: expr.ID(), InterpretableAttribute: attr, } } @@ -231,10 +208,10 @@ func (p *planner) planSelect(expr *exprpb.Expr) (Interpretable, error) { // planCall creates a callable Interpretable while specializing for common functions and invocation // patterns. Specifically, conditional operators &&, ||, ?:, and (in)equality functions result in // optimized Interpretable values. -func (p *planner) planCall(expr *exprpb.Expr) (Interpretable, error) { - call := expr.GetCallExpr() +func (p *planner) planCall(expr ast.Expr) (Interpretable, error) { + call := expr.AsCall() target, fnName, oName := p.resolveFunction(expr) - argCount := len(call.GetArgs()) + argCount := len(call.Args()) var offset int if target != nil { argCount++ @@ -249,7 +226,7 @@ func (p *planner) planCall(expr *exprpb.Expr) (Interpretable, error) { } args[0] = arg } - for i, argExpr := range call.GetArgs() { + for i, argExpr := range call.Args() { arg, err := p.Plan(argExpr) if err != nil { return nil, err @@ -308,7 +285,7 @@ func (p *planner) planCall(expr *exprpb.Expr) (Interpretable, error) { } // planCallZero generates a zero-arity callable Interpretable. 
-func (p *planner) planCallZero(expr *exprpb.Expr, +func (p *planner) planCallZero(expr ast.Expr, function string, overload string, impl *functions.Overload) (Interpretable, error) { @@ -316,7 +293,7 @@ func (p *planner) planCallZero(expr *exprpb.Expr, return nil, fmt.Errorf("no such overload: %s()", function) } return &evalZeroArity{ - id: expr.GetId(), + id: expr.ID(), function: function, overload: overload, impl: impl.Function, @@ -324,7 +301,7 @@ func (p *planner) planCallZero(expr *exprpb.Expr, } // planCallUnary generates a unary callable Interpretable. -func (p *planner) planCallUnary(expr *exprpb.Expr, +func (p *planner) planCallUnary(expr ast.Expr, function string, overload string, impl *functions.Overload, @@ -341,7 +318,7 @@ func (p *planner) planCallUnary(expr *exprpb.Expr, nonStrict = impl.NonStrict } return &evalUnary{ - id: expr.GetId(), + id: expr.ID(), function: function, overload: overload, arg: args[0], @@ -352,7 +329,7 @@ func (p *planner) planCallUnary(expr *exprpb.Expr, } // planCallBinary generates a binary callable Interpretable. -func (p *planner) planCallBinary(expr *exprpb.Expr, +func (p *planner) planCallBinary(expr ast.Expr, function string, overload string, impl *functions.Overload, @@ -369,7 +346,7 @@ func (p *planner) planCallBinary(expr *exprpb.Expr, nonStrict = impl.NonStrict } return &evalBinary{ - id: expr.GetId(), + id: expr.ID(), function: function, overload: overload, lhs: args[0], @@ -381,7 +358,7 @@ func (p *planner) planCallBinary(expr *exprpb.Expr, } // planCallVarArgs generates a variable argument callable Interpretable. -func (p *planner) planCallVarArgs(expr *exprpb.Expr, +func (p *planner) planCallVarArgs(expr ast.Expr, function string, overload string, impl *functions.Overload, @@ -398,7 +375,7 @@ func (p *planner) planCallVarArgs(expr *exprpb.Expr, nonStrict = impl.NonStrict } return &evalVarArgs{ - id: expr.GetId(), + id: expr.ID(), function: function, overload: overload, args: args, @@ -409,43 +386,41 @@ func (p *planner) planCallVarArgs(expr *exprpb.Expr, } // planCallEqual generates an equals (==) Interpretable. -func (p *planner) planCallEqual(expr *exprpb.Expr, args []Interpretable) (Interpretable, error) { +func (p *planner) planCallEqual(expr ast.Expr, args []Interpretable) (Interpretable, error) { return &evalEq{ - id: expr.GetId(), + id: expr.ID(), lhs: args[0], rhs: args[1], }, nil } // planCallNotEqual generates a not equals (!=) Interpretable. -func (p *planner) planCallNotEqual(expr *exprpb.Expr, args []Interpretable) (Interpretable, error) { +func (p *planner) planCallNotEqual(expr ast.Expr, args []Interpretable) (Interpretable, error) { return &evalNe{ - id: expr.GetId(), + id: expr.ID(), lhs: args[0], rhs: args[1], }, nil } // planCallLogicalAnd generates a logical and (&&) Interpretable. -func (p *planner) planCallLogicalAnd(expr *exprpb.Expr, args []Interpretable) (Interpretable, error) { +func (p *planner) planCallLogicalAnd(expr ast.Expr, args []Interpretable) (Interpretable, error) { return &evalAnd{ - id: expr.GetId(), - lhs: args[0], - rhs: args[1], + id: expr.ID(), + terms: args, }, nil } // planCallLogicalOr generates a logical or (||) Interpretable. -func (p *planner) planCallLogicalOr(expr *exprpb.Expr, args []Interpretable) (Interpretable, error) { +func (p *planner) planCallLogicalOr(expr ast.Expr, args []Interpretable) (Interpretable, error) { return &evalOr{ - id: expr.GetId(), - lhs: args[0], - rhs: args[1], + id: expr.ID(), + terms: args, }, nil } // planCallConditional generates a conditional / ternary (c ? 
t : f) Interpretable. -func (p *planner) planCallConditional(expr *exprpb.Expr, args []Interpretable) (Interpretable, error) { +func (p *planner) planCallConditional(expr ast.Expr, args []Interpretable) (Interpretable, error) { cond := args[0] t := args[1] var tAttr Attribute @@ -467,16 +442,16 @@ func (p *planner) planCallConditional(expr *exprpb.Expr, args []Interpretable) ( return &evalAttr{ adapter: p.adapter, - attr: p.attrFactory.ConditionalAttribute(expr.GetId(), cond, tAttr, fAttr), + attr: p.attrFactory.ConditionalAttribute(expr.ID(), cond, tAttr, fAttr), }, nil } // planCallIndex either extends an attribute with the argument to the index operation, or creates // a relative attribute based on the return of a function call or operation. -func (p *planner) planCallIndex(expr *exprpb.Expr, args []Interpretable, optional bool) (Interpretable, error) { +func (p *planner) planCallIndex(expr ast.Expr, args []Interpretable, optional bool) (Interpretable, error) { op := args[0] ind := args[1] - opType := p.typeMap[expr.GetCallExpr().GetTarget().GetId()] + opType := p.typeMap[op.ID()] // Establish the attribute reference. var err error @@ -492,11 +467,11 @@ func (p *planner) planCallIndex(expr *exprpb.Expr, args []Interpretable, optiona var qual Qualifier switch ind := ind.(type) { case InterpretableConst: - qual, err = p.attrFactory.NewQualifier(opType, expr.GetId(), ind.Value(), optional) + qual, err = p.attrFactory.NewQualifier(opType, expr.ID(), ind.Value(), optional) case InterpretableAttribute: - qual, err = p.attrFactory.NewQualifier(opType, expr.GetId(), ind, optional) + qual, err = p.attrFactory.NewQualifier(opType, expr.ID(), ind, optional) default: - qual, err = p.relativeAttr(expr.GetId(), ind, optional) + qual, err = p.relativeAttr(expr.ID(), ind, optional) } if err != nil { return nil, err @@ -508,10 +483,10 @@ func (p *planner) planCallIndex(expr *exprpb.Expr, args []Interpretable, optiona } // planCreateList generates a list construction Interpretable. -func (p *planner) planCreateList(expr *exprpb.Expr) (Interpretable, error) { - list := expr.GetListExpr() - optionalIndices := list.GetOptionalIndices() - elements := list.GetElements() +func (p *planner) planCreateList(expr ast.Expr) (Interpretable, error) { + list := expr.AsList() + optionalIndices := list.OptionalIndices() + elements := list.Elements() optionals := make([]bool, len(elements)) for _, index := range optionalIndices { if index < 0 || index >= int32(len(elements)) { @@ -528,7 +503,7 @@ func (p *planner) planCreateList(expr *exprpb.Expr) (Interpretable, error) { elems[i] = elemVal } return &evalList{ - id: expr.GetId(), + id: expr.ID(), elems: elems, optionals: optionals, hasOptionals: len(optionals) != 0, @@ -537,31 +512,29 @@ func (p *planner) planCreateList(expr *exprpb.Expr) (Interpretable, error) { } // planCreateStruct generates a map or object construction Interpretable. 
-func (p *planner) planCreateStruct(expr *exprpb.Expr) (Interpretable, error) { - str := expr.GetStructExpr() - if len(str.MessageName) != 0 { - return p.planCreateObj(expr) - } - entries := str.GetEntries() +func (p *planner) planCreateMap(expr ast.Expr) (Interpretable, error) { + m := expr.AsMap() + entries := m.Entries() optionals := make([]bool, len(entries)) keys := make([]Interpretable, len(entries)) vals := make([]Interpretable, len(entries)) - for i, entry := range entries { - keyVal, err := p.Plan(entry.GetMapKey()) + for i, e := range entries { + entry := e.AsMapEntry() + keyVal, err := p.Plan(entry.Key()) if err != nil { return nil, err } keys[i] = keyVal - valVal, err := p.Plan(entry.GetValue()) + valVal, err := p.Plan(entry.Value()) if err != nil { return nil, err } vals[i] = valVal - optionals[i] = entry.GetOptionalEntry() + optionals[i] = entry.IsOptional() } return &evalMap{ - id: expr.GetId(), + id: expr.ID(), keys: keys, vals: vals, optionals: optionals, @@ -571,27 +544,28 @@ func (p *planner) planCreateStruct(expr *exprpb.Expr) (Interpretable, error) { } // planCreateObj generates an object construction Interpretable. -func (p *planner) planCreateObj(expr *exprpb.Expr) (Interpretable, error) { - obj := expr.GetStructExpr() - typeName, defined := p.resolveTypeName(obj.GetMessageName()) +func (p *planner) planCreateStruct(expr ast.Expr) (Interpretable, error) { + obj := expr.AsStruct() + typeName, defined := p.resolveTypeName(obj.TypeName()) if !defined { - return nil, fmt.Errorf("unknown type: %s", obj.GetMessageName()) - } - entries := obj.GetEntries() - optionals := make([]bool, len(entries)) - fields := make([]string, len(entries)) - vals := make([]Interpretable, len(entries)) - for i, entry := range entries { - fields[i] = entry.GetFieldKey() - val, err := p.Plan(entry.GetValue()) + return nil, fmt.Errorf("unknown type: %s", obj.TypeName()) + } + objFields := obj.Fields() + optionals := make([]bool, len(objFields)) + fields := make([]string, len(objFields)) + vals := make([]Interpretable, len(objFields)) + for i, f := range objFields { + field := f.AsStructField() + fields[i] = field.Name() + val, err := p.Plan(field.Value()) if err != nil { return nil, err } vals[i] = val - optionals[i] = entry.GetOptionalEntry() + optionals[i] = field.IsOptional() } return &evalObj{ - id: expr.GetId(), + id: expr.ID(), typeName: typeName, fields: fields, vals: vals, @@ -602,33 +576,33 @@ func (p *planner) planCreateObj(expr *exprpb.Expr) (Interpretable, error) { } // planComprehension generates an Interpretable fold operation. 
-func (p *planner) planComprehension(expr *exprpb.Expr) (Interpretable, error) { - fold := expr.GetComprehensionExpr() - accu, err := p.Plan(fold.GetAccuInit()) +func (p *planner) planComprehension(expr ast.Expr) (Interpretable, error) { + fold := expr.AsComprehension() + accu, err := p.Plan(fold.AccuInit()) if err != nil { return nil, err } - iterRange, err := p.Plan(fold.GetIterRange()) + iterRange, err := p.Plan(fold.IterRange()) if err != nil { return nil, err } - cond, err := p.Plan(fold.GetLoopCondition()) + cond, err := p.Plan(fold.LoopCondition()) if err != nil { return nil, err } - step, err := p.Plan(fold.GetLoopStep()) + step, err := p.Plan(fold.LoopStep()) if err != nil { return nil, err } - result, err := p.Plan(fold.GetResult()) + result, err := p.Plan(fold.Result()) if err != nil { return nil, err } return &evalFold{ - id: expr.GetId(), - accuVar: fold.AccuVar, + id: expr.ID(), + accuVar: fold.AccuVar(), accu: accu, - iterVar: fold.IterVar, + iterVar: fold.IterVar(), iterRange: iterRange, cond: cond, step: step, @@ -638,44 +612,15 @@ func (p *planner) planComprehension(expr *exprpb.Expr) (Interpretable, error) { } // planConst generates a constant valued Interpretable. -func (p *planner) planConst(expr *exprpb.Expr) (Interpretable, error) { - val, err := p.constValue(expr.GetConstExpr()) - if err != nil { - return nil, err - } - return NewConstValue(expr.GetId(), val), nil -} - -// constValue converts a proto Constant value to a ref.Val. -func (p *planner) constValue(c *exprpb.Constant) (ref.Val, error) { - switch c.GetConstantKind().(type) { - case *exprpb.Constant_BoolValue: - return p.adapter.NativeToValue(c.GetBoolValue()), nil - case *exprpb.Constant_BytesValue: - return p.adapter.NativeToValue(c.GetBytesValue()), nil - case *exprpb.Constant_DoubleValue: - return p.adapter.NativeToValue(c.GetDoubleValue()), nil - case *exprpb.Constant_DurationValue: - return p.adapter.NativeToValue(c.GetDurationValue().AsDuration()), nil - case *exprpb.Constant_Int64Value: - return p.adapter.NativeToValue(c.GetInt64Value()), nil - case *exprpb.Constant_NullValue: - return p.adapter.NativeToValue(c.GetNullValue()), nil - case *exprpb.Constant_StringValue: - return p.adapter.NativeToValue(c.GetStringValue()), nil - case *exprpb.Constant_TimestampValue: - return p.adapter.NativeToValue(c.GetTimestampValue().AsTime()), nil - case *exprpb.Constant_Uint64Value: - return p.adapter.NativeToValue(c.GetUint64Value()), nil - } - return nil, fmt.Errorf("unknown constant type: %v", c) +func (p *planner) planConst(expr ast.Expr) (Interpretable, error) { + return NewConstValue(expr.ID(), expr.AsLiteral()), nil } // resolveTypeName takes a qualified string constructed at parse time, applies the proto // namespace resolution rules to it in a scan over possible matching types in the TypeProvider. func (p *planner) resolveTypeName(typeName string) (string, bool) { for _, qualifiedTypeName := range p.container.ResolveCandidateNames(typeName) { - if _, found := p.provider.FindType(qualifiedTypeName); found { + if _, found := p.provider.FindStructType(qualifiedTypeName); found { return qualifiedTypeName, true } } @@ -690,20 +635,23 @@ func (p *planner) resolveTypeName(typeName string) (string, bool) { // - The target expression may only consist of ident and select expressions. // - The function is declared in the environment using its fully-qualified name. // - The fully-qualified function name matches the string serialized target value. 
-func (p *planner) resolveFunction(expr *exprpb.Expr) (*exprpb.Expr, string, string) { +func (p *planner) resolveFunction(expr ast.Expr) (ast.Expr, string, string) { // Note: similar logic exists within the `checker/checker.go`. If making changes here // please consider the impact on checker.go and consolidate implementations or mirror code // as appropriate. - call := expr.GetCallExpr() - target := call.GetTarget() - fnName := call.GetFunction() + call := expr.AsCall() + var target ast.Expr = nil + if call.IsMemberFunction() { + target = call.Target() + } + fnName := call.FunctionName() // Checked expressions always have a reference map entry, and _should_ have the fully qualified // function name as the fnName value. - oRef, hasOverload := p.refMap[expr.GetId()] + oRef, hasOverload := p.refMap[expr.ID()] if hasOverload { - if len(oRef.GetOverloadId()) == 1 { - return target, fnName, oRef.GetOverloadId()[0] + if len(oRef.OverloadIDs) == 1 { + return target, fnName, oRef.OverloadIDs[0] } // Note, this namespaced function name will not appear as a fully qualified name in ASTs // built and stored before cel-go v0.5.0; however, this functionality did not work at all @@ -774,16 +722,30 @@ func (p *planner) relativeAttr(id int64, eval Interpretable, opt bool) (Interpre // toQualifiedName converts an expression AST into a qualified name if possible, with a boolean // 'found' value that indicates if the conversion is successful. -func (p *planner) toQualifiedName(operand *exprpb.Expr) (string, bool) { +func (p *planner) toQualifiedName(operand ast.Expr) (string, bool) { // If the checker identified the expression as an attribute by the type-checker, then it can't // possibly be part of qualified name in a namespace. - _, isAttr := p.refMap[operand.GetId()] + _, isAttr := p.refMap[operand.ID()] if isAttr { return "", false } // Since functions cannot be both namespaced and receiver functions, if the operand is not an // qualified variable name, return the (possibly) qualified name given the expressions. - return containers.ToQualifiedName(operand) + switch operand.Kind() { + case ast.IdentKind: + id := operand.AsIdent() + return id, true + case ast.SelectKind: + sel := operand.AsSelect() + // Test only expressions are not valid as qualified names. + if sel.IsTestOnly() { + return "", false + } + if qual, found := p.toQualifiedName(sel.Operand()); found { + return qual + "." 
+ sel.FieldName(), true + } + } + return "", false } func stripLeadingDot(name string) string { diff --git a/vendor/github.com/google/cel-go/interpreter/prune.go b/vendor/github.com/google/cel-go/interpreter/prune.go index d1b5d6bd6..410d80dc4 100644 --- a/vendor/github.com/google/cel-go/interpreter/prune.go +++ b/vendor/github.com/google/cel-go/interpreter/prune.go @@ -15,19 +15,18 @@ package interpreter import ( + "github.com/google/cel-go/common/ast" "github.com/google/cel-go/common/operators" "github.com/google/cel-go/common/overloads" "github.com/google/cel-go/common/types" "github.com/google/cel-go/common/types/ref" "github.com/google/cel-go/common/types/traits" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" - structpb "google.golang.org/protobuf/types/known/structpb" ) type astPruner struct { - expr *exprpb.Expr - macroCalls map[int64]*exprpb.Expr + ast.ExprFactory + expr ast.Expr + macroCalls map[int64]ast.Expr state EvalState nextExprID int64 } @@ -67,84 +66,44 @@ type astPruner struct { // compiled and constant folded expressions, but is not willing to constant // fold(and thus cache results of) some external calls, then they can prepare // the overloads accordingly. -func PruneAst(expr *exprpb.Expr, macroCalls map[int64]*exprpb.Expr, state EvalState) *exprpb.ParsedExpr { +func PruneAst(expr ast.Expr, macroCalls map[int64]ast.Expr, state EvalState) *ast.AST { pruneState := NewEvalState() for _, id := range state.IDs() { v, _ := state.Value(id) pruneState.SetValue(id, v) } pruner := &astPruner{ - expr: expr, - macroCalls: macroCalls, - state: pruneState, - nextExprID: getMaxID(expr)} + ExprFactory: ast.NewExprFactory(), + expr: expr, + macroCalls: macroCalls, + state: pruneState, + nextExprID: getMaxID(expr)} newExpr, _ := pruner.maybePrune(expr) - return &exprpb.ParsedExpr{ - Expr: newExpr, - SourceInfo: &exprpb.SourceInfo{MacroCalls: pruner.macroCalls}, - } -} - -func (p *astPruner) createLiteral(id int64, val *exprpb.Constant) *exprpb.Expr { - return &exprpb.Expr{ - Id: id, - ExprKind: &exprpb.Expr_ConstExpr{ - ConstExpr: val, - }, + newInfo := ast.NewSourceInfo(nil) + for id, call := range pruner.macroCalls { + newInfo.SetMacroCall(id, call) } + return ast.NewAST(newExpr, newInfo) } -func (p *astPruner) maybeCreateLiteral(id int64, val ref.Val) (*exprpb.Expr, bool) { +func (p *astPruner) maybeCreateLiteral(id int64, val ref.Val) (ast.Expr, bool) { switch v := val.(type) { - case types.Bool: + case types.Bool, types.Bytes, types.Double, types.Int, types.Null, types.String, types.Uint: p.state.SetValue(id, val) - return p.createLiteral(id, - &exprpb.Constant{ConstantKind: &exprpb.Constant_BoolValue{BoolValue: bool(v)}}), true - case types.Bytes: - p.state.SetValue(id, val) - return p.createLiteral(id, - &exprpb.Constant{ConstantKind: &exprpb.Constant_BytesValue{BytesValue: []byte(v)}}), true - case types.Double: - p.state.SetValue(id, val) - return p.createLiteral(id, - &exprpb.Constant{ConstantKind: &exprpb.Constant_DoubleValue{DoubleValue: float64(v)}}), true + return p.NewLiteral(id, val), true case types.Duration: p.state.SetValue(id, val) - durationString := string(v.ConvertToType(types.StringType).(types.String)) - return &exprpb.Expr{ - Id: id, - ExprKind: &exprpb.Expr_CallExpr{ - CallExpr: &exprpb.Expr_Call{ - Function: overloads.TypeConvertDuration, - Args: []*exprpb.Expr{ - p.createLiteral(p.nextID(), - &exprpb.Constant{ConstantKind: &exprpb.Constant_StringValue{StringValue: durationString}}), - }, - }, - }, - }, true - case types.Int: - 
p.state.SetValue(id, val) - return p.createLiteral(id, - &exprpb.Constant{ConstantKind: &exprpb.Constant_Int64Value{Int64Value: int64(v)}}), true - case types.Uint: - p.state.SetValue(id, val) - return p.createLiteral(id, - &exprpb.Constant{ConstantKind: &exprpb.Constant_Uint64Value{Uint64Value: uint64(v)}}), true - case types.String: - p.state.SetValue(id, val) - return p.createLiteral(id, - &exprpb.Constant{ConstantKind: &exprpb.Constant_StringValue{StringValue: string(v)}}), true - case types.Null: - p.state.SetValue(id, val) - return p.createLiteral(id, - &exprpb.Constant{ConstantKind: &exprpb.Constant_NullValue{NullValue: v.Value().(structpb.NullValue)}}), true + durationString := v.ConvertToType(types.StringType).(types.String) + return p.NewCall(id, overloads.TypeConvertDuration, p.NewLiteral(p.nextID(), durationString)), true + case types.Timestamp: + timestampString := v.ConvertToType(types.StringType).(types.String) + return p.NewCall(id, overloads.TypeConvertTimestamp, p.NewLiteral(p.nextID(), timestampString)), true } // Attempt to build a list literal. if list, isList := val.(traits.Lister); isList { sz := list.Size().(types.Int) - elemExprs := make([]*exprpb.Expr, sz) + elemExprs := make([]ast.Expr, sz) for i := types.Int(0); i < sz; i++ { elem := list.Get(i) if types.IsUnknownOrError(elem) { @@ -157,20 +116,13 @@ func (p *astPruner) maybeCreateLiteral(id int64, val ref.Val) (*exprpb.Expr, boo elemExprs[i] = elemExpr } p.state.SetValue(id, val) - return &exprpb.Expr{ - Id: id, - ExprKind: &exprpb.Expr_ListExpr{ - ListExpr: &exprpb.Expr_CreateList{ - Elements: elemExprs, - }, - }, - }, true + return p.NewList(id, elemExprs, []int32{}), true } // Create a map literal if possible. if mp, isMap := val.(traits.Mapper); isMap { it := mp.Iterator() - entries := make([]*exprpb.Expr_CreateStruct_Entry, mp.Size().(types.Int)) + entries := make([]ast.EntryExpr, mp.Size().(types.Int)) i := 0 for it.HasNext() != types.False { key := it.Next() @@ -186,25 +138,12 @@ func (p *astPruner) maybeCreateLiteral(id int64, val ref.Val) (*exprpb.Expr, boo if !ok { return nil, false } - entry := &exprpb.Expr_CreateStruct_Entry{ - Id: p.nextID(), - KeyKind: &exprpb.Expr_CreateStruct_Entry_MapKey{ - MapKey: keyExpr, - }, - Value: valExpr, - } + entry := p.NewMapEntry(p.nextID(), keyExpr, valExpr, false) entries[i] = entry i++ } p.state.SetValue(id, val) - return &exprpb.Expr{ - Id: id, - ExprKind: &exprpb.Expr_StructExpr{ - StructExpr: &exprpb.Expr_CreateStruct{ - Entries: entries, - }, - }, - }, true + return p.NewMap(id, entries), true } // TODO(issues/377) To construct message literals, the type provider will need to support @@ -212,210 +151,206 @@ func (p *astPruner) maybeCreateLiteral(id int64, val ref.Val) (*exprpb.Expr, boo return nil, false } -func (p *astPruner) maybePruneOptional(elem *exprpb.Expr) (*exprpb.Expr, bool) { - elemVal, found := p.value(elem.GetId()) +func (p *astPruner) maybePruneOptional(elem ast.Expr) (ast.Expr, bool) { + elemVal, found := p.value(elem.ID()) if found && elemVal.Type() == types.OptionalType { opt := elemVal.(*types.Optional) if !opt.HasValue() { return nil, true } - if newElem, pruned := p.maybeCreateLiteral(elem.GetId(), opt.GetValue()); pruned { + if newElem, pruned := p.maybeCreateLiteral(elem.ID(), opt.GetValue()); pruned { return newElem, true } } return elem, false } -func (p *astPruner) maybePruneIn(node *exprpb.Expr) (*exprpb.Expr, bool) { +func (p *astPruner) maybePruneIn(node ast.Expr) (ast.Expr, bool) { // elem in list - call := node.GetCallExpr() - 
val, exists := p.maybeValue(call.GetArgs()[1].GetId()) + call := node.AsCall() + val, exists := p.maybeValue(call.Args()[1].ID()) if !exists { return nil, false } if sz, ok := val.(traits.Sizer); ok && sz.Size() == types.IntZero { - return p.maybeCreateLiteral(node.GetId(), types.False) + return p.maybeCreateLiteral(node.ID(), types.False) } return nil, false } -func (p *astPruner) maybePruneLogicalNot(node *exprpb.Expr) (*exprpb.Expr, bool) { - call := node.GetCallExpr() - arg := call.GetArgs()[0] - val, exists := p.maybeValue(arg.GetId()) +func (p *astPruner) maybePruneLogicalNot(node ast.Expr) (ast.Expr, bool) { + call := node.AsCall() + arg := call.Args()[0] + val, exists := p.maybeValue(arg.ID()) if !exists { return nil, false } if b, ok := val.(types.Bool); ok { - return p.maybeCreateLiteral(node.GetId(), !b) + return p.maybeCreateLiteral(node.ID(), !b) } return nil, false } -func (p *astPruner) maybePruneOr(node *exprpb.Expr) (*exprpb.Expr, bool) { - call := node.GetCallExpr() +func (p *astPruner) maybePruneOr(node ast.Expr) (ast.Expr, bool) { + call := node.AsCall() // We know result is unknown, so we have at least one unknown arg // and if one side is a known value, we know we can ignore it. - if v, exists := p.maybeValue(call.GetArgs()[0].GetId()); exists { + if v, exists := p.maybeValue(call.Args()[0].ID()); exists { if v == types.True { - return p.maybeCreateLiteral(node.GetId(), types.True) + return p.maybeCreateLiteral(node.ID(), types.True) } - return call.GetArgs()[1], true + return call.Args()[1], true } - if v, exists := p.maybeValue(call.GetArgs()[1].GetId()); exists { + if v, exists := p.maybeValue(call.Args()[1].ID()); exists { if v == types.True { - return p.maybeCreateLiteral(node.GetId(), types.True) + return p.maybeCreateLiteral(node.ID(), types.True) } - return call.GetArgs()[0], true + return call.Args()[0], true } return nil, false } -func (p *astPruner) maybePruneAnd(node *exprpb.Expr) (*exprpb.Expr, bool) { - call := node.GetCallExpr() +func (p *astPruner) maybePruneAnd(node ast.Expr) (ast.Expr, bool) { + call := node.AsCall() // We know result is unknown, so we have at least one unknown arg // and if one side is a known value, we know we can ignore it. 
- if v, exists := p.maybeValue(call.GetArgs()[0].GetId()); exists { + if v, exists := p.maybeValue(call.Args()[0].ID()); exists { if v == types.False { - return p.maybeCreateLiteral(node.GetId(), types.False) + return p.maybeCreateLiteral(node.ID(), types.False) } - return call.GetArgs()[1], true + return call.Args()[1], true } - if v, exists := p.maybeValue(call.GetArgs()[1].GetId()); exists { + if v, exists := p.maybeValue(call.Args()[1].ID()); exists { if v == types.False { - return p.maybeCreateLiteral(node.GetId(), types.False) + return p.maybeCreateLiteral(node.ID(), types.False) } - return call.GetArgs()[0], true + return call.Args()[0], true } return nil, false } -func (p *astPruner) maybePruneConditional(node *exprpb.Expr) (*exprpb.Expr, bool) { - call := node.GetCallExpr() - cond, exists := p.maybeValue(call.GetArgs()[0].GetId()) +func (p *astPruner) maybePruneConditional(node ast.Expr) (ast.Expr, bool) { + call := node.AsCall() + cond, exists := p.maybeValue(call.Args()[0].ID()) if !exists { return nil, false } if cond.Value().(bool) { - return call.GetArgs()[1], true + return call.Args()[1], true } - return call.GetArgs()[2], true + return call.Args()[2], true } -func (p *astPruner) maybePruneFunction(node *exprpb.Expr) (*exprpb.Expr, bool) { - if _, exists := p.value(node.GetId()); !exists { +func (p *astPruner) maybePruneFunction(node ast.Expr) (ast.Expr, bool) { + if _, exists := p.value(node.ID()); !exists { return nil, false } - call := node.GetCallExpr() - if call.Function == operators.LogicalOr { + call := node.AsCall() + if call.FunctionName() == operators.LogicalOr { return p.maybePruneOr(node) } - if call.Function == operators.LogicalAnd { + if call.FunctionName() == operators.LogicalAnd { return p.maybePruneAnd(node) } - if call.Function == operators.Conditional { + if call.FunctionName() == operators.Conditional { return p.maybePruneConditional(node) } - if call.Function == operators.In { + if call.FunctionName() == operators.In { return p.maybePruneIn(node) } - if call.Function == operators.LogicalNot { + if call.FunctionName() == operators.LogicalNot { return p.maybePruneLogicalNot(node) } return nil, false } -func (p *astPruner) maybePrune(node *exprpb.Expr) (*exprpb.Expr, bool) { +func (p *astPruner) maybePrune(node ast.Expr) (ast.Expr, bool) { return p.prune(node) } -func (p *astPruner) prune(node *exprpb.Expr) (*exprpb.Expr, bool) { +func (p *astPruner) prune(node ast.Expr) (ast.Expr, bool) { if node == nil { return node, false } - val, valueExists := p.maybeValue(node.GetId()) + val, valueExists := p.maybeValue(node.ID()) if valueExists { - if newNode, ok := p.maybeCreateLiteral(node.GetId(), val); ok { - delete(p.macroCalls, node.GetId()) + if newNode, ok := p.maybeCreateLiteral(node.ID(), val); ok { + delete(p.macroCalls, node.ID()) return newNode, true } } - if macro, found := p.macroCalls[node.GetId()]; found { + if macro, found := p.macroCalls[node.ID()]; found { + // Ensure that intermediate values for the comprehension are cleared during pruning + if node.Kind() == ast.ComprehensionKind { + compre := node.AsComprehension() + visit(macro, clearIterVarVisitor(compre.IterVar(), p.state)) + } // prune the expression in terms of the macro call instead of the expanded form. if newMacro, pruned := p.prune(macro); pruned { - p.macroCalls[node.GetId()] = newMacro + p.macroCalls[node.ID()] = newMacro } } // We have either an unknown/error value, or something we don't want to // transform, or expression was not evaluated. If possible, drill down // more. 
- switch node.GetExprKind().(type) { - case *exprpb.Expr_SelectExpr: - if operand, pruned := p.maybePrune(node.GetSelectExpr().GetOperand()); pruned { - return &exprpb.Expr{ - Id: node.GetId(), - ExprKind: &exprpb.Expr_SelectExpr{ - SelectExpr: &exprpb.Expr_Select{ - Operand: operand, - Field: node.GetSelectExpr().GetField(), - TestOnly: node.GetSelectExpr().GetTestOnly(), - }, - }, - }, true - } - case *exprpb.Expr_CallExpr: - var prunedCall bool - call := node.GetCallExpr() - args := call.GetArgs() - newArgs := make([]*exprpb.Expr, len(args)) - newCall := &exprpb.Expr_Call{ - Function: call.GetFunction(), - Target: call.GetTarget(), - Args: newArgs, - } - for i, arg := range args { - newArgs[i] = arg - if newArg, prunedArg := p.maybePrune(arg); prunedArg { - prunedCall = true - newArgs[i] = newArg + switch node.Kind() { + case ast.SelectKind: + sel := node.AsSelect() + if operand, isPruned := p.maybePrune(sel.Operand()); isPruned { + if sel.IsTestOnly() { + return p.NewPresenceTest(node.ID(), operand, sel.FieldName()), true } + return p.NewSelect(node.ID(), operand, sel.FieldName()), true } - if newTarget, prunedTarget := p.maybePrune(call.GetTarget()); prunedTarget { - prunedCall = true - newCall.Target = newTarget + case ast.CallKind: + argsPruned := false + call := node.AsCall() + args := call.Args() + newArgs := make([]ast.Expr, len(args)) + for i, a := range args { + newArgs[i] = a + if arg, isPruned := p.maybePrune(a); isPruned { + argsPruned = true + newArgs[i] = arg + } } - newNode := &exprpb.Expr{ - Id: node.GetId(), - ExprKind: &exprpb.Expr_CallExpr{ - CallExpr: newCall, - }, + if !call.IsMemberFunction() { + newCall := p.NewCall(node.ID(), call.FunctionName(), newArgs...) + if prunedCall, isPruned := p.maybePruneFunction(newCall); isPruned { + return prunedCall, true + } + return newCall, argsPruned } - if newExpr, pruned := p.maybePruneFunction(newNode); pruned { - newExpr, _ = p.maybePrune(newExpr) - return newExpr, true + newTarget := call.Target() + targetPruned := false + if prunedTarget, isPruned := p.maybePrune(call.Target()); isPruned { + targetPruned = true + newTarget = prunedTarget } - if prunedCall { - return newNode, true + newCall := p.NewMemberCall(node.ID(), call.FunctionName(), newTarget, newArgs...) 
+ if prunedCall, isPruned := p.maybePruneFunction(newCall); isPruned { + return prunedCall, true } - case *exprpb.Expr_ListExpr: - elems := node.GetListExpr().GetElements() - optIndices := node.GetListExpr().GetOptionalIndices() + return newCall, targetPruned || argsPruned + case ast.ListKind: + l := node.AsList() + elems := l.Elements() + optIndices := l.OptionalIndices() optIndexMap := map[int32]bool{} for _, i := range optIndices { optIndexMap[i] = true } newOptIndexMap := make(map[int32]bool, len(optIndexMap)) - newElems := make([]*exprpb.Expr, 0, len(elems)) - var prunedList bool - + newElems := make([]ast.Expr, 0, len(elems)) + var listPruned bool prunedIdx := 0 for i, elem := range elems { _, isOpt := optIndexMap[int32(i)] if isOpt { newElem, pruned := p.maybePruneOptional(elem) if pruned { - prunedList = true + listPruned = true if newElem != nil { newElems = append(newElems, newElem) prunedIdx++ @@ -426,7 +361,7 @@ func (p *astPruner) prune(node *exprpb.Expr) (*exprpb.Expr, bool) { } if newElem, prunedElem := p.maybePrune(elem); prunedElem { newElems = append(newElems, newElem) - prunedList = true + listPruned = true } else { newElems = append(newElems, elem) } @@ -438,55 +373,64 @@ func (p *astPruner) prune(node *exprpb.Expr) (*exprpb.Expr, bool) { optIndices[idx] = i idx++ } - if prunedList { - return &exprpb.Expr{ - Id: node.GetId(), - ExprKind: &exprpb.Expr_ListExpr{ - ListExpr: &exprpb.Expr_CreateList{ - Elements: newElems, - OptionalIndices: optIndices, - }, - }, - }, true + if listPruned { + return p.NewList(node.ID(), newElems, optIndices), true } - case *exprpb.Expr_StructExpr: - var prunedStruct bool - entries := node.GetStructExpr().GetEntries() - messageType := node.GetStructExpr().GetMessageName() - newEntries := make([]*exprpb.Expr_CreateStruct_Entry, len(entries)) + case ast.MapKind: + var mapPruned bool + m := node.AsMap() + entries := m.Entries() + newEntries := make([]ast.EntryExpr, len(entries)) for i, entry := range entries { newEntries[i] = entry - newKey, prunedKey := p.maybePrune(entry.GetMapKey()) - newValue, prunedValue := p.maybePrune(entry.GetValue()) - if !prunedKey && !prunedValue { + e := entry.AsMapEntry() + newKey, keyPruned := p.maybePrune(e.Key()) + newValue, valuePruned := p.maybePrune(e.Value()) + if !keyPruned && !valuePruned { continue } - prunedStruct = true - newEntry := &exprpb.Expr_CreateStruct_Entry{ - Value: newValue, - } - if messageType != "" { - newEntry.KeyKind = &exprpb.Expr_CreateStruct_Entry_FieldKey{ - FieldKey: entry.GetFieldKey(), - } - } else { - newEntry.KeyKind = &exprpb.Expr_CreateStruct_Entry_MapKey{ - MapKey: newKey, - } - } - newEntry.OptionalEntry = entry.GetOptionalEntry() + mapPruned = true + newEntry := p.NewMapEntry(entry.ID(), newKey, newValue, e.IsOptional()) newEntries[i] = newEntry } - if prunedStruct { - return &exprpb.Expr{ - Id: node.GetId(), - ExprKind: &exprpb.Expr_StructExpr{ - StructExpr: &exprpb.Expr_CreateStruct{ - MessageName: messageType, - Entries: newEntries, - }, - }, - }, true + if mapPruned { + return p.NewMap(node.ID(), newEntries), true + } + case ast.StructKind: + var structPruned bool + obj := node.AsStruct() + fields := obj.Fields() + newFields := make([]ast.EntryExpr, len(fields)) + for i, field := range fields { + newFields[i] = field + f := field.AsStructField() + newValue, prunedValue := p.maybePrune(f.Value()) + if !prunedValue { + continue + } + structPruned = true + newEntry := p.NewStructField(field.ID(), f.Name(), newValue, f.IsOptional()) + newFields[i] = newEntry + } + if 
structPruned { + return p.NewStruct(node.ID(), obj.TypeName(), newFields), true + } + case ast.ComprehensionKind: + compre := node.AsComprehension() + // Only the range of the comprehension is pruned since the state tracking only records + // the last iteration of the comprehension and not each step in the evaluation which + // means that the any residuals computed in between might be inaccurate. + if newRange, pruned := p.maybePrune(compre.IterRange()); pruned { + return p.NewComprehension( + node.ID(), + newRange, + compre.IterVar(), + compre.AccuVar(), + compre.AccuInit(), + compre.LoopCondition(), + compre.LoopStep(), + compre.Result(), + ), true } } return node, false @@ -513,65 +457,86 @@ func (p *astPruner) nextID() int64 { type astVisitor struct { // visitEntry is called on every expr node, including those within a map/struct entry. - visitExpr func(expr *exprpb.Expr) + visitExpr func(expr ast.Expr) // visitEntry is called before entering the key, value of a map/struct entry. - visitEntry func(entry *exprpb.Expr_CreateStruct_Entry) + visitEntry func(entry ast.EntryExpr) } -func getMaxID(expr *exprpb.Expr) int64 { +func getMaxID(expr ast.Expr) int64 { maxID := int64(1) visit(expr, maxIDVisitor(&maxID)) return maxID } +func clearIterVarVisitor(varName string, state EvalState) astVisitor { + return astVisitor{ + visitExpr: func(e ast.Expr) { + if e.Kind() == ast.IdentKind && e.AsIdent() == varName { + state.SetValue(e.ID(), nil) + } + }, + } +} + func maxIDVisitor(maxID *int64) astVisitor { return astVisitor{ - visitExpr: func(e *exprpb.Expr) { - if e.GetId() >= *maxID { - *maxID = e.GetId() + 1 + visitExpr: func(e ast.Expr) { + if e.ID() >= *maxID { + *maxID = e.ID() + 1 } }, - visitEntry: func(e *exprpb.Expr_CreateStruct_Entry) { - if e.GetId() >= *maxID { - *maxID = e.GetId() + 1 + visitEntry: func(e ast.EntryExpr) { + if e.ID() >= *maxID { + *maxID = e.ID() + 1 } }, } } -func visit(expr *exprpb.Expr, visitor astVisitor) { - exprs := []*exprpb.Expr{expr} +func visit(expr ast.Expr, visitor astVisitor) { + exprs := []ast.Expr{expr} for len(exprs) != 0 { e := exprs[0] - visitor.visitExpr(e) + if visitor.visitExpr != nil { + visitor.visitExpr(e) + } exprs = exprs[1:] - switch e.GetExprKind().(type) { - case *exprpb.Expr_SelectExpr: - exprs = append(exprs, e.GetSelectExpr().GetOperand()) - case *exprpb.Expr_CallExpr: - call := e.GetCallExpr() - if call.GetTarget() != nil { - exprs = append(exprs, call.GetTarget()) + switch e.Kind() { + case ast.SelectKind: + exprs = append(exprs, e.AsSelect().Operand()) + case ast.CallKind: + call := e.AsCall() + if call.Target() != nil { + exprs = append(exprs, call.Target()) } - exprs = append(exprs, call.GetArgs()...) - case *exprpb.Expr_ComprehensionExpr: - compre := e.GetComprehensionExpr() + exprs = append(exprs, call.Args()...) + case ast.ComprehensionKind: + compre := e.AsComprehension() exprs = append(exprs, - compre.GetIterRange(), - compre.GetAccuInit(), - compre.GetLoopCondition(), - compre.GetLoopStep(), - compre.GetResult()) - case *exprpb.Expr_ListExpr: - list := e.GetListExpr() - exprs = append(exprs, list.GetElements()...) - case *exprpb.Expr_StructExpr: - for _, entry := range e.GetStructExpr().GetEntries() { - visitor.visitEntry(entry) - if entry.GetMapKey() != nil { - exprs = append(exprs, entry.GetMapKey()) + compre.IterRange(), + compre.AccuInit(), + compre.LoopCondition(), + compre.LoopStep(), + compre.Result()) + case ast.ListKind: + list := e.AsList() + exprs = append(exprs, list.Elements()...) 
+ case ast.MapKind: + for _, entry := range e.AsMap().Entries() { + e := entry.AsMapEntry() + if visitor.visitEntry != nil { + visitor.visitEntry(entry) + } + exprs = append(exprs, e.Key()) + exprs = append(exprs, e.Value()) + } + case ast.StructKind: + for _, entry := range e.AsStruct().Fields() { + f := entry.AsStructField() + if visitor.visitEntry != nil { + visitor.visitEntry(entry) } - exprs = append(exprs, entry.GetValue()) + exprs = append(exprs, f.Value()) } } } diff --git a/vendor/github.com/google/cel-go/interpreter/runtimecost.go b/vendor/github.com/google/cel-go/interpreter/runtimecost.go index 80e7f6134..b9b307c15 100644 --- a/vendor/github.com/google/cel-go/interpreter/runtimecost.go +++ b/vendor/github.com/google/cel-go/interpreter/runtimecost.go @@ -65,13 +65,21 @@ func CostObserver(tracker *CostTracker) EvalObserver { // While the field names are identical, the boolean operation eval structs do not share an interface and so // must be handled individually. case *evalOr: - tracker.stack.drop(t.rhs.ID(), t.lhs.ID()) + for _, term := range t.terms { + tracker.stack.drop(term.ID()) + } case *evalAnd: - tracker.stack.drop(t.rhs.ID(), t.lhs.ID()) + for _, term := range t.terms { + tracker.stack.drop(term.ID()) + } case *evalExhaustiveOr: - tracker.stack.drop(t.rhs.ID(), t.lhs.ID()) + for _, term := range t.terms { + tracker.stack.drop(term.ID()) + } case *evalExhaustiveAnd: - tracker.stack.drop(t.rhs.ID(), t.lhs.ID()) + for _, term := range t.terms { + tracker.stack.drop(term.ID()) + } case *evalFold: tracker.stack.drop(t.iterRange.ID()) case Qualifier: @@ -125,6 +133,7 @@ func PresenceTestHasCost(hasCost bool) CostTrackerOption { func NewCostTracker(estimator ActualCostEstimator, opts ...CostTrackerOption) (*CostTracker, error) { tracker := &CostTracker{ Estimator: estimator, + overloadTrackers: map[string]FunctionTracker{}, presenceTestHasCost: true, } for _, opt := range opts { @@ -136,9 +145,24 @@ func NewCostTracker(estimator ActualCostEstimator, opts ...CostTrackerOption) (* return tracker, nil } +// OverloadCostTracker binds an overload ID to a runtime FunctionTracker implementation. +// +// OverloadCostTracker instances augment or override ActualCostEstimator decisions, allowing for versioned and/or +// optional cost tracking changes. +func OverloadCostTracker(overloadID string, fnTracker FunctionTracker) CostTrackerOption { + return func(tracker *CostTracker) error { + tracker.overloadTrackers[overloadID] = fnTracker + return nil + } +} + +// FunctionTracker computes the actual cost of evaluating the functions with the given arguments and result. +type FunctionTracker func(args []ref.Val, result ref.Val) *uint64 + // CostTracker represents the information needed for tracking runtime cost. 
type CostTracker struct { Estimator ActualCostEstimator + overloadTrackers map[string]FunctionTracker Limit *uint64 presenceTestHasCost bool @@ -151,10 +175,19 @@ func (c *CostTracker) ActualCost() uint64 { return c.cost } -func (c *CostTracker) costCall(call InterpretableCall, argValues []ref.Val, result ref.Val) uint64 { +func (c *CostTracker) costCall(call InterpretableCall, args []ref.Val, result ref.Val) uint64 { var cost uint64 + if len(c.overloadTrackers) != 0 { + if tracker, found := c.overloadTrackers[call.OverloadID()]; found { + callCost := tracker(args, result) + if callCost != nil { + cost += *callCost + return cost + } + } + } if c.Estimator != nil { - callCost := c.Estimator.CallCost(call.Function(), call.OverloadID(), argValues, result) + callCost := c.Estimator.CallCost(call.Function(), call.OverloadID(), args, result) if callCost != nil { cost += *callCost return cost @@ -165,11 +198,11 @@ func (c *CostTracker) costCall(call InterpretableCall, argValues []ref.Val, resu switch call.OverloadID() { // O(n) functions case overloads.StartsWithString, overloads.EndsWithString, overloads.StringToBytes, overloads.BytesToString, overloads.ExtQuoteString, overloads.ExtFormatString: - cost += uint64(math.Ceil(float64(c.actualSize(argValues[0])) * common.StringTraversalCostFactor)) + cost += uint64(math.Ceil(float64(c.actualSize(args[0])) * common.StringTraversalCostFactor)) case overloads.InList: // If a list is composed entirely of constant values this is O(1), but we don't account for that here. // We just assume all list containment checks are O(n). - cost += c.actualSize(argValues[1]) + cost += c.actualSize(args[1]) // O(min(m, n)) functions case overloads.LessString, overloads.GreaterString, overloads.LessEqualsString, overloads.GreaterEqualsString, overloads.LessBytes, overloads.GreaterBytes, overloads.LessEqualsBytes, overloads.GreaterEqualsBytes, @@ -177,8 +210,8 @@ func (c *CostTracker) costCall(call InterpretableCall, argValues []ref.Val, resu // When we check the equality of 2 scalar values (e.g. 2 integers, 2 floating-point numbers, 2 booleans etc.), // the CostTracker.actualSize() function by definition returns 1 for each operand, resulting in an overall cost // of 1. - lhsSize := c.actualSize(argValues[0]) - rhsSize := c.actualSize(argValues[1]) + lhsSize := c.actualSize(args[0]) + rhsSize := c.actualSize(args[1]) minSize := lhsSize if rhsSize < minSize { minSize = rhsSize @@ -187,23 +220,23 @@ func (c *CostTracker) costCall(call InterpretableCall, argValues []ref.Val, resu // O(m+n) functions case overloads.AddString, overloads.AddBytes: // In the worst case scenario, we would need to reallocate a new backing store and copy both operands over. - cost += uint64(math.Ceil(float64(c.actualSize(argValues[0])+c.actualSize(argValues[1])) * common.StringTraversalCostFactor)) + cost += uint64(math.Ceil(float64(c.actualSize(args[0])+c.actualSize(args[1])) * common.StringTraversalCostFactor)) // O(nm) functions case overloads.MatchesString: // https://swtch.com/~rsc/regexp/regexp1.html applies to RE2 implementation supported by CEL // Add one to string length for purposes of cost calculation to prevent product of string and regex to be 0 // in case where string is empty but regex is still expensive. 
- strCost := uint64(math.Ceil((1.0 + float64(c.actualSize(argValues[0]))) * common.StringTraversalCostFactor)) + strCost := uint64(math.Ceil((1.0 + float64(c.actualSize(args[0]))) * common.StringTraversalCostFactor)) // We don't know how many expressions are in the regex, just the string length (a huge // improvement here would be to somehow get a count the number of expressions in the regex or // how many states are in the regex state machine and use that to measure regex cost). // For now, we're making a guess that each expression in a regex is typically at least 4 chars // in length. - regexCost := uint64(math.Ceil(float64(c.actualSize(argValues[1])) * common.RegexStringLengthCostFactor)) + regexCost := uint64(math.Ceil(float64(c.actualSize(args[1])) * common.RegexStringLengthCostFactor)) cost += strCost * regexCost case overloads.ContainsString: - strCost := uint64(math.Ceil(float64(c.actualSize(argValues[0])) * common.StringTraversalCostFactor)) - substrCost := uint64(math.Ceil(float64(c.actualSize(argValues[1])) * common.StringTraversalCostFactor)) + strCost := uint64(math.Ceil(float64(c.actualSize(args[0])) * common.StringTraversalCostFactor)) + substrCost := uint64(math.Ceil(float64(c.actualSize(args[1])) * common.StringTraversalCostFactor)) cost += strCost * substrCost default: diff --git a/vendor/github.com/google/cel-go/parser/BUILD.bazel b/vendor/github.com/google/cel-go/parser/BUILD.bazel index 67ecc9554..97bc9bd43 100644 --- a/vendor/github.com/google/cel-go/parser/BUILD.bazel +++ b/vendor/github.com/google/cel-go/parser/BUILD.bazel @@ -20,10 +20,13 @@ go_library( visibility = ["//visibility:public"], deps = [ "//common:go_default_library", + "//common/ast:go_default_library", "//common/operators:go_default_library", "//common/runes:go_default_library", + "//common/types:go_default_library", + "//common/types/ref:go_default_library", "//parser/gen:go_default_library", - "@com_github_antlr_antlr4_runtime_go_antlr_v4//:go_default_library", + "@com_github_antlr4_go_antlr_v4//:go_default_library", "@org_golang_google_genproto_googleapis_api//expr/v1alpha1:go_default_library", "@org_golang_google_protobuf//proto:go_default_library", "@org_golang_google_protobuf//types/known/structpb:go_default_library", @@ -43,10 +46,12 @@ go_test( ":go_default_library", ], deps = [ + "//common/ast:go_default_library", "//common/debug:go_default_library", + "//common/types:go_default_library", "//parser/gen:go_default_library", "//test:go_default_library", - "@com_github_antlr_antlr4_runtime_go_antlr_v4//:go_default_library", + "@com_github_antlr4_go_antlr_v4//:go_default_library", "@org_golang_google_protobuf//proto:go_default_library", "@org_golang_google_protobuf//testing/protocmp:go_default_library", ], diff --git a/vendor/github.com/google/cel-go/parser/errors.go b/vendor/github.com/google/cel-go/parser/errors.go index ce49bb87f..93ae7a3ad 100644 --- a/vendor/github.com/google/cel-go/parser/errors.go +++ b/vendor/github.com/google/cel-go/parser/errors.go @@ -22,9 +22,22 @@ import ( // parseErrors is a specialization of Errors. type parseErrors struct { - *common.Errors + errs *common.Errors +} + +// errorCount indicates the number of errors reported. 
+func (e *parseErrors) errorCount() int { + return len(e.errs.GetErrors()) +} + +func (e *parseErrors) internalError(message string) { + e.errs.ReportErrorAtID(0, common.NoLocation, message) } func (e *parseErrors) syntaxError(l common.Location, message string) { - e.ReportError(l, fmt.Sprintf("Syntax error: %s", message)) + e.errs.ReportErrorAtID(0, l, fmt.Sprintf("Syntax error: %s", message)) +} + +func (e *parseErrors) reportErrorAtID(id int64, l common.Location, message string, args ...any) { + e.errs.ReportErrorAtID(id, l, message, args...) } diff --git a/vendor/github.com/google/cel-go/parser/gen/BUILD.bazel b/vendor/github.com/google/cel-go/parser/gen/BUILD.bazel index 654d1de7a..e70433483 100644 --- a/vendor/github.com/google/cel-go/parser/gen/BUILD.bazel +++ b/vendor/github.com/google/cel-go/parser/gen/BUILD.bazel @@ -21,6 +21,6 @@ go_library( ], importpath = "github.com/google/cel-go/parser/gen", deps = [ - "@com_github_antlr_antlr4_runtime_go_antlr_v4//:go_default_library", + "@com_github_antlr4_go_antlr_v4//:go_default_library", ], ) diff --git a/vendor/github.com/google/cel-go/parser/gen/cel_base_listener.go b/vendor/github.com/google/cel-go/parser/gen/cel_base_listener.go index 0247f470a..c49d03867 100644 --- a/vendor/github.com/google/cel-go/parser/gen/cel_base_listener.go +++ b/vendor/github.com/google/cel-go/parser/gen/cel_base_listener.go @@ -1,7 +1,7 @@ -// Code generated from /Users/tswadell/go/src/github.com/google/cel-go/parser/gen/CEL.g4 by ANTLR 4.12.0. DO NOT EDIT. +// Code generated from /usr/local/google/home/tswadell/go/src/github.com/google/cel-go/parser/gen/CEL.g4 by ANTLR 4.13.1. DO NOT EDIT. package gen // CEL -import "github.com/antlr/antlr4/runtime/Go/antlr/v4" +import "github.com/antlr4-go/antlr/v4" // BaseCELListener is a complete listener for a parse tree produced by CELParser. type BaseCELListener struct{} diff --git a/vendor/github.com/google/cel-go/parser/gen/cel_base_visitor.go b/vendor/github.com/google/cel-go/parser/gen/cel_base_visitor.go index 52a7f4dc5..b2c0783d3 100644 --- a/vendor/github.com/google/cel-go/parser/gen/cel_base_visitor.go +++ b/vendor/github.com/google/cel-go/parser/gen/cel_base_visitor.go @@ -1,7 +1,8 @@ -// Code generated from /Users/tswadell/go/src/github.com/google/cel-go/parser/gen/CEL.g4 by ANTLR 4.12.0. DO NOT EDIT. +// Code generated from /usr/local/google/home/tswadell/go/src/github.com/google/cel-go/parser/gen/CEL.g4 by ANTLR 4.13.1. DO NOT EDIT. package gen // CEL -import "github.com/antlr/antlr4/runtime/Go/antlr/v4" +import "github.com/antlr4-go/antlr/v4" + type BaseCELVisitor struct { *antlr.BaseParseTreeVisitor diff --git a/vendor/github.com/google/cel-go/parser/gen/cel_lexer.go b/vendor/github.com/google/cel-go/parser/gen/cel_lexer.go index 98ddc06d0..e026cc46f 100644 --- a/vendor/github.com/google/cel-go/parser/gen/cel_lexer.go +++ b/vendor/github.com/google/cel-go/parser/gen/cel_lexer.go @@ -1,280 +1,278 @@ -// Code generated from /Users/tswadell/go/src/github.com/google/cel-go/parser/gen/CEL.g4 by ANTLR 4.12.0. DO NOT EDIT. +// Code generated from /usr/local/google/home/tswadell/go/src/github.com/google/cel-go/parser/gen/CEL.g4 by ANTLR 4.13.1. DO NOT EDIT. 
package gen - import ( "fmt" - "sync" + "sync" "unicode" - - "github.com/antlr/antlr4/runtime/Go/antlr/v4" + "github.com/antlr4-go/antlr/v4" ) - // Suppress unused import error var _ = fmt.Printf var _ = sync.Once{} var _ = unicode.IsLetter + type CELLexer struct { *antlr.BaseLexer channelNames []string - modeNames []string + modeNames []string // TODO: EOF string } -var cellexerLexerStaticData struct { - once sync.Once - serializedATN []int32 - channelNames []string - modeNames []string - literalNames []string - symbolicNames []string - ruleNames []string - predictionContextCache *antlr.PredictionContextCache - atn *antlr.ATN - decisionToDFA []*antlr.DFA +var CELLexerLexerStaticData struct { + once sync.Once + serializedATN []int32 + ChannelNames []string + ModeNames []string + LiteralNames []string + SymbolicNames []string + RuleNames []string + PredictionContextCache *antlr.PredictionContextCache + atn *antlr.ATN + decisionToDFA []*antlr.DFA } func cellexerLexerInit() { - staticData := &cellexerLexerStaticData - staticData.channelNames = []string{ - "DEFAULT_TOKEN_CHANNEL", "HIDDEN", - } - staticData.modeNames = []string{ - "DEFAULT_MODE", - } - staticData.literalNames = []string{ - "", "'=='", "'!='", "'in'", "'<'", "'<='", "'>='", "'>'", "'&&'", "'||'", - "'['", "']'", "'{'", "'}'", "'('", "')'", "'.'", "','", "'-'", "'!'", - "'?'", "':'", "'+'", "'*'", "'/'", "'%'", "'true'", "'false'", "'null'", - } - staticData.symbolicNames = []string{ - "", "EQUALS", "NOT_EQUALS", "IN", "LESS", "LESS_EQUALS", "GREATER_EQUALS", - "GREATER", "LOGICAL_AND", "LOGICAL_OR", "LBRACKET", "RPRACKET", "LBRACE", - "RBRACE", "LPAREN", "RPAREN", "DOT", "COMMA", "MINUS", "EXCLAM", "QUESTIONMARK", - "COLON", "PLUS", "STAR", "SLASH", "PERCENT", "CEL_TRUE", "CEL_FALSE", - "NUL", "WHITESPACE", "COMMENT", "NUM_FLOAT", "NUM_INT", "NUM_UINT", - "STRING", "BYTES", "IDENTIFIER", - } - staticData.ruleNames = []string{ - "EQUALS", "NOT_EQUALS", "IN", "LESS", "LESS_EQUALS", "GREATER_EQUALS", - "GREATER", "LOGICAL_AND", "LOGICAL_OR", "LBRACKET", "RPRACKET", "LBRACE", - "RBRACE", "LPAREN", "RPAREN", "DOT", "COMMA", "MINUS", "EXCLAM", "QUESTIONMARK", - "COLON", "PLUS", "STAR", "SLASH", "PERCENT", "CEL_TRUE", "CEL_FALSE", - "NUL", "BACKSLASH", "LETTER", "DIGIT", "EXPONENT", "HEXDIGIT", "RAW", - "ESC_SEQ", "ESC_CHAR_SEQ", "ESC_OCT_SEQ", "ESC_BYTE_SEQ", "ESC_UNI_SEQ", - "WHITESPACE", "COMMENT", "NUM_FLOAT", "NUM_INT", "NUM_UINT", "STRING", - "BYTES", "IDENTIFIER", - } - staticData.predictionContextCache = antlr.NewPredictionContextCache() - staticData.serializedATN = []int32{ - 4, 0, 36, 423, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, - 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, - 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, - 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, - 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, - 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, - 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, - 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, - 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, - 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, - 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, - 1, 8, 1, 9, 1, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, - 1, 14, 1, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 17, 1, 17, 1, 18, 1, 
18, 1, - 19, 1, 19, 1, 20, 1, 20, 1, 21, 1, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 24, - 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, - 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, - 1, 30, 1, 30, 1, 31, 1, 31, 3, 31, 177, 8, 31, 1, 31, 4, 31, 180, 8, 31, - 11, 31, 12, 31, 181, 1, 32, 1, 32, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, - 34, 3, 34, 192, 8, 34, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 36, - 1, 36, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, - 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, - 1, 38, 1, 38, 1, 38, 3, 38, 225, 8, 38, 1, 39, 4, 39, 228, 8, 39, 11, 39, - 12, 39, 229, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40, 5, 40, 238, 8, 40, - 10, 40, 12, 40, 241, 9, 40, 1, 40, 1, 40, 1, 41, 4, 41, 246, 8, 41, 11, - 41, 12, 41, 247, 1, 41, 1, 41, 4, 41, 252, 8, 41, 11, 41, 12, 41, 253, - 1, 41, 3, 41, 257, 8, 41, 1, 41, 4, 41, 260, 8, 41, 11, 41, 12, 41, 261, - 1, 41, 1, 41, 1, 41, 1, 41, 4, 41, 268, 8, 41, 11, 41, 12, 41, 269, 1, - 41, 3, 41, 273, 8, 41, 3, 41, 275, 8, 41, 1, 42, 4, 42, 278, 8, 42, 11, - 42, 12, 42, 279, 1, 42, 1, 42, 1, 42, 1, 42, 4, 42, 286, 8, 42, 11, 42, - 12, 42, 287, 3, 42, 290, 8, 42, 1, 43, 4, 43, 293, 8, 43, 11, 43, 12, 43, - 294, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 4, 43, 303, 8, 43, 11, 43, - 12, 43, 304, 1, 43, 1, 43, 3, 43, 309, 8, 43, 1, 44, 1, 44, 1, 44, 5, 44, - 314, 8, 44, 10, 44, 12, 44, 317, 9, 44, 1, 44, 1, 44, 1, 44, 1, 44, 5, - 44, 323, 8, 44, 10, 44, 12, 44, 326, 9, 44, 1, 44, 1, 44, 1, 44, 1, 44, - 1, 44, 1, 44, 1, 44, 5, 44, 335, 8, 44, 10, 44, 12, 44, 338, 9, 44, 1, - 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 5, 44, 349, - 8, 44, 10, 44, 12, 44, 352, 9, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, - 44, 5, 44, 360, 8, 44, 10, 44, 12, 44, 363, 9, 44, 1, 44, 1, 44, 1, 44, - 1, 44, 1, 44, 5, 44, 370, 8, 44, 10, 44, 12, 44, 373, 9, 44, 1, 44, 1, - 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 5, 44, 383, 8, 44, 10, 44, - 12, 44, 386, 9, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, - 44, 1, 44, 1, 44, 5, 44, 398, 8, 44, 10, 44, 12, 44, 401, 9, 44, 1, 44, - 1, 44, 1, 44, 1, 44, 3, 44, 407, 8, 44, 1, 45, 1, 45, 1, 45, 1, 46, 1, - 46, 3, 46, 414, 8, 46, 1, 46, 1, 46, 1, 46, 5, 46, 419, 8, 46, 10, 46, - 12, 46, 422, 9, 46, 4, 336, 350, 384, 399, 0, 47, 1, 1, 3, 2, 5, 3, 7, - 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, - 14, 29, 15, 31, 16, 33, 17, 35, 18, 37, 19, 39, 20, 41, 21, 43, 22, 45, - 23, 47, 24, 49, 25, 51, 26, 53, 27, 55, 28, 57, 0, 59, 0, 61, 0, 63, 0, - 65, 0, 67, 0, 69, 0, 71, 0, 73, 0, 75, 0, 77, 0, 79, 29, 81, 30, 83, 31, - 85, 32, 87, 33, 89, 34, 91, 35, 93, 36, 1, 0, 16, 2, 0, 65, 90, 97, 122, - 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 3, 0, 48, 57, 65, 70, 97, - 102, 2, 0, 82, 82, 114, 114, 10, 0, 34, 34, 39, 39, 63, 63, 92, 92, 96, - 98, 102, 102, 110, 110, 114, 114, 116, 116, 118, 118, 2, 0, 88, 88, 120, - 120, 3, 0, 9, 10, 12, 13, 32, 32, 1, 0, 10, 10, 2, 0, 85, 85, 117, 117, - 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 4, 0, 10, 10, 13, 13, 39, 39, 92, - 92, 1, 0, 92, 92, 3, 0, 10, 10, 13, 13, 34, 34, 3, 0, 10, 10, 13, 13, 39, - 39, 2, 0, 66, 66, 98, 98, 456, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, - 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, - 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, - 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, - 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 
33, 1, 0, 0, 0, 0, 35, 1, 0, 0, - 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, - 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, - 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 79, 1, 0, 0, 0, 0, 81, - 1, 0, 0, 0, 0, 83, 1, 0, 0, 0, 0, 85, 1, 0, 0, 0, 0, 87, 1, 0, 0, 0, 0, - 89, 1, 0, 0, 0, 0, 91, 1, 0, 0, 0, 0, 93, 1, 0, 0, 0, 1, 95, 1, 0, 0, 0, - 3, 98, 1, 0, 0, 0, 5, 101, 1, 0, 0, 0, 7, 104, 1, 0, 0, 0, 9, 106, 1, 0, - 0, 0, 11, 109, 1, 0, 0, 0, 13, 112, 1, 0, 0, 0, 15, 114, 1, 0, 0, 0, 17, - 117, 1, 0, 0, 0, 19, 120, 1, 0, 0, 0, 21, 122, 1, 0, 0, 0, 23, 124, 1, - 0, 0, 0, 25, 126, 1, 0, 0, 0, 27, 128, 1, 0, 0, 0, 29, 130, 1, 0, 0, 0, - 31, 132, 1, 0, 0, 0, 33, 134, 1, 0, 0, 0, 35, 136, 1, 0, 0, 0, 37, 138, - 1, 0, 0, 0, 39, 140, 1, 0, 0, 0, 41, 142, 1, 0, 0, 0, 43, 144, 1, 0, 0, - 0, 45, 146, 1, 0, 0, 0, 47, 148, 1, 0, 0, 0, 49, 150, 1, 0, 0, 0, 51, 152, - 1, 0, 0, 0, 53, 157, 1, 0, 0, 0, 55, 163, 1, 0, 0, 0, 57, 168, 1, 0, 0, - 0, 59, 170, 1, 0, 0, 0, 61, 172, 1, 0, 0, 0, 63, 174, 1, 0, 0, 0, 65, 183, - 1, 0, 0, 0, 67, 185, 1, 0, 0, 0, 69, 191, 1, 0, 0, 0, 71, 193, 1, 0, 0, - 0, 73, 196, 1, 0, 0, 0, 75, 201, 1, 0, 0, 0, 77, 224, 1, 0, 0, 0, 79, 227, - 1, 0, 0, 0, 81, 233, 1, 0, 0, 0, 83, 274, 1, 0, 0, 0, 85, 289, 1, 0, 0, - 0, 87, 308, 1, 0, 0, 0, 89, 406, 1, 0, 0, 0, 91, 408, 1, 0, 0, 0, 93, 413, - 1, 0, 0, 0, 95, 96, 5, 61, 0, 0, 96, 97, 5, 61, 0, 0, 97, 2, 1, 0, 0, 0, - 98, 99, 5, 33, 0, 0, 99, 100, 5, 61, 0, 0, 100, 4, 1, 0, 0, 0, 101, 102, - 5, 105, 0, 0, 102, 103, 5, 110, 0, 0, 103, 6, 1, 0, 0, 0, 104, 105, 5, - 60, 0, 0, 105, 8, 1, 0, 0, 0, 106, 107, 5, 60, 0, 0, 107, 108, 5, 61, 0, - 0, 108, 10, 1, 0, 0, 0, 109, 110, 5, 62, 0, 0, 110, 111, 5, 61, 0, 0, 111, - 12, 1, 0, 0, 0, 112, 113, 5, 62, 0, 0, 113, 14, 1, 0, 0, 0, 114, 115, 5, - 38, 0, 0, 115, 116, 5, 38, 0, 0, 116, 16, 1, 0, 0, 0, 117, 118, 5, 124, - 0, 0, 118, 119, 5, 124, 0, 0, 119, 18, 1, 0, 0, 0, 120, 121, 5, 91, 0, - 0, 121, 20, 1, 0, 0, 0, 122, 123, 5, 93, 0, 0, 123, 22, 1, 0, 0, 0, 124, - 125, 5, 123, 0, 0, 125, 24, 1, 0, 0, 0, 126, 127, 5, 125, 0, 0, 127, 26, - 1, 0, 0, 0, 128, 129, 5, 40, 0, 0, 129, 28, 1, 0, 0, 0, 130, 131, 5, 41, - 0, 0, 131, 30, 1, 0, 0, 0, 132, 133, 5, 46, 0, 0, 133, 32, 1, 0, 0, 0, - 134, 135, 5, 44, 0, 0, 135, 34, 1, 0, 0, 0, 136, 137, 5, 45, 0, 0, 137, - 36, 1, 0, 0, 0, 138, 139, 5, 33, 0, 0, 139, 38, 1, 0, 0, 0, 140, 141, 5, - 63, 0, 0, 141, 40, 1, 0, 0, 0, 142, 143, 5, 58, 0, 0, 143, 42, 1, 0, 0, - 0, 144, 145, 5, 43, 0, 0, 145, 44, 1, 0, 0, 0, 146, 147, 5, 42, 0, 0, 147, - 46, 1, 0, 0, 0, 148, 149, 5, 47, 0, 0, 149, 48, 1, 0, 0, 0, 150, 151, 5, - 37, 0, 0, 151, 50, 1, 0, 0, 0, 152, 153, 5, 116, 0, 0, 153, 154, 5, 114, - 0, 0, 154, 155, 5, 117, 0, 0, 155, 156, 5, 101, 0, 0, 156, 52, 1, 0, 0, - 0, 157, 158, 5, 102, 0, 0, 158, 159, 5, 97, 0, 0, 159, 160, 5, 108, 0, - 0, 160, 161, 5, 115, 0, 0, 161, 162, 5, 101, 0, 0, 162, 54, 1, 0, 0, 0, - 163, 164, 5, 110, 0, 0, 164, 165, 5, 117, 0, 0, 165, 166, 5, 108, 0, 0, - 166, 167, 5, 108, 0, 0, 167, 56, 1, 0, 0, 0, 168, 169, 5, 92, 0, 0, 169, - 58, 1, 0, 0, 0, 170, 171, 7, 0, 0, 0, 171, 60, 1, 0, 0, 0, 172, 173, 2, - 48, 57, 0, 173, 62, 1, 0, 0, 0, 174, 176, 7, 1, 0, 0, 175, 177, 7, 2, 0, - 0, 176, 175, 1, 0, 0, 0, 176, 177, 1, 0, 0, 0, 177, 179, 1, 0, 0, 0, 178, - 180, 3, 61, 30, 0, 179, 178, 1, 0, 0, 0, 180, 181, 1, 0, 0, 0, 181, 179, - 1, 0, 0, 0, 181, 182, 1, 0, 0, 0, 182, 64, 1, 0, 0, 0, 183, 184, 7, 3, - 0, 0, 184, 66, 1, 0, 0, 0, 185, 186, 7, 4, 0, 0, 186, 68, 1, 0, 0, 0, 187, - 192, 
3, 71, 35, 0, 188, 192, 3, 75, 37, 0, 189, 192, 3, 77, 38, 0, 190, - 192, 3, 73, 36, 0, 191, 187, 1, 0, 0, 0, 191, 188, 1, 0, 0, 0, 191, 189, - 1, 0, 0, 0, 191, 190, 1, 0, 0, 0, 192, 70, 1, 0, 0, 0, 193, 194, 3, 57, - 28, 0, 194, 195, 7, 5, 0, 0, 195, 72, 1, 0, 0, 0, 196, 197, 3, 57, 28, - 0, 197, 198, 2, 48, 51, 0, 198, 199, 2, 48, 55, 0, 199, 200, 2, 48, 55, - 0, 200, 74, 1, 0, 0, 0, 201, 202, 3, 57, 28, 0, 202, 203, 7, 6, 0, 0, 203, - 204, 3, 65, 32, 0, 204, 205, 3, 65, 32, 0, 205, 76, 1, 0, 0, 0, 206, 207, - 3, 57, 28, 0, 207, 208, 5, 117, 0, 0, 208, 209, 3, 65, 32, 0, 209, 210, - 3, 65, 32, 0, 210, 211, 3, 65, 32, 0, 211, 212, 3, 65, 32, 0, 212, 225, - 1, 0, 0, 0, 213, 214, 3, 57, 28, 0, 214, 215, 5, 85, 0, 0, 215, 216, 3, - 65, 32, 0, 216, 217, 3, 65, 32, 0, 217, 218, 3, 65, 32, 0, 218, 219, 3, - 65, 32, 0, 219, 220, 3, 65, 32, 0, 220, 221, 3, 65, 32, 0, 221, 222, 3, - 65, 32, 0, 222, 223, 3, 65, 32, 0, 223, 225, 1, 0, 0, 0, 224, 206, 1, 0, - 0, 0, 224, 213, 1, 0, 0, 0, 225, 78, 1, 0, 0, 0, 226, 228, 7, 7, 0, 0, - 227, 226, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 227, 1, 0, 0, 0, 229, - 230, 1, 0, 0, 0, 230, 231, 1, 0, 0, 0, 231, 232, 6, 39, 0, 0, 232, 80, - 1, 0, 0, 0, 233, 234, 5, 47, 0, 0, 234, 235, 5, 47, 0, 0, 235, 239, 1, - 0, 0, 0, 236, 238, 8, 8, 0, 0, 237, 236, 1, 0, 0, 0, 238, 241, 1, 0, 0, - 0, 239, 237, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 242, 1, 0, 0, 0, 241, - 239, 1, 0, 0, 0, 242, 243, 6, 40, 0, 0, 243, 82, 1, 0, 0, 0, 244, 246, - 3, 61, 30, 0, 245, 244, 1, 0, 0, 0, 246, 247, 1, 0, 0, 0, 247, 245, 1, - 0, 0, 0, 247, 248, 1, 0, 0, 0, 248, 249, 1, 0, 0, 0, 249, 251, 5, 46, 0, - 0, 250, 252, 3, 61, 30, 0, 251, 250, 1, 0, 0, 0, 252, 253, 1, 0, 0, 0, - 253, 251, 1, 0, 0, 0, 253, 254, 1, 0, 0, 0, 254, 256, 1, 0, 0, 0, 255, - 257, 3, 63, 31, 0, 256, 255, 1, 0, 0, 0, 256, 257, 1, 0, 0, 0, 257, 275, - 1, 0, 0, 0, 258, 260, 3, 61, 30, 0, 259, 258, 1, 0, 0, 0, 260, 261, 1, - 0, 0, 0, 261, 259, 1, 0, 0, 0, 261, 262, 1, 0, 0, 0, 262, 263, 1, 0, 0, - 0, 263, 264, 3, 63, 31, 0, 264, 275, 1, 0, 0, 0, 265, 267, 5, 46, 0, 0, - 266, 268, 3, 61, 30, 0, 267, 266, 1, 0, 0, 0, 268, 269, 1, 0, 0, 0, 269, - 267, 1, 0, 0, 0, 269, 270, 1, 0, 0, 0, 270, 272, 1, 0, 0, 0, 271, 273, - 3, 63, 31, 0, 272, 271, 1, 0, 0, 0, 272, 273, 1, 0, 0, 0, 273, 275, 1, - 0, 0, 0, 274, 245, 1, 0, 0, 0, 274, 259, 1, 0, 0, 0, 274, 265, 1, 0, 0, - 0, 275, 84, 1, 0, 0, 0, 276, 278, 3, 61, 30, 0, 277, 276, 1, 0, 0, 0, 278, - 279, 1, 0, 0, 0, 279, 277, 1, 0, 0, 0, 279, 280, 1, 0, 0, 0, 280, 290, - 1, 0, 0, 0, 281, 282, 5, 48, 0, 0, 282, 283, 5, 120, 0, 0, 283, 285, 1, - 0, 0, 0, 284, 286, 3, 65, 32, 0, 285, 284, 1, 0, 0, 0, 286, 287, 1, 0, - 0, 0, 287, 285, 1, 0, 0, 0, 287, 288, 1, 0, 0, 0, 288, 290, 1, 0, 0, 0, - 289, 277, 1, 0, 0, 0, 289, 281, 1, 0, 0, 0, 290, 86, 1, 0, 0, 0, 291, 293, - 3, 61, 30, 0, 292, 291, 1, 0, 0, 0, 293, 294, 1, 0, 0, 0, 294, 292, 1, - 0, 0, 0, 294, 295, 1, 0, 0, 0, 295, 296, 1, 0, 0, 0, 296, 297, 7, 9, 0, - 0, 297, 309, 1, 0, 0, 0, 298, 299, 5, 48, 0, 0, 299, 300, 5, 120, 0, 0, - 300, 302, 1, 0, 0, 0, 301, 303, 3, 65, 32, 0, 302, 301, 1, 0, 0, 0, 303, - 304, 1, 0, 0, 0, 304, 302, 1, 0, 0, 0, 304, 305, 1, 0, 0, 0, 305, 306, - 1, 0, 0, 0, 306, 307, 7, 9, 0, 0, 307, 309, 1, 0, 0, 0, 308, 292, 1, 0, - 0, 0, 308, 298, 1, 0, 0, 0, 309, 88, 1, 0, 0, 0, 310, 315, 5, 34, 0, 0, - 311, 314, 3, 69, 34, 0, 312, 314, 8, 10, 0, 0, 313, 311, 1, 0, 0, 0, 313, - 312, 1, 0, 0, 0, 314, 317, 1, 0, 0, 0, 315, 313, 1, 0, 0, 0, 315, 316, - 1, 0, 0, 0, 316, 318, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 318, 407, 5, 34, - 0, 
0, 319, 324, 5, 39, 0, 0, 320, 323, 3, 69, 34, 0, 321, 323, 8, 11, 0, - 0, 322, 320, 1, 0, 0, 0, 322, 321, 1, 0, 0, 0, 323, 326, 1, 0, 0, 0, 324, - 322, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, 327, 1, 0, 0, 0, 326, 324, - 1, 0, 0, 0, 327, 407, 5, 39, 0, 0, 328, 329, 5, 34, 0, 0, 329, 330, 5, - 34, 0, 0, 330, 331, 5, 34, 0, 0, 331, 336, 1, 0, 0, 0, 332, 335, 3, 69, - 34, 0, 333, 335, 8, 12, 0, 0, 334, 332, 1, 0, 0, 0, 334, 333, 1, 0, 0, - 0, 335, 338, 1, 0, 0, 0, 336, 337, 1, 0, 0, 0, 336, 334, 1, 0, 0, 0, 337, - 339, 1, 0, 0, 0, 338, 336, 1, 0, 0, 0, 339, 340, 5, 34, 0, 0, 340, 341, - 5, 34, 0, 0, 341, 407, 5, 34, 0, 0, 342, 343, 5, 39, 0, 0, 343, 344, 5, - 39, 0, 0, 344, 345, 5, 39, 0, 0, 345, 350, 1, 0, 0, 0, 346, 349, 3, 69, - 34, 0, 347, 349, 8, 12, 0, 0, 348, 346, 1, 0, 0, 0, 348, 347, 1, 0, 0, - 0, 349, 352, 1, 0, 0, 0, 350, 351, 1, 0, 0, 0, 350, 348, 1, 0, 0, 0, 351, - 353, 1, 0, 0, 0, 352, 350, 1, 0, 0, 0, 353, 354, 5, 39, 0, 0, 354, 355, - 5, 39, 0, 0, 355, 407, 5, 39, 0, 0, 356, 357, 3, 67, 33, 0, 357, 361, 5, - 34, 0, 0, 358, 360, 8, 13, 0, 0, 359, 358, 1, 0, 0, 0, 360, 363, 1, 0, - 0, 0, 361, 359, 1, 0, 0, 0, 361, 362, 1, 0, 0, 0, 362, 364, 1, 0, 0, 0, - 363, 361, 1, 0, 0, 0, 364, 365, 5, 34, 0, 0, 365, 407, 1, 0, 0, 0, 366, - 367, 3, 67, 33, 0, 367, 371, 5, 39, 0, 0, 368, 370, 8, 14, 0, 0, 369, 368, - 1, 0, 0, 0, 370, 373, 1, 0, 0, 0, 371, 369, 1, 0, 0, 0, 371, 372, 1, 0, - 0, 0, 372, 374, 1, 0, 0, 0, 373, 371, 1, 0, 0, 0, 374, 375, 5, 39, 0, 0, - 375, 407, 1, 0, 0, 0, 376, 377, 3, 67, 33, 0, 377, 378, 5, 34, 0, 0, 378, - 379, 5, 34, 0, 0, 379, 380, 5, 34, 0, 0, 380, 384, 1, 0, 0, 0, 381, 383, - 9, 0, 0, 0, 382, 381, 1, 0, 0, 0, 383, 386, 1, 0, 0, 0, 384, 385, 1, 0, - 0, 0, 384, 382, 1, 0, 0, 0, 385, 387, 1, 0, 0, 0, 386, 384, 1, 0, 0, 0, - 387, 388, 5, 34, 0, 0, 388, 389, 5, 34, 0, 0, 389, 390, 5, 34, 0, 0, 390, - 407, 1, 0, 0, 0, 391, 392, 3, 67, 33, 0, 392, 393, 5, 39, 0, 0, 393, 394, - 5, 39, 0, 0, 394, 395, 5, 39, 0, 0, 395, 399, 1, 0, 0, 0, 396, 398, 9, - 0, 0, 0, 397, 396, 1, 0, 0, 0, 398, 401, 1, 0, 0, 0, 399, 400, 1, 0, 0, - 0, 399, 397, 1, 0, 0, 0, 400, 402, 1, 0, 0, 0, 401, 399, 1, 0, 0, 0, 402, - 403, 5, 39, 0, 0, 403, 404, 5, 39, 0, 0, 404, 405, 5, 39, 0, 0, 405, 407, - 1, 0, 0, 0, 406, 310, 1, 0, 0, 0, 406, 319, 1, 0, 0, 0, 406, 328, 1, 0, - 0, 0, 406, 342, 1, 0, 0, 0, 406, 356, 1, 0, 0, 0, 406, 366, 1, 0, 0, 0, - 406, 376, 1, 0, 0, 0, 406, 391, 1, 0, 0, 0, 407, 90, 1, 0, 0, 0, 408, 409, - 7, 15, 0, 0, 409, 410, 3, 89, 44, 0, 410, 92, 1, 0, 0, 0, 411, 414, 3, - 59, 29, 0, 412, 414, 5, 95, 0, 0, 413, 411, 1, 0, 0, 0, 413, 412, 1, 0, - 0, 0, 414, 420, 1, 0, 0, 0, 415, 419, 3, 59, 29, 0, 416, 419, 3, 61, 30, - 0, 417, 419, 5, 95, 0, 0, 418, 415, 1, 0, 0, 0, 418, 416, 1, 0, 0, 0, 418, - 417, 1, 0, 0, 0, 419, 422, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 420, 421, - 1, 0, 0, 0, 421, 94, 1, 0, 0, 0, 422, 420, 1, 0, 0, 0, 36, 0, 176, 181, - 191, 224, 229, 239, 247, 253, 256, 261, 269, 272, 274, 279, 287, 289, 294, - 304, 308, 313, 315, 322, 324, 334, 336, 348, 350, 361, 371, 384, 399, 406, - 413, 418, 420, 1, 0, 1, 0, - } - deserializer := antlr.NewATNDeserializer(nil) - staticData.atn = deserializer.Deserialize(staticData.serializedATN) - atn := staticData.atn - staticData.decisionToDFA = make([]*antlr.DFA, len(atn.DecisionToState)) - decisionToDFA := staticData.decisionToDFA - for index, state := range atn.DecisionToState { - decisionToDFA[index] = antlr.NewDFA(state, index) - } + staticData := &CELLexerLexerStaticData + staticData.ChannelNames = []string{ + 
"DEFAULT_TOKEN_CHANNEL", "HIDDEN", + } + staticData.ModeNames = []string{ + "DEFAULT_MODE", + } + staticData.LiteralNames = []string{ + "", "'=='", "'!='", "'in'", "'<'", "'<='", "'>='", "'>'", "'&&'", "'||'", + "'['", "']'", "'{'", "'}'", "'('", "')'", "'.'", "','", "'-'", "'!'", + "'?'", "':'", "'+'", "'*'", "'/'", "'%'", "'true'", "'false'", "'null'", + } + staticData.SymbolicNames = []string{ + "", "EQUALS", "NOT_EQUALS", "IN", "LESS", "LESS_EQUALS", "GREATER_EQUALS", + "GREATER", "LOGICAL_AND", "LOGICAL_OR", "LBRACKET", "RPRACKET", "LBRACE", + "RBRACE", "LPAREN", "RPAREN", "DOT", "COMMA", "MINUS", "EXCLAM", "QUESTIONMARK", + "COLON", "PLUS", "STAR", "SLASH", "PERCENT", "CEL_TRUE", "CEL_FALSE", + "NUL", "WHITESPACE", "COMMENT", "NUM_FLOAT", "NUM_INT", "NUM_UINT", + "STRING", "BYTES", "IDENTIFIER", + } + staticData.RuleNames = []string{ + "EQUALS", "NOT_EQUALS", "IN", "LESS", "LESS_EQUALS", "GREATER_EQUALS", + "GREATER", "LOGICAL_AND", "LOGICAL_OR", "LBRACKET", "RPRACKET", "LBRACE", + "RBRACE", "LPAREN", "RPAREN", "DOT", "COMMA", "MINUS", "EXCLAM", "QUESTIONMARK", + "COLON", "PLUS", "STAR", "SLASH", "PERCENT", "CEL_TRUE", "CEL_FALSE", + "NUL", "BACKSLASH", "LETTER", "DIGIT", "EXPONENT", "HEXDIGIT", "RAW", + "ESC_SEQ", "ESC_CHAR_SEQ", "ESC_OCT_SEQ", "ESC_BYTE_SEQ", "ESC_UNI_SEQ", + "WHITESPACE", "COMMENT", "NUM_FLOAT", "NUM_INT", "NUM_UINT", "STRING", + "BYTES", "IDENTIFIER", + } + staticData.PredictionContextCache = antlr.NewPredictionContextCache() + staticData.serializedATN = []int32{ + 4, 0, 36, 423, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, + 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, + 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, + 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, + 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, + 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, + 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, + 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, + 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, + 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, + 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, + 1, 8, 1, 9, 1, 9, 1, 10, 1, 10, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, + 1, 14, 1, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 17, 1, 17, 1, 18, 1, 18, 1, + 19, 1, 19, 1, 20, 1, 20, 1, 21, 1, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 24, + 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, + 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, + 1, 30, 1, 30, 1, 31, 1, 31, 3, 31, 177, 8, 31, 1, 31, 4, 31, 180, 8, 31, + 11, 31, 12, 31, 181, 1, 32, 1, 32, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 1, + 34, 3, 34, 192, 8, 34, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 36, + 1, 36, 1, 37, 1, 37, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, + 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, + 1, 38, 1, 38, 1, 38, 3, 38, 225, 8, 38, 1, 39, 4, 39, 228, 8, 39, 11, 39, + 12, 39, 229, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40, 5, 40, 238, 8, 40, + 10, 40, 12, 40, 241, 9, 40, 1, 40, 1, 40, 1, 41, 4, 41, 246, 8, 41, 11, + 41, 12, 41, 247, 1, 41, 1, 41, 4, 41, 252, 8, 41, 11, 41, 12, 41, 253, + 1, 41, 3, 41, 257, 8, 41, 1, 41, 4, 41, 260, 8, 41, 11, 41, 12, 41, 261, + 1, 41, 1, 41, 1, 41, 1, 41, 4, 41, 268, 8, 41, 11, 41, 12, 41, 269, 1, + 41, 3, 41, 
273, 8, 41, 3, 41, 275, 8, 41, 1, 42, 4, 42, 278, 8, 42, 11, + 42, 12, 42, 279, 1, 42, 1, 42, 1, 42, 1, 42, 4, 42, 286, 8, 42, 11, 42, + 12, 42, 287, 3, 42, 290, 8, 42, 1, 43, 4, 43, 293, 8, 43, 11, 43, 12, 43, + 294, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 4, 43, 303, 8, 43, 11, 43, + 12, 43, 304, 1, 43, 1, 43, 3, 43, 309, 8, 43, 1, 44, 1, 44, 1, 44, 5, 44, + 314, 8, 44, 10, 44, 12, 44, 317, 9, 44, 1, 44, 1, 44, 1, 44, 1, 44, 5, + 44, 323, 8, 44, 10, 44, 12, 44, 326, 9, 44, 1, 44, 1, 44, 1, 44, 1, 44, + 1, 44, 1, 44, 1, 44, 5, 44, 335, 8, 44, 10, 44, 12, 44, 338, 9, 44, 1, + 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 5, 44, 349, + 8, 44, 10, 44, 12, 44, 352, 9, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, + 44, 5, 44, 360, 8, 44, 10, 44, 12, 44, 363, 9, 44, 1, 44, 1, 44, 1, 44, + 1, 44, 1, 44, 5, 44, 370, 8, 44, 10, 44, 12, 44, 373, 9, 44, 1, 44, 1, + 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 5, 44, 383, 8, 44, 10, 44, + 12, 44, 386, 9, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, + 44, 1, 44, 1, 44, 5, 44, 398, 8, 44, 10, 44, 12, 44, 401, 9, 44, 1, 44, + 1, 44, 1, 44, 1, 44, 3, 44, 407, 8, 44, 1, 45, 1, 45, 1, 45, 1, 46, 1, + 46, 3, 46, 414, 8, 46, 1, 46, 1, 46, 1, 46, 5, 46, 419, 8, 46, 10, 46, + 12, 46, 422, 9, 46, 4, 336, 350, 384, 399, 0, 47, 1, 1, 3, 2, 5, 3, 7, + 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 23, 12, 25, 13, 27, + 14, 29, 15, 31, 16, 33, 17, 35, 18, 37, 19, 39, 20, 41, 21, 43, 22, 45, + 23, 47, 24, 49, 25, 51, 26, 53, 27, 55, 28, 57, 0, 59, 0, 61, 0, 63, 0, + 65, 0, 67, 0, 69, 0, 71, 0, 73, 0, 75, 0, 77, 0, 79, 29, 81, 30, 83, 31, + 85, 32, 87, 33, 89, 34, 91, 35, 93, 36, 1, 0, 16, 2, 0, 65, 90, 97, 122, + 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 3, 0, 48, 57, 65, 70, 97, + 102, 2, 0, 82, 82, 114, 114, 10, 0, 34, 34, 39, 39, 63, 63, 92, 92, 96, + 98, 102, 102, 110, 110, 114, 114, 116, 116, 118, 118, 2, 0, 88, 88, 120, + 120, 3, 0, 9, 10, 12, 13, 32, 32, 1, 0, 10, 10, 2, 0, 85, 85, 117, 117, + 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 4, 0, 10, 10, 13, 13, 39, 39, 92, + 92, 1, 0, 92, 92, 3, 0, 10, 10, 13, 13, 34, 34, 3, 0, 10, 10, 13, 13, 39, + 39, 2, 0, 66, 66, 98, 98, 456, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, + 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, + 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, + 21, 1, 0, 0, 0, 0, 23, 1, 0, 0, 0, 0, 25, 1, 0, 0, 0, 0, 27, 1, 0, 0, 0, + 0, 29, 1, 0, 0, 0, 0, 31, 1, 0, 0, 0, 0, 33, 1, 0, 0, 0, 0, 35, 1, 0, 0, + 0, 0, 37, 1, 0, 0, 0, 0, 39, 1, 0, 0, 0, 0, 41, 1, 0, 0, 0, 0, 43, 1, 0, + 0, 0, 0, 45, 1, 0, 0, 0, 0, 47, 1, 0, 0, 0, 0, 49, 1, 0, 0, 0, 0, 51, 1, + 0, 0, 0, 0, 53, 1, 0, 0, 0, 0, 55, 1, 0, 0, 0, 0, 79, 1, 0, 0, 0, 0, 81, + 1, 0, 0, 0, 0, 83, 1, 0, 0, 0, 0, 85, 1, 0, 0, 0, 0, 87, 1, 0, 0, 0, 0, + 89, 1, 0, 0, 0, 0, 91, 1, 0, 0, 0, 0, 93, 1, 0, 0, 0, 1, 95, 1, 0, 0, 0, + 3, 98, 1, 0, 0, 0, 5, 101, 1, 0, 0, 0, 7, 104, 1, 0, 0, 0, 9, 106, 1, 0, + 0, 0, 11, 109, 1, 0, 0, 0, 13, 112, 1, 0, 0, 0, 15, 114, 1, 0, 0, 0, 17, + 117, 1, 0, 0, 0, 19, 120, 1, 0, 0, 0, 21, 122, 1, 0, 0, 0, 23, 124, 1, + 0, 0, 0, 25, 126, 1, 0, 0, 0, 27, 128, 1, 0, 0, 0, 29, 130, 1, 0, 0, 0, + 31, 132, 1, 0, 0, 0, 33, 134, 1, 0, 0, 0, 35, 136, 1, 0, 0, 0, 37, 138, + 1, 0, 0, 0, 39, 140, 1, 0, 0, 0, 41, 142, 1, 0, 0, 0, 43, 144, 1, 0, 0, + 0, 45, 146, 1, 0, 0, 0, 47, 148, 1, 0, 0, 0, 49, 150, 1, 0, 0, 0, 51, 152, + 1, 0, 0, 0, 53, 157, 1, 0, 0, 0, 55, 163, 1, 0, 0, 0, 57, 168, 1, 0, 0, + 0, 59, 170, 1, 0, 0, 0, 61, 172, 1, 0, 0, 0, 63, 174, 1, 0, 0, 0, 
65, 183, + 1, 0, 0, 0, 67, 185, 1, 0, 0, 0, 69, 191, 1, 0, 0, 0, 71, 193, 1, 0, 0, + 0, 73, 196, 1, 0, 0, 0, 75, 201, 1, 0, 0, 0, 77, 224, 1, 0, 0, 0, 79, 227, + 1, 0, 0, 0, 81, 233, 1, 0, 0, 0, 83, 274, 1, 0, 0, 0, 85, 289, 1, 0, 0, + 0, 87, 308, 1, 0, 0, 0, 89, 406, 1, 0, 0, 0, 91, 408, 1, 0, 0, 0, 93, 413, + 1, 0, 0, 0, 95, 96, 5, 61, 0, 0, 96, 97, 5, 61, 0, 0, 97, 2, 1, 0, 0, 0, + 98, 99, 5, 33, 0, 0, 99, 100, 5, 61, 0, 0, 100, 4, 1, 0, 0, 0, 101, 102, + 5, 105, 0, 0, 102, 103, 5, 110, 0, 0, 103, 6, 1, 0, 0, 0, 104, 105, 5, + 60, 0, 0, 105, 8, 1, 0, 0, 0, 106, 107, 5, 60, 0, 0, 107, 108, 5, 61, 0, + 0, 108, 10, 1, 0, 0, 0, 109, 110, 5, 62, 0, 0, 110, 111, 5, 61, 0, 0, 111, + 12, 1, 0, 0, 0, 112, 113, 5, 62, 0, 0, 113, 14, 1, 0, 0, 0, 114, 115, 5, + 38, 0, 0, 115, 116, 5, 38, 0, 0, 116, 16, 1, 0, 0, 0, 117, 118, 5, 124, + 0, 0, 118, 119, 5, 124, 0, 0, 119, 18, 1, 0, 0, 0, 120, 121, 5, 91, 0, + 0, 121, 20, 1, 0, 0, 0, 122, 123, 5, 93, 0, 0, 123, 22, 1, 0, 0, 0, 124, + 125, 5, 123, 0, 0, 125, 24, 1, 0, 0, 0, 126, 127, 5, 125, 0, 0, 127, 26, + 1, 0, 0, 0, 128, 129, 5, 40, 0, 0, 129, 28, 1, 0, 0, 0, 130, 131, 5, 41, + 0, 0, 131, 30, 1, 0, 0, 0, 132, 133, 5, 46, 0, 0, 133, 32, 1, 0, 0, 0, + 134, 135, 5, 44, 0, 0, 135, 34, 1, 0, 0, 0, 136, 137, 5, 45, 0, 0, 137, + 36, 1, 0, 0, 0, 138, 139, 5, 33, 0, 0, 139, 38, 1, 0, 0, 0, 140, 141, 5, + 63, 0, 0, 141, 40, 1, 0, 0, 0, 142, 143, 5, 58, 0, 0, 143, 42, 1, 0, 0, + 0, 144, 145, 5, 43, 0, 0, 145, 44, 1, 0, 0, 0, 146, 147, 5, 42, 0, 0, 147, + 46, 1, 0, 0, 0, 148, 149, 5, 47, 0, 0, 149, 48, 1, 0, 0, 0, 150, 151, 5, + 37, 0, 0, 151, 50, 1, 0, 0, 0, 152, 153, 5, 116, 0, 0, 153, 154, 5, 114, + 0, 0, 154, 155, 5, 117, 0, 0, 155, 156, 5, 101, 0, 0, 156, 52, 1, 0, 0, + 0, 157, 158, 5, 102, 0, 0, 158, 159, 5, 97, 0, 0, 159, 160, 5, 108, 0, + 0, 160, 161, 5, 115, 0, 0, 161, 162, 5, 101, 0, 0, 162, 54, 1, 0, 0, 0, + 163, 164, 5, 110, 0, 0, 164, 165, 5, 117, 0, 0, 165, 166, 5, 108, 0, 0, + 166, 167, 5, 108, 0, 0, 167, 56, 1, 0, 0, 0, 168, 169, 5, 92, 0, 0, 169, + 58, 1, 0, 0, 0, 170, 171, 7, 0, 0, 0, 171, 60, 1, 0, 0, 0, 172, 173, 2, + 48, 57, 0, 173, 62, 1, 0, 0, 0, 174, 176, 7, 1, 0, 0, 175, 177, 7, 2, 0, + 0, 176, 175, 1, 0, 0, 0, 176, 177, 1, 0, 0, 0, 177, 179, 1, 0, 0, 0, 178, + 180, 3, 61, 30, 0, 179, 178, 1, 0, 0, 0, 180, 181, 1, 0, 0, 0, 181, 179, + 1, 0, 0, 0, 181, 182, 1, 0, 0, 0, 182, 64, 1, 0, 0, 0, 183, 184, 7, 3, + 0, 0, 184, 66, 1, 0, 0, 0, 185, 186, 7, 4, 0, 0, 186, 68, 1, 0, 0, 0, 187, + 192, 3, 71, 35, 0, 188, 192, 3, 75, 37, 0, 189, 192, 3, 77, 38, 0, 190, + 192, 3, 73, 36, 0, 191, 187, 1, 0, 0, 0, 191, 188, 1, 0, 0, 0, 191, 189, + 1, 0, 0, 0, 191, 190, 1, 0, 0, 0, 192, 70, 1, 0, 0, 0, 193, 194, 3, 57, + 28, 0, 194, 195, 7, 5, 0, 0, 195, 72, 1, 0, 0, 0, 196, 197, 3, 57, 28, + 0, 197, 198, 2, 48, 51, 0, 198, 199, 2, 48, 55, 0, 199, 200, 2, 48, 55, + 0, 200, 74, 1, 0, 0, 0, 201, 202, 3, 57, 28, 0, 202, 203, 7, 6, 0, 0, 203, + 204, 3, 65, 32, 0, 204, 205, 3, 65, 32, 0, 205, 76, 1, 0, 0, 0, 206, 207, + 3, 57, 28, 0, 207, 208, 5, 117, 0, 0, 208, 209, 3, 65, 32, 0, 209, 210, + 3, 65, 32, 0, 210, 211, 3, 65, 32, 0, 211, 212, 3, 65, 32, 0, 212, 225, + 1, 0, 0, 0, 213, 214, 3, 57, 28, 0, 214, 215, 5, 85, 0, 0, 215, 216, 3, + 65, 32, 0, 216, 217, 3, 65, 32, 0, 217, 218, 3, 65, 32, 0, 218, 219, 3, + 65, 32, 0, 219, 220, 3, 65, 32, 0, 220, 221, 3, 65, 32, 0, 221, 222, 3, + 65, 32, 0, 222, 223, 3, 65, 32, 0, 223, 225, 1, 0, 0, 0, 224, 206, 1, 0, + 0, 0, 224, 213, 1, 0, 0, 0, 225, 78, 1, 0, 0, 0, 226, 228, 7, 7, 0, 0, + 227, 226, 1, 0, 0, 0, 228, 229, 1, 
0, 0, 0, 229, 227, 1, 0, 0, 0, 229, + 230, 1, 0, 0, 0, 230, 231, 1, 0, 0, 0, 231, 232, 6, 39, 0, 0, 232, 80, + 1, 0, 0, 0, 233, 234, 5, 47, 0, 0, 234, 235, 5, 47, 0, 0, 235, 239, 1, + 0, 0, 0, 236, 238, 8, 8, 0, 0, 237, 236, 1, 0, 0, 0, 238, 241, 1, 0, 0, + 0, 239, 237, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 242, 1, 0, 0, 0, 241, + 239, 1, 0, 0, 0, 242, 243, 6, 40, 0, 0, 243, 82, 1, 0, 0, 0, 244, 246, + 3, 61, 30, 0, 245, 244, 1, 0, 0, 0, 246, 247, 1, 0, 0, 0, 247, 245, 1, + 0, 0, 0, 247, 248, 1, 0, 0, 0, 248, 249, 1, 0, 0, 0, 249, 251, 5, 46, 0, + 0, 250, 252, 3, 61, 30, 0, 251, 250, 1, 0, 0, 0, 252, 253, 1, 0, 0, 0, + 253, 251, 1, 0, 0, 0, 253, 254, 1, 0, 0, 0, 254, 256, 1, 0, 0, 0, 255, + 257, 3, 63, 31, 0, 256, 255, 1, 0, 0, 0, 256, 257, 1, 0, 0, 0, 257, 275, + 1, 0, 0, 0, 258, 260, 3, 61, 30, 0, 259, 258, 1, 0, 0, 0, 260, 261, 1, + 0, 0, 0, 261, 259, 1, 0, 0, 0, 261, 262, 1, 0, 0, 0, 262, 263, 1, 0, 0, + 0, 263, 264, 3, 63, 31, 0, 264, 275, 1, 0, 0, 0, 265, 267, 5, 46, 0, 0, + 266, 268, 3, 61, 30, 0, 267, 266, 1, 0, 0, 0, 268, 269, 1, 0, 0, 0, 269, + 267, 1, 0, 0, 0, 269, 270, 1, 0, 0, 0, 270, 272, 1, 0, 0, 0, 271, 273, + 3, 63, 31, 0, 272, 271, 1, 0, 0, 0, 272, 273, 1, 0, 0, 0, 273, 275, 1, + 0, 0, 0, 274, 245, 1, 0, 0, 0, 274, 259, 1, 0, 0, 0, 274, 265, 1, 0, 0, + 0, 275, 84, 1, 0, 0, 0, 276, 278, 3, 61, 30, 0, 277, 276, 1, 0, 0, 0, 278, + 279, 1, 0, 0, 0, 279, 277, 1, 0, 0, 0, 279, 280, 1, 0, 0, 0, 280, 290, + 1, 0, 0, 0, 281, 282, 5, 48, 0, 0, 282, 283, 5, 120, 0, 0, 283, 285, 1, + 0, 0, 0, 284, 286, 3, 65, 32, 0, 285, 284, 1, 0, 0, 0, 286, 287, 1, 0, + 0, 0, 287, 285, 1, 0, 0, 0, 287, 288, 1, 0, 0, 0, 288, 290, 1, 0, 0, 0, + 289, 277, 1, 0, 0, 0, 289, 281, 1, 0, 0, 0, 290, 86, 1, 0, 0, 0, 291, 293, + 3, 61, 30, 0, 292, 291, 1, 0, 0, 0, 293, 294, 1, 0, 0, 0, 294, 292, 1, + 0, 0, 0, 294, 295, 1, 0, 0, 0, 295, 296, 1, 0, 0, 0, 296, 297, 7, 9, 0, + 0, 297, 309, 1, 0, 0, 0, 298, 299, 5, 48, 0, 0, 299, 300, 5, 120, 0, 0, + 300, 302, 1, 0, 0, 0, 301, 303, 3, 65, 32, 0, 302, 301, 1, 0, 0, 0, 303, + 304, 1, 0, 0, 0, 304, 302, 1, 0, 0, 0, 304, 305, 1, 0, 0, 0, 305, 306, + 1, 0, 0, 0, 306, 307, 7, 9, 0, 0, 307, 309, 1, 0, 0, 0, 308, 292, 1, 0, + 0, 0, 308, 298, 1, 0, 0, 0, 309, 88, 1, 0, 0, 0, 310, 315, 5, 34, 0, 0, + 311, 314, 3, 69, 34, 0, 312, 314, 8, 10, 0, 0, 313, 311, 1, 0, 0, 0, 313, + 312, 1, 0, 0, 0, 314, 317, 1, 0, 0, 0, 315, 313, 1, 0, 0, 0, 315, 316, + 1, 0, 0, 0, 316, 318, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 318, 407, 5, 34, + 0, 0, 319, 324, 5, 39, 0, 0, 320, 323, 3, 69, 34, 0, 321, 323, 8, 11, 0, + 0, 322, 320, 1, 0, 0, 0, 322, 321, 1, 0, 0, 0, 323, 326, 1, 0, 0, 0, 324, + 322, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, 327, 1, 0, 0, 0, 326, 324, + 1, 0, 0, 0, 327, 407, 5, 39, 0, 0, 328, 329, 5, 34, 0, 0, 329, 330, 5, + 34, 0, 0, 330, 331, 5, 34, 0, 0, 331, 336, 1, 0, 0, 0, 332, 335, 3, 69, + 34, 0, 333, 335, 8, 12, 0, 0, 334, 332, 1, 0, 0, 0, 334, 333, 1, 0, 0, + 0, 335, 338, 1, 0, 0, 0, 336, 337, 1, 0, 0, 0, 336, 334, 1, 0, 0, 0, 337, + 339, 1, 0, 0, 0, 338, 336, 1, 0, 0, 0, 339, 340, 5, 34, 0, 0, 340, 341, + 5, 34, 0, 0, 341, 407, 5, 34, 0, 0, 342, 343, 5, 39, 0, 0, 343, 344, 5, + 39, 0, 0, 344, 345, 5, 39, 0, 0, 345, 350, 1, 0, 0, 0, 346, 349, 3, 69, + 34, 0, 347, 349, 8, 12, 0, 0, 348, 346, 1, 0, 0, 0, 348, 347, 1, 0, 0, + 0, 349, 352, 1, 0, 0, 0, 350, 351, 1, 0, 0, 0, 350, 348, 1, 0, 0, 0, 351, + 353, 1, 0, 0, 0, 352, 350, 1, 0, 0, 0, 353, 354, 5, 39, 0, 0, 354, 355, + 5, 39, 0, 0, 355, 407, 5, 39, 0, 0, 356, 357, 3, 67, 33, 0, 357, 361, 5, + 34, 0, 0, 358, 360, 8, 13, 0, 0, 
359, 358, 1, 0, 0, 0, 360, 363, 1, 0, + 0, 0, 361, 359, 1, 0, 0, 0, 361, 362, 1, 0, 0, 0, 362, 364, 1, 0, 0, 0, + 363, 361, 1, 0, 0, 0, 364, 365, 5, 34, 0, 0, 365, 407, 1, 0, 0, 0, 366, + 367, 3, 67, 33, 0, 367, 371, 5, 39, 0, 0, 368, 370, 8, 14, 0, 0, 369, 368, + 1, 0, 0, 0, 370, 373, 1, 0, 0, 0, 371, 369, 1, 0, 0, 0, 371, 372, 1, 0, + 0, 0, 372, 374, 1, 0, 0, 0, 373, 371, 1, 0, 0, 0, 374, 375, 5, 39, 0, 0, + 375, 407, 1, 0, 0, 0, 376, 377, 3, 67, 33, 0, 377, 378, 5, 34, 0, 0, 378, + 379, 5, 34, 0, 0, 379, 380, 5, 34, 0, 0, 380, 384, 1, 0, 0, 0, 381, 383, + 9, 0, 0, 0, 382, 381, 1, 0, 0, 0, 383, 386, 1, 0, 0, 0, 384, 385, 1, 0, + 0, 0, 384, 382, 1, 0, 0, 0, 385, 387, 1, 0, 0, 0, 386, 384, 1, 0, 0, 0, + 387, 388, 5, 34, 0, 0, 388, 389, 5, 34, 0, 0, 389, 390, 5, 34, 0, 0, 390, + 407, 1, 0, 0, 0, 391, 392, 3, 67, 33, 0, 392, 393, 5, 39, 0, 0, 393, 394, + 5, 39, 0, 0, 394, 395, 5, 39, 0, 0, 395, 399, 1, 0, 0, 0, 396, 398, 9, + 0, 0, 0, 397, 396, 1, 0, 0, 0, 398, 401, 1, 0, 0, 0, 399, 400, 1, 0, 0, + 0, 399, 397, 1, 0, 0, 0, 400, 402, 1, 0, 0, 0, 401, 399, 1, 0, 0, 0, 402, + 403, 5, 39, 0, 0, 403, 404, 5, 39, 0, 0, 404, 405, 5, 39, 0, 0, 405, 407, + 1, 0, 0, 0, 406, 310, 1, 0, 0, 0, 406, 319, 1, 0, 0, 0, 406, 328, 1, 0, + 0, 0, 406, 342, 1, 0, 0, 0, 406, 356, 1, 0, 0, 0, 406, 366, 1, 0, 0, 0, + 406, 376, 1, 0, 0, 0, 406, 391, 1, 0, 0, 0, 407, 90, 1, 0, 0, 0, 408, 409, + 7, 15, 0, 0, 409, 410, 3, 89, 44, 0, 410, 92, 1, 0, 0, 0, 411, 414, 3, + 59, 29, 0, 412, 414, 5, 95, 0, 0, 413, 411, 1, 0, 0, 0, 413, 412, 1, 0, + 0, 0, 414, 420, 1, 0, 0, 0, 415, 419, 3, 59, 29, 0, 416, 419, 3, 61, 30, + 0, 417, 419, 5, 95, 0, 0, 418, 415, 1, 0, 0, 0, 418, 416, 1, 0, 0, 0, 418, + 417, 1, 0, 0, 0, 419, 422, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 420, 421, + 1, 0, 0, 0, 421, 94, 1, 0, 0, 0, 422, 420, 1, 0, 0, 0, 36, 0, 176, 181, + 191, 224, 229, 239, 247, 253, 256, 261, 269, 272, 274, 279, 287, 289, 294, + 304, 308, 313, 315, 322, 324, 334, 336, 348, 350, 361, 371, 384, 399, 406, + 413, 418, 420, 1, 0, 1, 0, +} + deserializer := antlr.NewATNDeserializer(nil) + staticData.atn = deserializer.Deserialize(staticData.serializedATN) + atn := staticData.atn + staticData.decisionToDFA = make([]*antlr.DFA, len(atn.DecisionToState)) + decisionToDFA := staticData.decisionToDFA + for index, state := range atn.DecisionToState { + decisionToDFA[index] = antlr.NewDFA(state, index) + } } // CELLexerInit initializes any static state used to implement CELLexer. By default the @@ -282,22 +280,22 @@ func cellexerLexerInit() { // NewCELLexer(). You can call this function if you wish to initialize the static state ahead // of time. func CELLexerInit() { - staticData := &cellexerLexerStaticData - staticData.once.Do(cellexerLexerInit) + staticData := &CELLexerLexerStaticData + staticData.once.Do(cellexerLexerInit) } // NewCELLexer produces a new lexer instance for the optional input antlr.CharStream. 
func NewCELLexer(input antlr.CharStream) *CELLexer { - CELLexerInit() + CELLexerInit() l := new(CELLexer) l.BaseLexer = antlr.NewBaseLexer(input) - staticData := &cellexerLexerStaticData - l.Interpreter = antlr.NewLexerATNSimulator(l, staticData.atn, staticData.decisionToDFA, staticData.predictionContextCache) - l.channelNames = staticData.channelNames - l.modeNames = staticData.modeNames - l.RuleNames = staticData.ruleNames - l.LiteralNames = staticData.literalNames - l.SymbolicNames = staticData.symbolicNames + staticData := &CELLexerLexerStaticData + l.Interpreter = antlr.NewLexerATNSimulator(l, staticData.atn, staticData.decisionToDFA, staticData.PredictionContextCache) + l.channelNames = staticData.ChannelNames + l.modeNames = staticData.ModeNames + l.RuleNames = staticData.RuleNames + l.LiteralNames = staticData.LiteralNames + l.SymbolicNames = staticData.SymbolicNames l.GrammarFileName = "CEL.g4" // TODO: l.EOF = antlr.TokenEOF @@ -306,40 +304,41 @@ func NewCELLexer(input antlr.CharStream) *CELLexer { // CELLexer tokens. const ( - CELLexerEQUALS = 1 - CELLexerNOT_EQUALS = 2 - CELLexerIN = 3 - CELLexerLESS = 4 - CELLexerLESS_EQUALS = 5 + CELLexerEQUALS = 1 + CELLexerNOT_EQUALS = 2 + CELLexerIN = 3 + CELLexerLESS = 4 + CELLexerLESS_EQUALS = 5 CELLexerGREATER_EQUALS = 6 - CELLexerGREATER = 7 - CELLexerLOGICAL_AND = 8 - CELLexerLOGICAL_OR = 9 - CELLexerLBRACKET = 10 - CELLexerRPRACKET = 11 - CELLexerLBRACE = 12 - CELLexerRBRACE = 13 - CELLexerLPAREN = 14 - CELLexerRPAREN = 15 - CELLexerDOT = 16 - CELLexerCOMMA = 17 - CELLexerMINUS = 18 - CELLexerEXCLAM = 19 - CELLexerQUESTIONMARK = 20 - CELLexerCOLON = 21 - CELLexerPLUS = 22 - CELLexerSTAR = 23 - CELLexerSLASH = 24 - CELLexerPERCENT = 25 - CELLexerCEL_TRUE = 26 - CELLexerCEL_FALSE = 27 - CELLexerNUL = 28 - CELLexerWHITESPACE = 29 - CELLexerCOMMENT = 30 - CELLexerNUM_FLOAT = 31 - CELLexerNUM_INT = 32 - CELLexerNUM_UINT = 33 - CELLexerSTRING = 34 - CELLexerBYTES = 35 - CELLexerIDENTIFIER = 36 + CELLexerGREATER = 7 + CELLexerLOGICAL_AND = 8 + CELLexerLOGICAL_OR = 9 + CELLexerLBRACKET = 10 + CELLexerRPRACKET = 11 + CELLexerLBRACE = 12 + CELLexerRBRACE = 13 + CELLexerLPAREN = 14 + CELLexerRPAREN = 15 + CELLexerDOT = 16 + CELLexerCOMMA = 17 + CELLexerMINUS = 18 + CELLexerEXCLAM = 19 + CELLexerQUESTIONMARK = 20 + CELLexerCOLON = 21 + CELLexerPLUS = 22 + CELLexerSTAR = 23 + CELLexerSLASH = 24 + CELLexerPERCENT = 25 + CELLexerCEL_TRUE = 26 + CELLexerCEL_FALSE = 27 + CELLexerNUL = 28 + CELLexerWHITESPACE = 29 + CELLexerCOMMENT = 30 + CELLexerNUM_FLOAT = 31 + CELLexerNUM_INT = 32 + CELLexerNUM_UINT = 33 + CELLexerSTRING = 34 + CELLexerBYTES = 35 + CELLexerIDENTIFIER = 36 ) + diff --git a/vendor/github.com/google/cel-go/parser/gen/cel_listener.go b/vendor/github.com/google/cel-go/parser/gen/cel_listener.go index 73b7f1d39..22dc99789 100644 --- a/vendor/github.com/google/cel-go/parser/gen/cel_listener.go +++ b/vendor/github.com/google/cel-go/parser/gen/cel_listener.go @@ -1,7 +1,8 @@ -// Code generated from /Users/tswadell/go/src/github.com/google/cel-go/parser/gen/CEL.g4 by ANTLR 4.12.0. DO NOT EDIT. +// Code generated from /usr/local/google/home/tswadell/go/src/github.com/google/cel-go/parser/gen/CEL.g4 by ANTLR 4.13.1. DO NOT EDIT. package gen // CEL -import "github.com/antlr/antlr4/runtime/Go/antlr/v4" +import "github.com/antlr4-go/antlr/v4" + // CELListener is a complete listener for a parse tree produced by CELParser. 
type CELListener interface { diff --git a/vendor/github.com/google/cel-go/parser/gen/cel_parser.go b/vendor/github.com/google/cel-go/parser/gen/cel_parser.go index 0cb6c8eae..35334af61 100644 --- a/vendor/github.com/google/cel-go/parser/gen/cel_parser.go +++ b/vendor/github.com/google/cel-go/parser/gen/cel_parser.go @@ -1,12 +1,12 @@ -// Code generated from /Users/tswadell/go/src/github.com/google/cel-go/parser/gen/CEL.g4 by ANTLR 4.12.0. DO NOT EDIT. +// Code generated from /usr/local/google/home/tswadell/go/src/github.com/google/cel-go/parser/gen/CEL.g4 by ANTLR 4.13.1. DO NOT EDIT. package gen // CEL import ( "fmt" "strconv" - "sync" + "sync" - "github.com/antlr/antlr4/runtime/Go/antlr/v4" + "github.com/antlr4-go/antlr/v4" ) // Suppress unused import errors @@ -14,166 +14,167 @@ var _ = fmt.Printf var _ = strconv.Itoa var _ = sync.Once{} + type CELParser struct { *antlr.BaseParser } -var celParserStaticData struct { - once sync.Once - serializedATN []int32 - literalNames []string - symbolicNames []string - ruleNames []string - predictionContextCache *antlr.PredictionContextCache - atn *antlr.ATN - decisionToDFA []*antlr.DFA +var CELParserStaticData struct { + once sync.Once + serializedATN []int32 + LiteralNames []string + SymbolicNames []string + RuleNames []string + PredictionContextCache *antlr.PredictionContextCache + atn *antlr.ATN + decisionToDFA []*antlr.DFA } func celParserInit() { - staticData := &celParserStaticData - staticData.literalNames = []string{ - "", "'=='", "'!='", "'in'", "'<'", "'<='", "'>='", "'>'", "'&&'", "'||'", - "'['", "']'", "'{'", "'}'", "'('", "')'", "'.'", "','", "'-'", "'!'", - "'?'", "':'", "'+'", "'*'", "'/'", "'%'", "'true'", "'false'", "'null'", - } - staticData.symbolicNames = []string{ - "", "EQUALS", "NOT_EQUALS", "IN", "LESS", "LESS_EQUALS", "GREATER_EQUALS", - "GREATER", "LOGICAL_AND", "LOGICAL_OR", "LBRACKET", "RPRACKET", "LBRACE", - "RBRACE", "LPAREN", "RPAREN", "DOT", "COMMA", "MINUS", "EXCLAM", "QUESTIONMARK", - "COLON", "PLUS", "STAR", "SLASH", "PERCENT", "CEL_TRUE", "CEL_FALSE", - "NUL", "WHITESPACE", "COMMENT", "NUM_FLOAT", "NUM_INT", "NUM_UINT", - "STRING", "BYTES", "IDENTIFIER", - } - staticData.ruleNames = []string{ - "start", "expr", "conditionalOr", "conditionalAnd", "relation", "calc", - "unary", "member", "primary", "exprList", "listInit", "fieldInitializerList", - "optField", "mapInitializerList", "optExpr", "literal", - } - staticData.predictionContextCache = antlr.NewPredictionContextCache() - staticData.serializedATN = []int32{ - 4, 1, 36, 251, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, - 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, - 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, - 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 42, 8, 1, 1, - 2, 1, 2, 1, 2, 5, 2, 47, 8, 2, 10, 2, 12, 2, 50, 9, 2, 1, 3, 1, 3, 1, 3, - 5, 3, 55, 8, 3, 10, 3, 12, 3, 58, 9, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, - 4, 5, 4, 66, 8, 4, 10, 4, 12, 4, 69, 9, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, - 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 80, 8, 5, 10, 5, 12, 5, 83, 9, 5, 1, 6, 1, - 6, 4, 6, 87, 8, 6, 11, 6, 12, 6, 88, 1, 6, 1, 6, 4, 6, 93, 8, 6, 11, 6, - 12, 6, 94, 1, 6, 3, 6, 98, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, - 7, 106, 8, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 114, 8, 7, 1, 7, - 1, 7, 1, 7, 1, 7, 3, 7, 120, 8, 7, 1, 7, 1, 7, 1, 7, 5, 7, 125, 8, 7, 10, - 7, 12, 7, 128, 9, 7, 1, 8, 3, 8, 131, 8, 8, 1, 8, 1, 8, 1, 8, 3, 8, 136, - 8, 8, 1, 8, 3, 8, 139, 8, 8, 1, 8, 1, 8, 1, 
8, 1, 8, 1, 8, 1, 8, 3, 8, - 147, 8, 8, 1, 8, 3, 8, 150, 8, 8, 1, 8, 1, 8, 1, 8, 3, 8, 155, 8, 8, 1, - 8, 3, 8, 158, 8, 8, 1, 8, 1, 8, 3, 8, 162, 8, 8, 1, 8, 1, 8, 1, 8, 5, 8, - 167, 8, 8, 10, 8, 12, 8, 170, 9, 8, 1, 8, 1, 8, 3, 8, 174, 8, 8, 1, 8, - 3, 8, 177, 8, 8, 1, 8, 1, 8, 3, 8, 181, 8, 8, 1, 9, 1, 9, 1, 9, 5, 9, 186, - 8, 9, 10, 9, 12, 9, 189, 9, 9, 1, 10, 1, 10, 1, 10, 5, 10, 194, 8, 10, - 10, 10, 12, 10, 197, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, - 11, 1, 11, 5, 11, 207, 8, 11, 10, 11, 12, 11, 210, 9, 11, 1, 12, 3, 12, - 213, 8, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, - 13, 1, 13, 5, 13, 225, 8, 13, 10, 13, 12, 13, 228, 9, 13, 1, 14, 3, 14, - 231, 8, 14, 1, 14, 1, 14, 1, 15, 3, 15, 236, 8, 15, 1, 15, 1, 15, 1, 15, - 3, 15, 241, 8, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 249, - 8, 15, 1, 15, 0, 3, 8, 10, 14, 16, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, - 22, 24, 26, 28, 30, 0, 3, 1, 0, 1, 7, 1, 0, 23, 25, 2, 0, 18, 18, 22, 22, - 281, 0, 32, 1, 0, 0, 0, 2, 35, 1, 0, 0, 0, 4, 43, 1, 0, 0, 0, 6, 51, 1, - 0, 0, 0, 8, 59, 1, 0, 0, 0, 10, 70, 1, 0, 0, 0, 12, 97, 1, 0, 0, 0, 14, - 99, 1, 0, 0, 0, 16, 180, 1, 0, 0, 0, 18, 182, 1, 0, 0, 0, 20, 190, 1, 0, - 0, 0, 22, 198, 1, 0, 0, 0, 24, 212, 1, 0, 0, 0, 26, 216, 1, 0, 0, 0, 28, - 230, 1, 0, 0, 0, 30, 248, 1, 0, 0, 0, 32, 33, 3, 2, 1, 0, 33, 34, 5, 0, - 0, 1, 34, 1, 1, 0, 0, 0, 35, 41, 3, 4, 2, 0, 36, 37, 5, 20, 0, 0, 37, 38, - 3, 4, 2, 0, 38, 39, 5, 21, 0, 0, 39, 40, 3, 2, 1, 0, 40, 42, 1, 0, 0, 0, - 41, 36, 1, 0, 0, 0, 41, 42, 1, 0, 0, 0, 42, 3, 1, 0, 0, 0, 43, 48, 3, 6, - 3, 0, 44, 45, 5, 9, 0, 0, 45, 47, 3, 6, 3, 0, 46, 44, 1, 0, 0, 0, 47, 50, - 1, 0, 0, 0, 48, 46, 1, 0, 0, 0, 48, 49, 1, 0, 0, 0, 49, 5, 1, 0, 0, 0, - 50, 48, 1, 0, 0, 0, 51, 56, 3, 8, 4, 0, 52, 53, 5, 8, 0, 0, 53, 55, 3, - 8, 4, 0, 54, 52, 1, 0, 0, 0, 55, 58, 1, 0, 0, 0, 56, 54, 1, 0, 0, 0, 56, - 57, 1, 0, 0, 0, 57, 7, 1, 0, 0, 0, 58, 56, 1, 0, 0, 0, 59, 60, 6, 4, -1, - 0, 60, 61, 3, 10, 5, 0, 61, 67, 1, 0, 0, 0, 62, 63, 10, 1, 0, 0, 63, 64, - 7, 0, 0, 0, 64, 66, 3, 8, 4, 2, 65, 62, 1, 0, 0, 0, 66, 69, 1, 0, 0, 0, - 67, 65, 1, 0, 0, 0, 67, 68, 1, 0, 0, 0, 68, 9, 1, 0, 0, 0, 69, 67, 1, 0, - 0, 0, 70, 71, 6, 5, -1, 0, 71, 72, 3, 12, 6, 0, 72, 81, 1, 0, 0, 0, 73, - 74, 10, 2, 0, 0, 74, 75, 7, 1, 0, 0, 75, 80, 3, 10, 5, 3, 76, 77, 10, 1, - 0, 0, 77, 78, 7, 2, 0, 0, 78, 80, 3, 10, 5, 2, 79, 73, 1, 0, 0, 0, 79, - 76, 1, 0, 0, 0, 80, 83, 1, 0, 0, 0, 81, 79, 1, 0, 0, 0, 81, 82, 1, 0, 0, - 0, 82, 11, 1, 0, 0, 0, 83, 81, 1, 0, 0, 0, 84, 98, 3, 14, 7, 0, 85, 87, - 5, 19, 0, 0, 86, 85, 1, 0, 0, 0, 87, 88, 1, 0, 0, 0, 88, 86, 1, 0, 0, 0, - 88, 89, 1, 0, 0, 0, 89, 90, 1, 0, 0, 0, 90, 98, 3, 14, 7, 0, 91, 93, 5, - 18, 0, 0, 92, 91, 1, 0, 0, 0, 93, 94, 1, 0, 0, 0, 94, 92, 1, 0, 0, 0, 94, - 95, 1, 0, 0, 0, 95, 96, 1, 0, 0, 0, 96, 98, 3, 14, 7, 0, 97, 84, 1, 0, - 0, 0, 97, 86, 1, 0, 0, 0, 97, 92, 1, 0, 0, 0, 98, 13, 1, 0, 0, 0, 99, 100, - 6, 7, -1, 0, 100, 101, 3, 16, 8, 0, 101, 126, 1, 0, 0, 0, 102, 103, 10, - 3, 0, 0, 103, 105, 5, 16, 0, 0, 104, 106, 5, 20, 0, 0, 105, 104, 1, 0, - 0, 0, 105, 106, 1, 0, 0, 0, 106, 107, 1, 0, 0, 0, 107, 125, 5, 36, 0, 0, - 108, 109, 10, 2, 0, 0, 109, 110, 5, 16, 0, 0, 110, 111, 5, 36, 0, 0, 111, - 113, 5, 14, 0, 0, 112, 114, 3, 18, 9, 0, 113, 112, 1, 0, 0, 0, 113, 114, - 1, 0, 0, 0, 114, 115, 1, 0, 0, 0, 115, 125, 5, 15, 0, 0, 116, 117, 10, - 1, 0, 0, 117, 119, 5, 10, 0, 0, 118, 120, 5, 20, 0, 0, 119, 118, 1, 0, - 0, 0, 119, 120, 1, 0, 0, 0, 120, 121, 1, 0, 0, 0, 121, 122, 3, 2, 1, 0, - 122, 123, 5, 11, 0, 0, 
123, 125, 1, 0, 0, 0, 124, 102, 1, 0, 0, 0, 124, - 108, 1, 0, 0, 0, 124, 116, 1, 0, 0, 0, 125, 128, 1, 0, 0, 0, 126, 124, - 1, 0, 0, 0, 126, 127, 1, 0, 0, 0, 127, 15, 1, 0, 0, 0, 128, 126, 1, 0, - 0, 0, 129, 131, 5, 16, 0, 0, 130, 129, 1, 0, 0, 0, 130, 131, 1, 0, 0, 0, - 131, 132, 1, 0, 0, 0, 132, 138, 5, 36, 0, 0, 133, 135, 5, 14, 0, 0, 134, - 136, 3, 18, 9, 0, 135, 134, 1, 0, 0, 0, 135, 136, 1, 0, 0, 0, 136, 137, - 1, 0, 0, 0, 137, 139, 5, 15, 0, 0, 138, 133, 1, 0, 0, 0, 138, 139, 1, 0, - 0, 0, 139, 181, 1, 0, 0, 0, 140, 141, 5, 14, 0, 0, 141, 142, 3, 2, 1, 0, - 142, 143, 5, 15, 0, 0, 143, 181, 1, 0, 0, 0, 144, 146, 5, 10, 0, 0, 145, - 147, 3, 20, 10, 0, 146, 145, 1, 0, 0, 0, 146, 147, 1, 0, 0, 0, 147, 149, - 1, 0, 0, 0, 148, 150, 5, 17, 0, 0, 149, 148, 1, 0, 0, 0, 149, 150, 1, 0, - 0, 0, 150, 151, 1, 0, 0, 0, 151, 181, 5, 11, 0, 0, 152, 154, 5, 12, 0, - 0, 153, 155, 3, 26, 13, 0, 154, 153, 1, 0, 0, 0, 154, 155, 1, 0, 0, 0, - 155, 157, 1, 0, 0, 0, 156, 158, 5, 17, 0, 0, 157, 156, 1, 0, 0, 0, 157, - 158, 1, 0, 0, 0, 158, 159, 1, 0, 0, 0, 159, 181, 5, 13, 0, 0, 160, 162, - 5, 16, 0, 0, 161, 160, 1, 0, 0, 0, 161, 162, 1, 0, 0, 0, 162, 163, 1, 0, - 0, 0, 163, 168, 5, 36, 0, 0, 164, 165, 5, 16, 0, 0, 165, 167, 5, 36, 0, - 0, 166, 164, 1, 0, 0, 0, 167, 170, 1, 0, 0, 0, 168, 166, 1, 0, 0, 0, 168, - 169, 1, 0, 0, 0, 169, 171, 1, 0, 0, 0, 170, 168, 1, 0, 0, 0, 171, 173, - 5, 12, 0, 0, 172, 174, 3, 22, 11, 0, 173, 172, 1, 0, 0, 0, 173, 174, 1, - 0, 0, 0, 174, 176, 1, 0, 0, 0, 175, 177, 5, 17, 0, 0, 176, 175, 1, 0, 0, - 0, 176, 177, 1, 0, 0, 0, 177, 178, 1, 0, 0, 0, 178, 181, 5, 13, 0, 0, 179, - 181, 3, 30, 15, 0, 180, 130, 1, 0, 0, 0, 180, 140, 1, 0, 0, 0, 180, 144, - 1, 0, 0, 0, 180, 152, 1, 0, 0, 0, 180, 161, 1, 0, 0, 0, 180, 179, 1, 0, - 0, 0, 181, 17, 1, 0, 0, 0, 182, 187, 3, 2, 1, 0, 183, 184, 5, 17, 0, 0, - 184, 186, 3, 2, 1, 0, 185, 183, 1, 0, 0, 0, 186, 189, 1, 0, 0, 0, 187, - 185, 1, 0, 0, 0, 187, 188, 1, 0, 0, 0, 188, 19, 1, 0, 0, 0, 189, 187, 1, - 0, 0, 0, 190, 195, 3, 28, 14, 0, 191, 192, 5, 17, 0, 0, 192, 194, 3, 28, - 14, 0, 193, 191, 1, 0, 0, 0, 194, 197, 1, 0, 0, 0, 195, 193, 1, 0, 0, 0, - 195, 196, 1, 0, 0, 0, 196, 21, 1, 0, 0, 0, 197, 195, 1, 0, 0, 0, 198, 199, - 3, 24, 12, 0, 199, 200, 5, 21, 0, 0, 200, 208, 3, 2, 1, 0, 201, 202, 5, - 17, 0, 0, 202, 203, 3, 24, 12, 0, 203, 204, 5, 21, 0, 0, 204, 205, 3, 2, - 1, 0, 205, 207, 1, 0, 0, 0, 206, 201, 1, 0, 0, 0, 207, 210, 1, 0, 0, 0, - 208, 206, 1, 0, 0, 0, 208, 209, 1, 0, 0, 0, 209, 23, 1, 0, 0, 0, 210, 208, - 1, 0, 0, 0, 211, 213, 5, 20, 0, 0, 212, 211, 1, 0, 0, 0, 212, 213, 1, 0, - 0, 0, 213, 214, 1, 0, 0, 0, 214, 215, 5, 36, 0, 0, 215, 25, 1, 0, 0, 0, - 216, 217, 3, 28, 14, 0, 217, 218, 5, 21, 0, 0, 218, 226, 3, 2, 1, 0, 219, - 220, 5, 17, 0, 0, 220, 221, 3, 28, 14, 0, 221, 222, 5, 21, 0, 0, 222, 223, - 3, 2, 1, 0, 223, 225, 1, 0, 0, 0, 224, 219, 1, 0, 0, 0, 225, 228, 1, 0, - 0, 0, 226, 224, 1, 0, 0, 0, 226, 227, 1, 0, 0, 0, 227, 27, 1, 0, 0, 0, - 228, 226, 1, 0, 0, 0, 229, 231, 5, 20, 0, 0, 230, 229, 1, 0, 0, 0, 230, - 231, 1, 0, 0, 0, 231, 232, 1, 0, 0, 0, 232, 233, 3, 2, 1, 0, 233, 29, 1, - 0, 0, 0, 234, 236, 5, 18, 0, 0, 235, 234, 1, 0, 0, 0, 235, 236, 1, 0, 0, - 0, 236, 237, 1, 0, 0, 0, 237, 249, 5, 32, 0, 0, 238, 249, 5, 33, 0, 0, - 239, 241, 5, 18, 0, 0, 240, 239, 1, 0, 0, 0, 240, 241, 1, 0, 0, 0, 241, - 242, 1, 0, 0, 0, 242, 249, 5, 31, 0, 0, 243, 249, 5, 34, 0, 0, 244, 249, - 5, 35, 0, 0, 245, 249, 5, 26, 0, 0, 246, 249, 5, 27, 0, 0, 247, 249, 5, - 28, 0, 0, 248, 235, 1, 0, 0, 0, 248, 238, 1, 0, 0, 0, 248, 240, 1, 0, 0, 
- 0, 248, 243, 1, 0, 0, 0, 248, 244, 1, 0, 0, 0, 248, 245, 1, 0, 0, 0, 248, - 246, 1, 0, 0, 0, 248, 247, 1, 0, 0, 0, 249, 31, 1, 0, 0, 0, 35, 41, 48, - 56, 67, 79, 81, 88, 94, 97, 105, 113, 119, 124, 126, 130, 135, 138, 146, - 149, 154, 157, 161, 168, 173, 176, 180, 187, 195, 208, 212, 226, 230, 235, - 240, 248, - } - deserializer := antlr.NewATNDeserializer(nil) - staticData.atn = deserializer.Deserialize(staticData.serializedATN) - atn := staticData.atn - staticData.decisionToDFA = make([]*antlr.DFA, len(atn.DecisionToState)) - decisionToDFA := staticData.decisionToDFA - for index, state := range atn.DecisionToState { - decisionToDFA[index] = antlr.NewDFA(state, index) - } + staticData := &CELParserStaticData + staticData.LiteralNames = []string{ + "", "'=='", "'!='", "'in'", "'<'", "'<='", "'>='", "'>'", "'&&'", "'||'", + "'['", "']'", "'{'", "'}'", "'('", "')'", "'.'", "','", "'-'", "'!'", + "'?'", "':'", "'+'", "'*'", "'/'", "'%'", "'true'", "'false'", "'null'", + } + staticData.SymbolicNames = []string{ + "", "EQUALS", "NOT_EQUALS", "IN", "LESS", "LESS_EQUALS", "GREATER_EQUALS", + "GREATER", "LOGICAL_AND", "LOGICAL_OR", "LBRACKET", "RPRACKET", "LBRACE", + "RBRACE", "LPAREN", "RPAREN", "DOT", "COMMA", "MINUS", "EXCLAM", "QUESTIONMARK", + "COLON", "PLUS", "STAR", "SLASH", "PERCENT", "CEL_TRUE", "CEL_FALSE", + "NUL", "WHITESPACE", "COMMENT", "NUM_FLOAT", "NUM_INT", "NUM_UINT", + "STRING", "BYTES", "IDENTIFIER", + } + staticData.RuleNames = []string{ + "start", "expr", "conditionalOr", "conditionalAnd", "relation", "calc", + "unary", "member", "primary", "exprList", "listInit", "fieldInitializerList", + "optField", "mapInitializerList", "optExpr", "literal", + } + staticData.PredictionContextCache = antlr.NewPredictionContextCache() + staticData.serializedATN = []int32{ + 4, 1, 36, 251, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, + 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, + 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, + 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 1, 42, 8, 1, 1, + 2, 1, 2, 1, 2, 5, 2, 47, 8, 2, 10, 2, 12, 2, 50, 9, 2, 1, 3, 1, 3, 1, 3, + 5, 3, 55, 8, 3, 10, 3, 12, 3, 58, 9, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, + 4, 5, 4, 66, 8, 4, 10, 4, 12, 4, 69, 9, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, + 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 80, 8, 5, 10, 5, 12, 5, 83, 9, 5, 1, 6, 1, + 6, 4, 6, 87, 8, 6, 11, 6, 12, 6, 88, 1, 6, 1, 6, 4, 6, 93, 8, 6, 11, 6, + 12, 6, 94, 1, 6, 3, 6, 98, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, + 7, 106, 8, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 114, 8, 7, 1, 7, + 1, 7, 1, 7, 1, 7, 3, 7, 120, 8, 7, 1, 7, 1, 7, 1, 7, 5, 7, 125, 8, 7, 10, + 7, 12, 7, 128, 9, 7, 1, 8, 3, 8, 131, 8, 8, 1, 8, 1, 8, 1, 8, 3, 8, 136, + 8, 8, 1, 8, 3, 8, 139, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, + 147, 8, 8, 1, 8, 3, 8, 150, 8, 8, 1, 8, 1, 8, 1, 8, 3, 8, 155, 8, 8, 1, + 8, 3, 8, 158, 8, 8, 1, 8, 1, 8, 3, 8, 162, 8, 8, 1, 8, 1, 8, 1, 8, 5, 8, + 167, 8, 8, 10, 8, 12, 8, 170, 9, 8, 1, 8, 1, 8, 3, 8, 174, 8, 8, 1, 8, + 3, 8, 177, 8, 8, 1, 8, 1, 8, 3, 8, 181, 8, 8, 1, 9, 1, 9, 1, 9, 5, 9, 186, + 8, 9, 10, 9, 12, 9, 189, 9, 9, 1, 10, 1, 10, 1, 10, 5, 10, 194, 8, 10, + 10, 10, 12, 10, 197, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, + 11, 1, 11, 5, 11, 207, 8, 11, 10, 11, 12, 11, 210, 9, 11, 1, 12, 3, 12, + 213, 8, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, + 13, 1, 13, 5, 13, 225, 8, 13, 10, 13, 12, 13, 228, 9, 13, 1, 14, 3, 14, + 231, 8, 14, 1, 14, 1, 14, 1, 15, 
3, 15, 236, 8, 15, 1, 15, 1, 15, 1, 15, + 3, 15, 241, 8, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 3, 15, 249, + 8, 15, 1, 15, 0, 3, 8, 10, 14, 16, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, + 22, 24, 26, 28, 30, 0, 3, 1, 0, 1, 7, 1, 0, 23, 25, 2, 0, 18, 18, 22, 22, + 281, 0, 32, 1, 0, 0, 0, 2, 35, 1, 0, 0, 0, 4, 43, 1, 0, 0, 0, 6, 51, 1, + 0, 0, 0, 8, 59, 1, 0, 0, 0, 10, 70, 1, 0, 0, 0, 12, 97, 1, 0, 0, 0, 14, + 99, 1, 0, 0, 0, 16, 180, 1, 0, 0, 0, 18, 182, 1, 0, 0, 0, 20, 190, 1, 0, + 0, 0, 22, 198, 1, 0, 0, 0, 24, 212, 1, 0, 0, 0, 26, 216, 1, 0, 0, 0, 28, + 230, 1, 0, 0, 0, 30, 248, 1, 0, 0, 0, 32, 33, 3, 2, 1, 0, 33, 34, 5, 0, + 0, 1, 34, 1, 1, 0, 0, 0, 35, 41, 3, 4, 2, 0, 36, 37, 5, 20, 0, 0, 37, 38, + 3, 4, 2, 0, 38, 39, 5, 21, 0, 0, 39, 40, 3, 2, 1, 0, 40, 42, 1, 0, 0, 0, + 41, 36, 1, 0, 0, 0, 41, 42, 1, 0, 0, 0, 42, 3, 1, 0, 0, 0, 43, 48, 3, 6, + 3, 0, 44, 45, 5, 9, 0, 0, 45, 47, 3, 6, 3, 0, 46, 44, 1, 0, 0, 0, 47, 50, + 1, 0, 0, 0, 48, 46, 1, 0, 0, 0, 48, 49, 1, 0, 0, 0, 49, 5, 1, 0, 0, 0, + 50, 48, 1, 0, 0, 0, 51, 56, 3, 8, 4, 0, 52, 53, 5, 8, 0, 0, 53, 55, 3, + 8, 4, 0, 54, 52, 1, 0, 0, 0, 55, 58, 1, 0, 0, 0, 56, 54, 1, 0, 0, 0, 56, + 57, 1, 0, 0, 0, 57, 7, 1, 0, 0, 0, 58, 56, 1, 0, 0, 0, 59, 60, 6, 4, -1, + 0, 60, 61, 3, 10, 5, 0, 61, 67, 1, 0, 0, 0, 62, 63, 10, 1, 0, 0, 63, 64, + 7, 0, 0, 0, 64, 66, 3, 8, 4, 2, 65, 62, 1, 0, 0, 0, 66, 69, 1, 0, 0, 0, + 67, 65, 1, 0, 0, 0, 67, 68, 1, 0, 0, 0, 68, 9, 1, 0, 0, 0, 69, 67, 1, 0, + 0, 0, 70, 71, 6, 5, -1, 0, 71, 72, 3, 12, 6, 0, 72, 81, 1, 0, 0, 0, 73, + 74, 10, 2, 0, 0, 74, 75, 7, 1, 0, 0, 75, 80, 3, 10, 5, 3, 76, 77, 10, 1, + 0, 0, 77, 78, 7, 2, 0, 0, 78, 80, 3, 10, 5, 2, 79, 73, 1, 0, 0, 0, 79, + 76, 1, 0, 0, 0, 80, 83, 1, 0, 0, 0, 81, 79, 1, 0, 0, 0, 81, 82, 1, 0, 0, + 0, 82, 11, 1, 0, 0, 0, 83, 81, 1, 0, 0, 0, 84, 98, 3, 14, 7, 0, 85, 87, + 5, 19, 0, 0, 86, 85, 1, 0, 0, 0, 87, 88, 1, 0, 0, 0, 88, 86, 1, 0, 0, 0, + 88, 89, 1, 0, 0, 0, 89, 90, 1, 0, 0, 0, 90, 98, 3, 14, 7, 0, 91, 93, 5, + 18, 0, 0, 92, 91, 1, 0, 0, 0, 93, 94, 1, 0, 0, 0, 94, 92, 1, 0, 0, 0, 94, + 95, 1, 0, 0, 0, 95, 96, 1, 0, 0, 0, 96, 98, 3, 14, 7, 0, 97, 84, 1, 0, + 0, 0, 97, 86, 1, 0, 0, 0, 97, 92, 1, 0, 0, 0, 98, 13, 1, 0, 0, 0, 99, 100, + 6, 7, -1, 0, 100, 101, 3, 16, 8, 0, 101, 126, 1, 0, 0, 0, 102, 103, 10, + 3, 0, 0, 103, 105, 5, 16, 0, 0, 104, 106, 5, 20, 0, 0, 105, 104, 1, 0, + 0, 0, 105, 106, 1, 0, 0, 0, 106, 107, 1, 0, 0, 0, 107, 125, 5, 36, 0, 0, + 108, 109, 10, 2, 0, 0, 109, 110, 5, 16, 0, 0, 110, 111, 5, 36, 0, 0, 111, + 113, 5, 14, 0, 0, 112, 114, 3, 18, 9, 0, 113, 112, 1, 0, 0, 0, 113, 114, + 1, 0, 0, 0, 114, 115, 1, 0, 0, 0, 115, 125, 5, 15, 0, 0, 116, 117, 10, + 1, 0, 0, 117, 119, 5, 10, 0, 0, 118, 120, 5, 20, 0, 0, 119, 118, 1, 0, + 0, 0, 119, 120, 1, 0, 0, 0, 120, 121, 1, 0, 0, 0, 121, 122, 3, 2, 1, 0, + 122, 123, 5, 11, 0, 0, 123, 125, 1, 0, 0, 0, 124, 102, 1, 0, 0, 0, 124, + 108, 1, 0, 0, 0, 124, 116, 1, 0, 0, 0, 125, 128, 1, 0, 0, 0, 126, 124, + 1, 0, 0, 0, 126, 127, 1, 0, 0, 0, 127, 15, 1, 0, 0, 0, 128, 126, 1, 0, + 0, 0, 129, 131, 5, 16, 0, 0, 130, 129, 1, 0, 0, 0, 130, 131, 1, 0, 0, 0, + 131, 132, 1, 0, 0, 0, 132, 138, 5, 36, 0, 0, 133, 135, 5, 14, 0, 0, 134, + 136, 3, 18, 9, 0, 135, 134, 1, 0, 0, 0, 135, 136, 1, 0, 0, 0, 136, 137, + 1, 0, 0, 0, 137, 139, 5, 15, 0, 0, 138, 133, 1, 0, 0, 0, 138, 139, 1, 0, + 0, 0, 139, 181, 1, 0, 0, 0, 140, 141, 5, 14, 0, 0, 141, 142, 3, 2, 1, 0, + 142, 143, 5, 15, 0, 0, 143, 181, 1, 0, 0, 0, 144, 146, 5, 10, 0, 0, 145, + 147, 3, 20, 10, 0, 146, 145, 1, 0, 0, 0, 146, 147, 1, 0, 0, 0, 147, 149, + 1, 0, 0, 
0, 148, 150, 5, 17, 0, 0, 149, 148, 1, 0, 0, 0, 149, 150, 1, 0, + 0, 0, 150, 151, 1, 0, 0, 0, 151, 181, 5, 11, 0, 0, 152, 154, 5, 12, 0, + 0, 153, 155, 3, 26, 13, 0, 154, 153, 1, 0, 0, 0, 154, 155, 1, 0, 0, 0, + 155, 157, 1, 0, 0, 0, 156, 158, 5, 17, 0, 0, 157, 156, 1, 0, 0, 0, 157, + 158, 1, 0, 0, 0, 158, 159, 1, 0, 0, 0, 159, 181, 5, 13, 0, 0, 160, 162, + 5, 16, 0, 0, 161, 160, 1, 0, 0, 0, 161, 162, 1, 0, 0, 0, 162, 163, 1, 0, + 0, 0, 163, 168, 5, 36, 0, 0, 164, 165, 5, 16, 0, 0, 165, 167, 5, 36, 0, + 0, 166, 164, 1, 0, 0, 0, 167, 170, 1, 0, 0, 0, 168, 166, 1, 0, 0, 0, 168, + 169, 1, 0, 0, 0, 169, 171, 1, 0, 0, 0, 170, 168, 1, 0, 0, 0, 171, 173, + 5, 12, 0, 0, 172, 174, 3, 22, 11, 0, 173, 172, 1, 0, 0, 0, 173, 174, 1, + 0, 0, 0, 174, 176, 1, 0, 0, 0, 175, 177, 5, 17, 0, 0, 176, 175, 1, 0, 0, + 0, 176, 177, 1, 0, 0, 0, 177, 178, 1, 0, 0, 0, 178, 181, 5, 13, 0, 0, 179, + 181, 3, 30, 15, 0, 180, 130, 1, 0, 0, 0, 180, 140, 1, 0, 0, 0, 180, 144, + 1, 0, 0, 0, 180, 152, 1, 0, 0, 0, 180, 161, 1, 0, 0, 0, 180, 179, 1, 0, + 0, 0, 181, 17, 1, 0, 0, 0, 182, 187, 3, 2, 1, 0, 183, 184, 5, 17, 0, 0, + 184, 186, 3, 2, 1, 0, 185, 183, 1, 0, 0, 0, 186, 189, 1, 0, 0, 0, 187, + 185, 1, 0, 0, 0, 187, 188, 1, 0, 0, 0, 188, 19, 1, 0, 0, 0, 189, 187, 1, + 0, 0, 0, 190, 195, 3, 28, 14, 0, 191, 192, 5, 17, 0, 0, 192, 194, 3, 28, + 14, 0, 193, 191, 1, 0, 0, 0, 194, 197, 1, 0, 0, 0, 195, 193, 1, 0, 0, 0, + 195, 196, 1, 0, 0, 0, 196, 21, 1, 0, 0, 0, 197, 195, 1, 0, 0, 0, 198, 199, + 3, 24, 12, 0, 199, 200, 5, 21, 0, 0, 200, 208, 3, 2, 1, 0, 201, 202, 5, + 17, 0, 0, 202, 203, 3, 24, 12, 0, 203, 204, 5, 21, 0, 0, 204, 205, 3, 2, + 1, 0, 205, 207, 1, 0, 0, 0, 206, 201, 1, 0, 0, 0, 207, 210, 1, 0, 0, 0, + 208, 206, 1, 0, 0, 0, 208, 209, 1, 0, 0, 0, 209, 23, 1, 0, 0, 0, 210, 208, + 1, 0, 0, 0, 211, 213, 5, 20, 0, 0, 212, 211, 1, 0, 0, 0, 212, 213, 1, 0, + 0, 0, 213, 214, 1, 0, 0, 0, 214, 215, 5, 36, 0, 0, 215, 25, 1, 0, 0, 0, + 216, 217, 3, 28, 14, 0, 217, 218, 5, 21, 0, 0, 218, 226, 3, 2, 1, 0, 219, + 220, 5, 17, 0, 0, 220, 221, 3, 28, 14, 0, 221, 222, 5, 21, 0, 0, 222, 223, + 3, 2, 1, 0, 223, 225, 1, 0, 0, 0, 224, 219, 1, 0, 0, 0, 225, 228, 1, 0, + 0, 0, 226, 224, 1, 0, 0, 0, 226, 227, 1, 0, 0, 0, 227, 27, 1, 0, 0, 0, + 228, 226, 1, 0, 0, 0, 229, 231, 5, 20, 0, 0, 230, 229, 1, 0, 0, 0, 230, + 231, 1, 0, 0, 0, 231, 232, 1, 0, 0, 0, 232, 233, 3, 2, 1, 0, 233, 29, 1, + 0, 0, 0, 234, 236, 5, 18, 0, 0, 235, 234, 1, 0, 0, 0, 235, 236, 1, 0, 0, + 0, 236, 237, 1, 0, 0, 0, 237, 249, 5, 32, 0, 0, 238, 249, 5, 33, 0, 0, + 239, 241, 5, 18, 0, 0, 240, 239, 1, 0, 0, 0, 240, 241, 1, 0, 0, 0, 241, + 242, 1, 0, 0, 0, 242, 249, 5, 31, 0, 0, 243, 249, 5, 34, 0, 0, 244, 249, + 5, 35, 0, 0, 245, 249, 5, 26, 0, 0, 246, 249, 5, 27, 0, 0, 247, 249, 5, + 28, 0, 0, 248, 235, 1, 0, 0, 0, 248, 238, 1, 0, 0, 0, 248, 240, 1, 0, 0, + 0, 248, 243, 1, 0, 0, 0, 248, 244, 1, 0, 0, 0, 248, 245, 1, 0, 0, 0, 248, + 246, 1, 0, 0, 0, 248, 247, 1, 0, 0, 0, 249, 31, 1, 0, 0, 0, 35, 41, 48, + 56, 67, 79, 81, 88, 94, 97, 105, 113, 119, 124, 126, 130, 135, 138, 146, + 149, 154, 157, 161, 168, 173, 176, 180, 187, 195, 208, 212, 226, 230, 235, + 240, 248, +} + deserializer := antlr.NewATNDeserializer(nil) + staticData.atn = deserializer.Deserialize(staticData.serializedATN) + atn := staticData.atn + staticData.decisionToDFA = make([]*antlr.DFA, len(atn.DecisionToState)) + decisionToDFA := staticData.decisionToDFA + for index, state := range atn.DecisionToState { + decisionToDFA[index] = antlr.NewDFA(state, index) + } } // CELParserInit initializes any static state 
used to implement CELParser. By default the @@ -181,8 +182,8 @@ func celParserInit() { // NewCELParser(). You can call this function if you wish to initialize the static state ahead // of time. func CELParserInit() { - staticData := &celParserStaticData - staticData.once.Do(celParserInit) + staticData := &CELParserStaticData + staticData.once.Do(celParserInit) } // NewCELParser produces a new parser instance for the optional input antlr.TokenStream. @@ -190,75 +191,76 @@ func NewCELParser(input antlr.TokenStream) *CELParser { CELParserInit() this := new(CELParser) this.BaseParser = antlr.NewBaseParser(input) - staticData := &celParserStaticData - this.Interpreter = antlr.NewParserATNSimulator(this, staticData.atn, staticData.decisionToDFA, staticData.predictionContextCache) - this.RuleNames = staticData.ruleNames - this.LiteralNames = staticData.literalNames - this.SymbolicNames = staticData.symbolicNames + staticData := &CELParserStaticData + this.Interpreter = antlr.NewParserATNSimulator(this, staticData.atn, staticData.decisionToDFA, staticData.PredictionContextCache) + this.RuleNames = staticData.RuleNames + this.LiteralNames = staticData.LiteralNames + this.SymbolicNames = staticData.SymbolicNames this.GrammarFileName = "CEL.g4" return this } + // CELParser tokens. const ( - CELParserEOF = antlr.TokenEOF - CELParserEQUALS = 1 - CELParserNOT_EQUALS = 2 - CELParserIN = 3 - CELParserLESS = 4 - CELParserLESS_EQUALS = 5 + CELParserEOF = antlr.TokenEOF + CELParserEQUALS = 1 + CELParserNOT_EQUALS = 2 + CELParserIN = 3 + CELParserLESS = 4 + CELParserLESS_EQUALS = 5 CELParserGREATER_EQUALS = 6 - CELParserGREATER = 7 - CELParserLOGICAL_AND = 8 - CELParserLOGICAL_OR = 9 - CELParserLBRACKET = 10 - CELParserRPRACKET = 11 - CELParserLBRACE = 12 - CELParserRBRACE = 13 - CELParserLPAREN = 14 - CELParserRPAREN = 15 - CELParserDOT = 16 - CELParserCOMMA = 17 - CELParserMINUS = 18 - CELParserEXCLAM = 19 - CELParserQUESTIONMARK = 20 - CELParserCOLON = 21 - CELParserPLUS = 22 - CELParserSTAR = 23 - CELParserSLASH = 24 - CELParserPERCENT = 25 - CELParserCEL_TRUE = 26 - CELParserCEL_FALSE = 27 - CELParserNUL = 28 - CELParserWHITESPACE = 29 - CELParserCOMMENT = 30 - CELParserNUM_FLOAT = 31 - CELParserNUM_INT = 32 - CELParserNUM_UINT = 33 - CELParserSTRING = 34 - CELParserBYTES = 35 - CELParserIDENTIFIER = 36 + CELParserGREATER = 7 + CELParserLOGICAL_AND = 8 + CELParserLOGICAL_OR = 9 + CELParserLBRACKET = 10 + CELParserRPRACKET = 11 + CELParserLBRACE = 12 + CELParserRBRACE = 13 + CELParserLPAREN = 14 + CELParserRPAREN = 15 + CELParserDOT = 16 + CELParserCOMMA = 17 + CELParserMINUS = 18 + CELParserEXCLAM = 19 + CELParserQUESTIONMARK = 20 + CELParserCOLON = 21 + CELParserPLUS = 22 + CELParserSTAR = 23 + CELParserSLASH = 24 + CELParserPERCENT = 25 + CELParserCEL_TRUE = 26 + CELParserCEL_FALSE = 27 + CELParserNUL = 28 + CELParserWHITESPACE = 29 + CELParserCOMMENT = 30 + CELParserNUM_FLOAT = 31 + CELParserNUM_INT = 32 + CELParserNUM_UINT = 33 + CELParserSTRING = 34 + CELParserBYTES = 35 + CELParserIDENTIFIER = 36 ) // CELParser rules. 
const ( - CELParserRULE_start = 0 - CELParserRULE_expr = 1 - CELParserRULE_conditionalOr = 2 - CELParserRULE_conditionalAnd = 3 - CELParserRULE_relation = 4 - CELParserRULE_calc = 5 - CELParserRULE_unary = 6 - CELParserRULE_member = 7 - CELParserRULE_primary = 8 - CELParserRULE_exprList = 9 - CELParserRULE_listInit = 10 + CELParserRULE_start = 0 + CELParserRULE_expr = 1 + CELParserRULE_conditionalOr = 2 + CELParserRULE_conditionalAnd = 3 + CELParserRULE_relation = 4 + CELParserRULE_calc = 5 + CELParserRULE_unary = 6 + CELParserRULE_member = 7 + CELParserRULE_primary = 8 + CELParserRULE_exprList = 9 + CELParserRULE_listInit = 10 CELParserRULE_fieldInitializerList = 11 - CELParserRULE_optField = 12 - CELParserRULE_mapInitializerList = 13 - CELParserRULE_optExpr = 14 - CELParserRULE_literal = 15 + CELParserRULE_optField = 12 + CELParserRULE_mapInitializerList = 13 + CELParserRULE_optExpr = 14 + CELParserRULE_literal = 15 ) // IStartContext is an interface to support dynamic dispatch. @@ -271,9 +273,11 @@ type IStartContext interface { // GetE returns the e rule contexts. GetE() IExprContext + // SetE sets the e rule contexts. SetE(IExprContext) + // Getter signatures EOF() antlr.TerminalNode Expr() IExprContext @@ -283,24 +287,29 @@ type IStartContext interface { } type StartContext struct { - *antlr.BaseParserRuleContext + antlr.BaseParserRuleContext parser antlr.Parser - e IExprContext + e IExprContext } func NewEmptyStartContext() *StartContext { var p = new(StartContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) p.RuleIndex = CELParserRULE_start return p } +func InitEmptyStartContext(p *StartContext) { + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = CELParserRULE_start +} + func (*StartContext) IsStartContext() {} func NewStartContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *StartContext { var p = new(StartContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser p.RuleIndex = CELParserRULE_start @@ -312,17 +321,19 @@ func (s *StartContext) GetParser() antlr.Parser { return s.parser } func (s *StartContext) GetE() IExprContext { return s.e } + func (s *StartContext) SetE(v IExprContext) { s.e = v } + func (s *StartContext) EOF() antlr.TerminalNode { return s.GetToken(CELParserEOF, 0) } func (s *StartContext) Expr() IExprContext { - var t antlr.RuleContext + var t antlr.RuleContext; for _, ctx := range s.GetChildren() { if _, ok := ctx.(IExprContext); ok { - t = ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } } @@ -342,6 +353,7 @@ func (s *StartContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) return antlr.TreesStringTree(s, ruleNames, recog) } + func (s *StartContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterStart(s) @@ -364,45 +376,46 @@ func (s *StartContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { } } -func (p *CELParser) Start() (localctx IStartContext) { - this := p - _ = this - localctx = NewStartContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 0, CELParserRULE_start) - defer func() { - p.ExitRule() - }() - - defer func() { - if err := recover(); err != nil { - if v, ok := err.(antlr.RecognitionException); ok { - localctx.SetException(v) - 
p.GetErrorHandler().ReportError(p, v) - p.GetErrorHandler().Recover(p, v) - } else { - panic(err) - } - } - }() +func (p *CELParser) Start_() (localctx IStartContext) { + localctx = NewStartContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 0, CELParserRULE_start) p.EnterOuterAlt(localctx, 1) { p.SetState(32) var _x = p.Expr() + localctx.(*StartContext).e = _x } { p.SetState(33) p.Match(CELParserEOF) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } + + +errorExit: + if p.HasError() { + v := p.GetError() + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + p.SetError(nil) + } + p.ExitRule() return localctx + goto errorExit // Trick to prevent compiler error if the label is not used } + // IExprContext is an interface to support dynamic dispatch. type IExprContext interface { antlr.ParserRuleContext @@ -411,10 +424,12 @@ type IExprContext interface { GetParser() antlr.Parser // GetOp returns the op token. - GetOp() antlr.Token + GetOp() antlr.Token + // SetOp sets the op token. - SetOp(antlr.Token) + SetOp(antlr.Token) + // GetE returns the e rule contexts. GetE() IConditionalOrContext @@ -425,6 +440,7 @@ type IExprContext interface { // GetE2 returns the e2 rule contexts. GetE2() IExprContext + // SetE sets the e rule contexts. SetE(IConditionalOrContext) @@ -434,6 +450,7 @@ type IExprContext interface { // SetE2 sets the e2 rule contexts. SetE2(IExprContext) + // Getter signatures AllConditionalOr() []IConditionalOrContext ConditionalOr(i int) IConditionalOrContext @@ -446,27 +463,32 @@ type IExprContext interface { } type ExprContext struct { - *antlr.BaseParserRuleContext + antlr.BaseParserRuleContext parser antlr.Parser - e IConditionalOrContext - op antlr.Token - e1 IConditionalOrContext - e2 IExprContext + e IConditionalOrContext + op antlr.Token + e1 IConditionalOrContext + e2 IExprContext } func NewEmptyExprContext() *ExprContext { var p = new(ExprContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) p.RuleIndex = CELParserRULE_expr return p } +func InitEmptyExprContext(p *ExprContext) { + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = CELParserRULE_expr +} + func (*ExprContext) IsExprContext() {} func NewExprContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ExprContext { var p = new(ExprContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser p.RuleIndex = CELParserRULE_expr @@ -478,20 +500,24 @@ func (s *ExprContext) GetParser() antlr.Parser { return s.parser } func (s *ExprContext) GetOp() antlr.Token { return s.op } + func (s *ExprContext) SetOp(v antlr.Token) { s.op = v } + func (s *ExprContext) GetE() IConditionalOrContext { return s.e } func (s *ExprContext) GetE1() IConditionalOrContext { return s.e1 } func (s *ExprContext) GetE2() IExprContext { return s.e2 } + func (s *ExprContext) SetE(v IConditionalOrContext) { s.e = v } func (s *ExprContext) SetE1(v IConditionalOrContext) { s.e1 = v } func (s *ExprContext) SetE2(v IExprContext) { s.e2 = v } + func (s *ExprContext) AllConditionalOr() []IConditionalOrContext { children := s.GetChildren() len := 0 @@ -514,12 +540,12 @@ func (s *ExprContext) AllConditionalOr() []IConditionalOrContext { } func (s *ExprContext) ConditionalOr(i int) 
IConditionalOrContext { - var t antlr.RuleContext + var t antlr.RuleContext; j := 0 for _, ctx := range s.GetChildren() { if _, ok := ctx.(IConditionalOrContext); ok { if j == i { - t = ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } j++ @@ -542,10 +568,10 @@ func (s *ExprContext) QUESTIONMARK() antlr.TerminalNode { } func (s *ExprContext) Expr() IExprContext { - var t antlr.RuleContext + var t antlr.RuleContext; for _, ctx := range s.GetChildren() { if _, ok := ctx.(IExprContext); ok { - t = ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } } @@ -565,6 +591,7 @@ func (s *ExprContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) s return antlr.TreesStringTree(s, ruleNames, recog) } + func (s *ExprContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterExpr(s) @@ -587,42 +614,31 @@ func (s *ExprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { } } -func (p *CELParser) Expr() (localctx IExprContext) { - this := p - _ = this + + +func (p *CELParser) Expr() (localctx IExprContext) { localctx = NewExprContext(p, p.GetParserRuleContext(), p.GetState()) p.EnterRule(localctx, 2, CELParserRULE_expr) var _la int - defer func() { - p.ExitRule() - }() - - defer func() { - if err := recover(); err != nil { - if v, ok := err.(antlr.RecognitionException); ok { - localctx.SetException(v) - p.GetErrorHandler().ReportError(p, v) - p.GetErrorHandler().Recover(p, v) - } else { - panic(err) - } - } - }() - p.EnterOuterAlt(localctx, 1) { p.SetState(35) var _x = p.ConditionalOr() + localctx.(*ExprContext).e = _x } p.SetState(41) p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } _la = p.GetTokenStream().LA(1) + if _la == CELParserQUESTIONMARK { { p.SetState(36) @@ -630,31 +646,54 @@ func (p *CELParser) Expr() (localctx IExprContext) { var _m = p.Match(CELParserQUESTIONMARK) localctx.(*ExprContext).op = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } { p.SetState(37) var _x = p.ConditionalOr() + localctx.(*ExprContext).e1 = _x } { p.SetState(38) p.Match(CELParserCOLON) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } { p.SetState(39) var _x = p.Expr() + localctx.(*ExprContext).e2 = _x } } + + +errorExit: + if p.HasError() { + v := p.GetError() + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + p.SetError(nil) + } + p.ExitRule() return localctx + goto errorExit // Trick to prevent compiler error if the label is not used } + // IConditionalOrContext is an interface to support dynamic dispatch. type IConditionalOrContext interface { antlr.ParserRuleContext @@ -663,34 +702,42 @@ type IConditionalOrContext interface { GetParser() antlr.Parser // GetS9 returns the s9 token. - GetS9() antlr.Token + GetS9() antlr.Token + // SetS9 sets the s9 token. - SetS9(antlr.Token) + SetS9(antlr.Token) + // GetOps returns the ops token list. GetOps() []antlr.Token + // SetOps sets the ops token list. SetOps([]antlr.Token) + // GetE returns the e rule contexts. GetE() IConditionalAndContext // Get_conditionalAnd returns the _conditionalAnd rule contexts. Get_conditionalAnd() IConditionalAndContext + // SetE sets the e rule contexts. SetE(IConditionalAndContext) // Set_conditionalAnd sets the _conditionalAnd rule contexts. Set_conditionalAnd(IConditionalAndContext) + // GetE1 returns the e1 rule context list. GetE1() []IConditionalAndContext + // SetE1 sets the e1 rule context list. 
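The context structs in this update embed antlr.BaseParserRuleContext by value and initialize it in place with antlr.InitBaseParserRuleContext, where the previous generation embedded a pointer returned by antlr.NewBaseParserRuleContext. A rough sketch of the difference, using illustrative stand-in types rather than the antlr API; the likely motivation, fewer separate allocations per context, is an assumption, not something stated in the patch.

package main

import "fmt"

// baseRuleCtx stands in for the base parser-rule context.
type baseRuleCtx struct {
	parent    *baseRuleCtx
	ruleIndex int
}

// initBaseRuleCtx initializes an embedded base context in place,
// mirroring the role of InitBaseParserRuleContext in the diff.
func initBaseRuleCtx(c *baseRuleCtx, parent *baseRuleCtx, ruleIndex int) {
	c.parent = parent
	c.ruleIndex = ruleIndex
}

type exprCtx struct {
	baseRuleCtx        // embedded by value: one allocation covers the whole context
	operator    string // rule-specific label
}

func newExprCtx(parent *baseRuleCtx, ruleIndex int) *exprCtx {
	p := new(exprCtx)
	initBaseRuleCtx(&p.baseRuleCtx, parent, ruleIndex)
	return p
}

func main() {
	ctx := newExprCtx(nil, 1)
	fmt.Println(ctx.ruleIndex, ctx.operator == "")
}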
- SetE1([]IConditionalAndContext) + SetE1([]IConditionalAndContext) + // Getter signatures AllConditionalAnd() []IConditionalAndContext @@ -703,28 +750,33 @@ type IConditionalOrContext interface { } type ConditionalOrContext struct { - *antlr.BaseParserRuleContext - parser antlr.Parser - e IConditionalAndContext - s9 antlr.Token - ops []antlr.Token - _conditionalAnd IConditionalAndContext - e1 []IConditionalAndContext + antlr.BaseParserRuleContext + parser antlr.Parser + e IConditionalAndContext + s9 antlr.Token + ops []antlr.Token + _conditionalAnd IConditionalAndContext + e1 []IConditionalAndContext } func NewEmptyConditionalOrContext() *ConditionalOrContext { var p = new(ConditionalOrContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) p.RuleIndex = CELParserRULE_conditionalOr return p } +func InitEmptyConditionalOrContext(p *ConditionalOrContext) { + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = CELParserRULE_conditionalOr +} + func (*ConditionalOrContext) IsConditionalOrContext() {} func NewConditionalOrContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ConditionalOrContext { var p = new(ConditionalOrContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser p.RuleIndex = CELParserRULE_conditionalOr @@ -736,24 +788,32 @@ func (s *ConditionalOrContext) GetParser() antlr.Parser { return s.parser } func (s *ConditionalOrContext) GetS9() antlr.Token { return s.s9 } + func (s *ConditionalOrContext) SetS9(v antlr.Token) { s.s9 = v } + func (s *ConditionalOrContext) GetOps() []antlr.Token { return s.ops } + func (s *ConditionalOrContext) SetOps(v []antlr.Token) { s.ops = v } + func (s *ConditionalOrContext) GetE() IConditionalAndContext { return s.e } func (s *ConditionalOrContext) Get_conditionalAnd() IConditionalAndContext { return s._conditionalAnd } + func (s *ConditionalOrContext) SetE(v IConditionalAndContext) { s.e = v } func (s *ConditionalOrContext) Set_conditionalAnd(v IConditionalAndContext) { s._conditionalAnd = v } + func (s *ConditionalOrContext) GetE1() []IConditionalAndContext { return s.e1 } + func (s *ConditionalOrContext) SetE1(v []IConditionalAndContext) { s.e1 = v } + func (s *ConditionalOrContext) AllConditionalAnd() []IConditionalAndContext { children := s.GetChildren() len := 0 @@ -776,12 +836,12 @@ func (s *ConditionalOrContext) AllConditionalAnd() []IConditionalAndContext { } func (s *ConditionalOrContext) ConditionalAnd(i int) IConditionalAndContext { - var t antlr.RuleContext + var t antlr.RuleContext; j := 0 for _, ctx := range s.GetChildren() { if _, ok := ctx.(IConditionalAndContext); ok { if j == i { - t = ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } j++ @@ -811,6 +871,7 @@ func (s *ConditionalOrContext) ToStringTree(ruleNames []string, recog antlr.Reco return antlr.TreesStringTree(s, ruleNames, recog) } + func (s *ConditionalOrContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterConditionalOr(s) @@ -833,42 +894,31 @@ func (s *ConditionalOrContext) Accept(visitor antlr.ParseTreeVisitor) interface{ } } -func (p *CELParser) ConditionalOr() (localctx IConditionalOrContext) { - this := p - _ = this + + +func (p *CELParser) ConditionalOr() (localctx IConditionalOrContext) { localctx = 
NewConditionalOrContext(p, p.GetParserRuleContext(), p.GetState()) p.EnterRule(localctx, 4, CELParserRULE_conditionalOr) var _la int - defer func() { - p.ExitRule() - }() - - defer func() { - if err := recover(); err != nil { - if v, ok := err.(antlr.RecognitionException); ok { - localctx.SetException(v) - p.GetErrorHandler().ReportError(p, v) - p.GetErrorHandler().Recover(p, v) - } else { - panic(err) - } - } - }() - p.EnterOuterAlt(localctx, 1) { p.SetState(43) var _x = p.ConditionalAnd() + localctx.(*ConditionalOrContext).e = _x } p.SetState(48) p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } _la = p.GetTokenStream().LA(1) + for _la == CELParserLOGICAL_OR { { p.SetState(44) @@ -876,6 +926,10 @@ func (p *CELParser) ConditionalOr() (localctx IConditionalOrContext) { var _m = p.Match(CELParserLOGICAL_OR) localctx.(*ConditionalOrContext).s9 = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } localctx.(*ConditionalOrContext).ops = append(localctx.(*ConditionalOrContext).ops, localctx.(*ConditionalOrContext).s9) { @@ -883,18 +937,36 @@ func (p *CELParser) ConditionalOr() (localctx IConditionalOrContext) { var _x = p.ConditionalAnd() + localctx.(*ConditionalOrContext)._conditionalAnd = _x } localctx.(*ConditionalOrContext).e1 = append(localctx.(*ConditionalOrContext).e1, localctx.(*ConditionalOrContext)._conditionalAnd) + p.SetState(50) p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } _la = p.GetTokenStream().LA(1) } + + +errorExit: + if p.HasError() { + v := p.GetError() + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + p.SetError(nil) + } + p.ExitRule() return localctx + goto errorExit // Trick to prevent compiler error if the label is not used } + // IConditionalAndContext is an interface to support dynamic dispatch. type IConditionalAndContext interface { antlr.ParserRuleContext @@ -903,34 +975,42 @@ type IConditionalAndContext interface { GetParser() antlr.Parser // GetS8 returns the s8 token. - GetS8() antlr.Token + GetS8() antlr.Token + // SetS8 sets the s8 token. - SetS8(antlr.Token) + SetS8(antlr.Token) + // GetOps returns the ops token list. GetOps() []antlr.Token + // SetOps sets the ops token list. SetOps([]antlr.Token) + // GetE returns the e rule contexts. GetE() IRelationContext // Get_relation returns the _relation rule contexts. Get_relation() IRelationContext + // SetE sets the e rule contexts. SetE(IRelationContext) // Set_relation sets the _relation rule contexts. Set_relation(IRelationContext) + // GetE1 returns the e1 rule context list. GetE1() []IRelationContext + // SetE1 sets the e1 rule context list. 
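Rule methods such as Start_, Expr and ConditionalOr no longer rely on a deferred recover() to catch antlr.RecognitionException panics; each step instead checks p.HasError() and jumps to a single errorExit label where the error is reported, cleared with SetError(nil), and the rule is exited (the unreachable goto after the return exists only so the label always has a use). A small stand-alone sketch of that control-flow shape, with a toy parser type in place of the generated one:

package main

import "fmt"

// miniParser is an illustrative stand-in for the generated parser state.
type miniParser struct {
	toks []string
	pos  int
	err  error
}

// match records an error instead of panicking when the token does not fit.
func (p *miniParser) match(want string) bool {
	if p.pos < len(p.toks) && p.toks[p.pos] == want {
		p.pos++
		return true
	}
	p.err = fmt.Errorf("expected %q at %d", want, p.pos)
	return false
}

// parsePair shows the regenerated rule shape: every step checks for a
// recognition error and jumps to one shared errorExit label.
func (p *miniParser) parsePair() (ok bool) {
	if !p.match("(") {
		goto errorExit
	}
	if !p.match(")") {
		goto errorExit
	}
	ok = true

errorExit:
	if p.err != nil {
		fmt.Println("recovering from:", p.err)
		p.err = nil // clear the error, as the generated code does with SetError(nil)
	}
	return ok
}

func main() {
	good := &miniParser{toks: []string{"(", ")"}}
	bad := &miniParser{toks: []string{"("}}
	fmt.Println(good.parsePair(), bad.parsePair())
}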
- SetE1([]IRelationContext) + SetE1([]IRelationContext) + // Getter signatures AllRelation() []IRelationContext @@ -943,28 +1023,33 @@ type IConditionalAndContext interface { } type ConditionalAndContext struct { - *antlr.BaseParserRuleContext - parser antlr.Parser - e IRelationContext - s8 antlr.Token - ops []antlr.Token - _relation IRelationContext - e1 []IRelationContext + antlr.BaseParserRuleContext + parser antlr.Parser + e IRelationContext + s8 antlr.Token + ops []antlr.Token + _relation IRelationContext + e1 []IRelationContext } func NewEmptyConditionalAndContext() *ConditionalAndContext { var p = new(ConditionalAndContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) p.RuleIndex = CELParserRULE_conditionalAnd return p } +func InitEmptyConditionalAndContext(p *ConditionalAndContext) { + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = CELParserRULE_conditionalAnd +} + func (*ConditionalAndContext) IsConditionalAndContext() {} func NewConditionalAndContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ConditionalAndContext { var p = new(ConditionalAndContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser p.RuleIndex = CELParserRULE_conditionalAnd @@ -976,24 +1061,32 @@ func (s *ConditionalAndContext) GetParser() antlr.Parser { return s.parser } func (s *ConditionalAndContext) GetS8() antlr.Token { return s.s8 } + func (s *ConditionalAndContext) SetS8(v antlr.Token) { s.s8 = v } + func (s *ConditionalAndContext) GetOps() []antlr.Token { return s.ops } + func (s *ConditionalAndContext) SetOps(v []antlr.Token) { s.ops = v } + func (s *ConditionalAndContext) GetE() IRelationContext { return s.e } func (s *ConditionalAndContext) Get_relation() IRelationContext { return s._relation } + func (s *ConditionalAndContext) SetE(v IRelationContext) { s.e = v } func (s *ConditionalAndContext) Set_relation(v IRelationContext) { s._relation = v } + func (s *ConditionalAndContext) GetE1() []IRelationContext { return s.e1 } + func (s *ConditionalAndContext) SetE1(v []IRelationContext) { s.e1 = v } + func (s *ConditionalAndContext) AllRelation() []IRelationContext { children := s.GetChildren() len := 0 @@ -1016,12 +1109,12 @@ func (s *ConditionalAndContext) AllRelation() []IRelationContext { } func (s *ConditionalAndContext) Relation(i int) IRelationContext { - var t antlr.RuleContext + var t antlr.RuleContext; j := 0 for _, ctx := range s.GetChildren() { if _, ok := ctx.(IRelationContext); ok { if j == i { - t = ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } j++ @@ -1051,6 +1144,7 @@ func (s *ConditionalAndContext) ToStringTree(ruleNames []string, recog antlr.Rec return antlr.TreesStringTree(s, ruleNames, recog) } + func (s *ConditionalAndContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterConditionalAnd(s) @@ -1073,30 +1167,14 @@ func (s *ConditionalAndContext) Accept(visitor antlr.ParseTreeVisitor) interface } } -func (p *CELParser) ConditionalAnd() (localctx IConditionalAndContext) { - this := p - _ = this + + +func (p *CELParser) ConditionalAnd() (localctx IConditionalAndContext) { localctx = NewConditionalAndContext(p, p.GetParserRuleContext(), p.GetState()) p.EnterRule(localctx, 6, CELParserRULE_conditionalAnd) var _la int - 
defer func() { - p.ExitRule() - }() - - defer func() { - if err := recover(); err != nil { - if v, ok := err.(antlr.RecognitionException); ok { - localctx.SetException(v) - p.GetErrorHandler().ReportError(p, v) - p.GetErrorHandler().Recover(p, v) - } else { - panic(err) - } - } - }() - p.EnterOuterAlt(localctx, 1) { p.SetState(51) @@ -1107,8 +1185,12 @@ func (p *CELParser) ConditionalAnd() (localctx IConditionalAndContext) { } p.SetState(56) p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } _la = p.GetTokenStream().LA(1) + for _la == CELParserLOGICAL_AND { { p.SetState(52) @@ -1116,6 +1198,10 @@ func (p *CELParser) ConditionalAnd() (localctx IConditionalAndContext) { var _m = p.Match(CELParserLOGICAL_AND) localctx.(*ConditionalAndContext).s8 = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } localctx.(*ConditionalAndContext).ops = append(localctx.(*ConditionalAndContext).ops, localctx.(*ConditionalAndContext).s8) { @@ -1127,14 +1213,31 @@ func (p *CELParser) ConditionalAnd() (localctx IConditionalAndContext) { } localctx.(*ConditionalAndContext).e1 = append(localctx.(*ConditionalAndContext).e1, localctx.(*ConditionalAndContext)._relation) + p.SetState(58) p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } _la = p.GetTokenStream().LA(1) } + + +errorExit: + if p.HasError() { + v := p.GetError() + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + p.SetError(nil) + } + p.ExitRule() return localctx + goto errorExit // Trick to prevent compiler error if the label is not used } + // IRelationContext is an interface to support dynamic dispatch. type IRelationContext interface { antlr.ParserRuleContext @@ -1143,10 +1246,12 @@ type IRelationContext interface { GetParser() antlr.Parser // GetOp returns the op token. - GetOp() antlr.Token + GetOp() antlr.Token + // SetOp sets the op token. 
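ConditionalOr and ConditionalAnd gather their repeated operands by looping while the lookahead is LOGICAL_OR or LOGICAL_AND, appending each matched operator to the ops label and each parsed sub-context to the e1 label, which is what the SetOps / SetE1 accessors expose. A toy version of that collect-into-slices loop, using plain strings instead of tokens and contexts:

package main

import "fmt"

// andCtx mirrors the shape of the generated ConditionalAndContext labels.
type andCtx struct {
	e   string   // first operand
	ops []string // one entry per matched '&&'
	e1  []string // the remaining operands, in order
}

func parseConditionalAnd(toks []string) andCtx {
	ctx := andCtx{e: toks[0]}
	for i := 1; i+1 < len(toks); i += 2 {
		if toks[i] != "&&" {
			break // lookahead is no longer the '&&' operator
		}
		ctx.ops = append(ctx.ops, toks[i])
		ctx.e1 = append(ctx.e1, toks[i+1])
	}
	return ctx
}

func main() {
	ctx := parseConditionalAnd([]string{"a", "&&", "b", "&&", "c"})
	fmt.Println(ctx.e, ctx.ops, ctx.e1) // a [&& &&] [b c]
}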
- SetOp(antlr.Token) + SetOp(antlr.Token) + // Getter signatures Calc() ICalcContext @@ -1165,24 +1270,29 @@ type IRelationContext interface { } type RelationContext struct { - *antlr.BaseParserRuleContext + antlr.BaseParserRuleContext parser antlr.Parser - op antlr.Token + op antlr.Token } func NewEmptyRelationContext() *RelationContext { var p = new(RelationContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) p.RuleIndex = CELParserRULE_relation return p } +func InitEmptyRelationContext(p *RelationContext) { + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = CELParserRULE_relation +} + func (*RelationContext) IsRelationContext() {} func NewRelationContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *RelationContext { var p = new(RelationContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser p.RuleIndex = CELParserRULE_relation @@ -1194,13 +1304,15 @@ func (s *RelationContext) GetParser() antlr.Parser { return s.parser } func (s *RelationContext) GetOp() antlr.Token { return s.op } + func (s *RelationContext) SetOp(v antlr.Token) { s.op = v } + func (s *RelationContext) Calc() ICalcContext { - var t antlr.RuleContext + var t antlr.RuleContext; for _, ctx := range s.GetChildren() { if _, ok := ctx.(ICalcContext); ok { - t = ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } } @@ -1234,12 +1346,12 @@ func (s *RelationContext) AllRelation() []IRelationContext { } func (s *RelationContext) Relation(i int) IRelationContext { - var t antlr.RuleContext + var t antlr.RuleContext; j := 0 for _, ctx := range s.GetChildren() { if _, ok := ctx.(IRelationContext); ok { if j == i { - t = ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } j++ @@ -1289,6 +1401,7 @@ func (s *RelationContext) ToStringTree(ruleNames []string, recog antlr.Recognize return antlr.TreesStringTree(s, ruleNames, recog) } + func (s *RelationContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterRelation(s) @@ -1311,15 +1424,17 @@ func (s *RelationContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { } } + + + + func (p *CELParser) Relation() (localctx IRelationContext) { return p.relation(0) } func (p *CELParser) relation(_p int) (localctx IRelationContext) { - this := p - _ = this - var _parentctx antlr.ParserRuleContext = p.GetParserRuleContext() + _parentState := p.GetState() localctx = NewRelationContext(p, p.GetParserRuleContext(), _parentState) var _prevctx IRelationContext = localctx @@ -1328,22 +1443,6 @@ func (p *CELParser) relation(_p int) (localctx IRelationContext) { p.EnterRecursionRule(localctx, 8, CELParserRULE_relation, _p) var _la int - defer func() { - p.UnrollRecursionContexts(_parentctx) - }() - - defer func() { - if err := recover(); err != nil { - if v, ok := err.(antlr.RecognitionException); ok { - localctx.SetException(v) - p.GetErrorHandler().ReportError(p, v) - p.GetErrorHandler().Recover(p, v) - } else { - panic(err) - } - } - }() - var _alt int p.EnterOuterAlt(localctx, 1) @@ -1355,8 +1454,13 @@ func (p *CELParser) relation(_p int) (localctx IRelationContext) { p.GetParserRuleContext().SetStop(p.GetTokenStream().LT(-1)) p.SetState(67) p.GetErrorHandler().Sync(p) - _alt = 
p.GetInterpreter().AdaptivePredict(p.GetTokenStream(), 3, p.GetParserRuleContext()) - + if p.HasError() { + goto errorExit + } + _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 3, p.GetParserRuleContext()) + if p.HasError() { + goto errorExit + } for _alt != 2 && _alt != antlr.ATNInvalidAltNumber { if _alt == 1 { if p.GetParseListeners() != nil { @@ -1368,7 +1472,8 @@ func (p *CELParser) relation(_p int) (localctx IRelationContext) { p.SetState(62) if !(p.Precpred(p.GetParserRuleContext(), 1)) { - panic(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 1)", "")) + p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 1)", "")) + goto errorExit } { p.SetState(63) @@ -1379,7 +1484,7 @@ func (p *CELParser) relation(_p int) (localctx IRelationContext) { _la = p.GetTokenStream().LA(1) - if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&254) != 0) { + if !(((int64(_la) & ^0x3f) == 0 && ((int64(1) << _la) & 254) != 0)) { var _ri = p.GetErrorHandler().RecoverInline(p) localctx.(*RelationContext).op = _ri @@ -1393,15 +1498,35 @@ func (p *CELParser) relation(_p int) (localctx IRelationContext) { p.relation(2) } + } p.SetState(69) p.GetErrorHandler().Sync(p) - _alt = p.GetInterpreter().AdaptivePredict(p.GetTokenStream(), 3, p.GetParserRuleContext()) + if p.HasError() { + goto errorExit + } + _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 3, p.GetParserRuleContext()) + if p.HasError() { + goto errorExit + } } + + + errorExit: + if p.HasError() { + v := p.GetError() + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + p.SetError(nil) + } + p.UnrollRecursionContexts(_parentctx) return localctx + goto errorExit // Trick to prevent compiler error if the label is not used } + // ICalcContext is an interface to support dynamic dispatch. type ICalcContext interface { antlr.ParserRuleContext @@ -1410,10 +1535,12 @@ type ICalcContext interface { GetParser() antlr.Parser // GetOp returns the op token. - GetOp() antlr.Token + GetOp() antlr.Token + // SetOp sets the op token. 
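The lookahead tests of the form (int64(_la) & ^0x3f) == 0 && ((int64(1) << _la) & MASK) != 0 are set-membership checks: the first clause confirms the token type fits in one 64-bit word, the second tests that token's bit in a precomputed mask. With the token constants above, 254 is exactly the relational operators EQUALS through GREATER (types 1 to 7), and 58720256 is STAR, SLASH and PERCENT (types 23 to 25). A short program that recomputes both masks; the local constants mirror the CELParser token values:

package main

import "fmt"

const (
	EQUALS         = 1
	NOT_EQUALS     = 2
	IN             = 3
	LESS           = 4
	LESS_EQUALS    = 5
	GREATER_EQUALS = 6
	GREATER        = 7
	STAR           = 23
	SLASH          = 24
	PERCENT        = 25
)

// mask builds a 64-bit token set with bit i set for token type i.
func mask(tokens ...int) int64 {
	var m int64
	for _, t := range tokens {
		m |= int64(1) << t
	}
	return m
}

// inSet has the same shape as the generated check: the token type must fit
// in 0..63 and its bit must be present in the mask.
func inSet(la int, m int64) bool {
	return (int64(la) & ^0x3f) == 0 && (int64(1)<<la)&m != 0
}

func main() {
	relOps := mask(EQUALS, NOT_EQUALS, IN, LESS, LESS_EQUALS, GREATER_EQUALS, GREATER)
	mulOps := mask(STAR, SLASH, PERCENT)
	fmt.Println(relOps, mulOps)         // 254 58720256
	fmt.Println(inSet(LESS, relOps))    // true
	fmt.Println(inSet(PERCENT, relOps)) // false
}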
- SetOp(antlr.Token) + SetOp(antlr.Token) + // Getter signatures Unary() IUnaryContext @@ -1430,24 +1557,29 @@ type ICalcContext interface { } type CalcContext struct { - *antlr.BaseParserRuleContext + antlr.BaseParserRuleContext parser antlr.Parser - op antlr.Token + op antlr.Token } func NewEmptyCalcContext() *CalcContext { var p = new(CalcContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) p.RuleIndex = CELParserRULE_calc return p } +func InitEmptyCalcContext(p *CalcContext) { + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = CELParserRULE_calc +} + func (*CalcContext) IsCalcContext() {} func NewCalcContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *CalcContext { var p = new(CalcContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser p.RuleIndex = CELParserRULE_calc @@ -1459,13 +1591,15 @@ func (s *CalcContext) GetParser() antlr.Parser { return s.parser } func (s *CalcContext) GetOp() antlr.Token { return s.op } + func (s *CalcContext) SetOp(v antlr.Token) { s.op = v } + func (s *CalcContext) Unary() IUnaryContext { - var t antlr.RuleContext + var t antlr.RuleContext; for _, ctx := range s.GetChildren() { if _, ok := ctx.(IUnaryContext); ok { - t = ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } } @@ -1499,12 +1633,12 @@ func (s *CalcContext) AllCalc() []ICalcContext { } func (s *CalcContext) Calc(i int) ICalcContext { - var t antlr.RuleContext + var t antlr.RuleContext; j := 0 for _, ctx := range s.GetChildren() { if _, ok := ctx.(ICalcContext); ok { if j == i { - t = ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } j++ @@ -1546,6 +1680,7 @@ func (s *CalcContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) s return antlr.TreesStringTree(s, ruleNames, recog) } + func (s *CalcContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterCalc(s) @@ -1568,15 +1703,17 @@ func (s *CalcContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { } } + + + + func (p *CELParser) Calc() (localctx ICalcContext) { return p.calc(0) } func (p *CELParser) calc(_p int) (localctx ICalcContext) { - this := p - _ = this - var _parentctx antlr.ParserRuleContext = p.GetParserRuleContext() + _parentState := p.GetState() localctx = NewCalcContext(p, p.GetParserRuleContext(), _parentState) var _prevctx ICalcContext = localctx @@ -1585,22 +1722,6 @@ func (p *CELParser) calc(_p int) (localctx ICalcContext) { p.EnterRecursionRule(localctx, 10, CELParserRULE_calc, _p) var _la int - defer func() { - p.UnrollRecursionContexts(_parentctx) - }() - - defer func() { - if err := recover(); err != nil { - if v, ok := err.(antlr.RecognitionException); ok { - localctx.SetException(v) - p.GetErrorHandler().ReportError(p, v) - p.GetErrorHandler().Recover(p, v) - } else { - panic(err) - } - } - }() - var _alt int p.EnterOuterAlt(localctx, 1) @@ -1612,8 +1733,13 @@ func (p *CELParser) calc(_p int) (localctx ICalcContext) { p.GetParserRuleContext().SetStop(p.GetTokenStream().LT(-1)) p.SetState(81) p.GetErrorHandler().Sync(p) - _alt = p.GetInterpreter().AdaptivePredict(p.GetTokenStream(), 5, p.GetParserRuleContext()) - + if p.HasError() { + goto errorExit + } + _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 5, 
p.GetParserRuleContext()) + if p.HasError() { + goto errorExit + } for _alt != 2 && _alt != antlr.ATNInvalidAltNumber { if _alt == 1 { if p.GetParseListeners() != nil { @@ -1622,14 +1748,19 @@ func (p *CELParser) calc(_p int) (localctx ICalcContext) { _prevctx = localctx p.SetState(79) p.GetErrorHandler().Sync(p) - switch p.GetInterpreter().AdaptivePredict(p.GetTokenStream(), 4, p.GetParserRuleContext()) { + if p.HasError() { + goto errorExit + } + + switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 4, p.GetParserRuleContext()) { case 1: localctx = NewCalcContext(p, _parentctx, _parentState) p.PushNewRecursionContext(localctx, _startState, CELParserRULE_calc) p.SetState(73) if !(p.Precpred(p.GetParserRuleContext(), 2)) { - panic(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 2)", "")) + p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 2)", "")) + goto errorExit } { p.SetState(74) @@ -1640,7 +1771,7 @@ func (p *CELParser) calc(_p int) (localctx ICalcContext) { _la = p.GetTokenStream().LA(1) - if !((int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&58720256) != 0) { + if !(((int64(_la) & ^0x3f) == 0 && ((int64(1) << _la) & 58720256) != 0)) { var _ri = p.GetErrorHandler().RecoverInline(p) localctx.(*CalcContext).op = _ri @@ -1654,13 +1785,15 @@ func (p *CELParser) calc(_p int) (localctx ICalcContext) { p.calc(3) } + case 2: localctx = NewCalcContext(p, _parentctx, _parentState) p.PushNewRecursionContext(localctx, _startState, CELParserRULE_calc) p.SetState(76) if !(p.Precpred(p.GetParserRuleContext(), 1)) { - panic(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 1)", "")) + p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 1)", "")) + goto errorExit } { p.SetState(77) @@ -1685,17 +1818,38 @@ func (p *CELParser) calc(_p int) (localctx ICalcContext) { p.calc(2) } + case antlr.ATNInvalidAltNumber: + goto errorExit } } p.SetState(83) p.GetErrorHandler().Sync(p) - _alt = p.GetInterpreter().AdaptivePredict(p.GetTokenStream(), 5, p.GetParserRuleContext()) + if p.HasError() { + goto errorExit + } + _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 5, p.GetParserRuleContext()) + if p.HasError() { + goto errorExit + } } + + + errorExit: + if p.HasError() { + v := p.GetError() + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + p.SetError(nil) + } + p.UnrollRecursionContexts(_parentctx) return localctx + goto errorExit // Trick to prevent compiler error if the label is not used } + // IUnaryContext is an interface to support dynamic dispatch. 
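relation, calc and member are left-recursive rules, so the generated relation(_p), calc(_p) and member(_p) functions thread a precedence argument through EnterRecursionRule and guard each alternative with Precpred before extending the left operand. The effect is the same as classic precedence climbing; the following compact sketch over plain integer arithmetic is an analogy for that control flow, not the generated code.

package main

import (
	"fmt"
	"strconv"
	"strings"
)

var prec = map[string]int{"+": 1, "-": 1, "*": 2, "/": 2, "%": 2}

type toks struct {
	items []string
	pos   int
}

func (t *toks) peek() string {
	if t.pos < len(t.items) {
		return t.items[t.pos]
	}
	return ""
}

// parse(minPrec) mirrors the role of the _p argument: another operator is
// consumed only while its precedence passes the Precpred-style check.
func (t *toks) parse(minPrec int) int {
	left, _ := strconv.Atoi(t.items[t.pos])
	t.pos++
	for {
		op := t.peek()
		p, ok := prec[op]
		if !ok || p < minPrec {
			return left
		}
		t.pos++
		right := t.parse(p + 1) // left-associative: the right side binds tighter
		switch op {
		case "+":
			left += right
		case "-":
			left -= right
		case "*":
			left *= right
		case "/":
			left /= right
		case "%":
			left %= right
		}
	}
}

func main() {
	t := &toks{items: strings.Fields("2 + 3 * 4 - 1")}
	fmt.Println(t.parse(0)) // 13
}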
type IUnaryContext interface { antlr.ParserRuleContext @@ -1707,23 +1861,28 @@ type IUnaryContext interface { } type UnaryContext struct { - *antlr.BaseParserRuleContext + antlr.BaseParserRuleContext parser antlr.Parser } func NewEmptyUnaryContext() *UnaryContext { var p = new(UnaryContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) p.RuleIndex = CELParserRULE_unary return p } +func InitEmptyUnaryContext(p *UnaryContext) { + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = CELParserRULE_unary +} + func (*UnaryContext) IsUnaryContext() {} func NewUnaryContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *UnaryContext { var p = new(UnaryContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser p.RuleIndex = CELParserRULE_unary @@ -1733,8 +1892,8 @@ func NewUnaryContext(parser antlr.Parser, parent antlr.ParserRuleContext, invoki func (s *UnaryContext) GetParser() antlr.Parser { return s.parser } -func (s *UnaryContext) CopyFrom(ctx *UnaryContext) { - s.BaseParserRuleContext.CopyFrom(ctx.BaseParserRuleContext) +func (s *UnaryContext) CopyAll(ctx *UnaryContext) { + s.CopyFrom(&ctx.BaseParserRuleContext) } func (s *UnaryContext) GetRuleContext() antlr.RuleContext { @@ -1745,8 +1904,11 @@ func (s *UnaryContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) return antlr.TreesStringTree(s, ruleNames, recog) } + + + type LogicalNotContext struct { - *UnaryContext + UnaryContext s19 antlr.Token ops []antlr.Token } @@ -1754,19 +1916,23 @@ type LogicalNotContext struct { func NewLogicalNotContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *LogicalNotContext { var p = new(LogicalNotContext) - p.UnaryContext = NewEmptyUnaryContext() + InitEmptyUnaryContext(&p.UnaryContext) p.parser = parser - p.CopyFrom(ctx.(*UnaryContext)) + p.CopyAll(ctx.(*UnaryContext)) return p } + func (s *LogicalNotContext) GetS19() antlr.Token { return s.s19 } + func (s *LogicalNotContext) SetS19(v antlr.Token) { s.s19 = v } + func (s *LogicalNotContext) GetOps() []antlr.Token { return s.ops } + func (s *LogicalNotContext) SetOps(v []antlr.Token) { s.ops = v } func (s *LogicalNotContext) GetRuleContext() antlr.RuleContext { @@ -1774,10 +1940,10 @@ func (s *LogicalNotContext) GetRuleContext() antlr.RuleContext { } func (s *LogicalNotContext) Member() IMemberContext { - var t antlr.RuleContext + var t antlr.RuleContext; for _, ctx := range s.GetChildren() { if _, ok := ctx.(IMemberContext); ok { - t = ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } } @@ -1797,6 +1963,7 @@ func (s *LogicalNotContext) EXCLAM(i int) antlr.TerminalNode { return s.GetToken(CELParserEXCLAM, i) } + func (s *LogicalNotContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterLogicalNot(s) @@ -1819,16 +1986,17 @@ func (s *LogicalNotContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { } } + type MemberExprContext struct { - *UnaryContext + UnaryContext } func NewMemberExprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *MemberExprContext { var p = new(MemberExprContext) - p.UnaryContext = NewEmptyUnaryContext() + InitEmptyUnaryContext(&p.UnaryContext) p.parser = parser - p.CopyFrom(ctx.(*UnaryContext)) + p.CopyAll(ctx.(*UnaryContext)) return p } @@ -1838,10 
+2006,10 @@ func (s *MemberExprContext) GetRuleContext() antlr.RuleContext { } func (s *MemberExprContext) Member() IMemberContext { - var t antlr.RuleContext + var t antlr.RuleContext; for _, ctx := range s.GetChildren() { if _, ok := ctx.(IMemberContext); ok { - t = ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } } @@ -1853,6 +2021,7 @@ func (s *MemberExprContext) Member() IMemberContext { return t.(IMemberContext) } + func (s *MemberExprContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterMemberExpr(s) @@ -1875,8 +2044,9 @@ func (s *MemberExprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { } } + type NegateContext struct { - *UnaryContext + UnaryContext s18 antlr.Token ops []antlr.Token } @@ -1884,19 +2054,23 @@ type NegateContext struct { func NewNegateContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *NegateContext { var p = new(NegateContext) - p.UnaryContext = NewEmptyUnaryContext() + InitEmptyUnaryContext(&p.UnaryContext) p.parser = parser - p.CopyFrom(ctx.(*UnaryContext)) + p.CopyAll(ctx.(*UnaryContext)) return p } + func (s *NegateContext) GetS18() antlr.Token { return s.s18 } + func (s *NegateContext) SetS18(v antlr.Token) { s.s18 = v } + func (s *NegateContext) GetOps() []antlr.Token { return s.ops } + func (s *NegateContext) SetOps(v []antlr.Token) { s.ops = v } func (s *NegateContext) GetRuleContext() antlr.RuleContext { @@ -1904,10 +2078,10 @@ func (s *NegateContext) GetRuleContext() antlr.RuleContext { } func (s *NegateContext) Member() IMemberContext { - var t antlr.RuleContext + var t antlr.RuleContext; for _, ctx := range s.GetChildren() { if _, ok := ctx.(IMemberContext); ok { - t = ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } } @@ -1927,6 +2101,7 @@ func (s *NegateContext) MINUS(i int) antlr.TerminalNode { return s.GetToken(CELParserMINUS, i) } + func (s *NegateContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterNegate(s) @@ -1949,35 +2124,22 @@ func (s *NegateContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { } } -func (p *CELParser) Unary() (localctx IUnaryContext) { - this := p - _ = this + +func (p *CELParser) Unary() (localctx IUnaryContext) { localctx = NewUnaryContext(p, p.GetParserRuleContext(), p.GetState()) p.EnterRule(localctx, 12, CELParserRULE_unary) var _la int - defer func() { - p.ExitRule() - }() - - defer func() { - if err := recover(); err != nil { - if v, ok := err.(antlr.RecognitionException); ok { - localctx.SetException(v) - p.GetErrorHandler().ReportError(p, v) - p.GetErrorHandler().Recover(p, v) - } else { - panic(err) - } - } - }() - var _alt int p.SetState(97) p.GetErrorHandler().Sync(p) - switch p.GetInterpreter().AdaptivePredict(p.GetTokenStream(), 8, p.GetParserRuleContext()) { + if p.HasError() { + goto errorExit + } + + switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 8, p.GetParserRuleContext()) { case 1: localctx = NewMemberExprContext(p, localctx) p.EnterOuterAlt(localctx, 1) @@ -1986,13 +2148,18 @@ func (p *CELParser) Unary() (localctx IUnaryContext) { p.member(0) } + case 2: localctx = NewLogicalNotContext(p, localctx) p.EnterOuterAlt(localctx, 2) p.SetState(86) p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } _la = p.GetTokenStream().LA(1) + for ok := true; ok; ok = _la == CELParserEXCLAM { { p.SetState(85) @@ -2000,11 +2167,19 @@ func (p *CELParser) Unary() (localctx IUnaryContext) { var _m 
= p.Match(CELParserEXCLAM) localctx.(*LogicalNotContext).s19 = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } localctx.(*LogicalNotContext).ops = append(localctx.(*LogicalNotContext).ops, localctx.(*LogicalNotContext).s19) + p.SetState(88) p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } _la = p.GetTokenStream().LA(1) } { @@ -2012,42 +2187,71 @@ func (p *CELParser) Unary() (localctx IUnaryContext) { p.member(0) } + case 3: localctx = NewNegateContext(p, localctx) p.EnterOuterAlt(localctx, 3) p.SetState(92) p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } _alt = 1 for ok := true; ok; ok = _alt != 2 && _alt != antlr.ATNInvalidAltNumber { switch _alt { case 1: - { - p.SetState(91) + { + p.SetState(91) + + var _m = p.Match(CELParserMINUS) + + localctx.(*NegateContext).s18 = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } + } + localctx.(*NegateContext).ops = append(localctx.(*NegateContext).ops, localctx.(*NegateContext).s18) + - var _m = p.Match(CELParserMINUS) - localctx.(*NegateContext).s18 = _m - } - localctx.(*NegateContext).ops = append(localctx.(*NegateContext).ops, localctx.(*NegateContext).s18) default: - panic(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) + p.SetError(antlr.NewNoViableAltException(p, nil, nil, nil, nil, nil)) + goto errorExit } p.SetState(94) p.GetErrorHandler().Sync(p) - _alt = p.GetInterpreter().AdaptivePredict(p.GetTokenStream(), 7, p.GetParserRuleContext()) + _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 7, p.GetParserRuleContext()) + if p.HasError() { + goto errorExit + } } { p.SetState(96) p.member(0) } + case antlr.ATNInvalidAltNumber: + goto errorExit } + +errorExit: + if p.HasError() { + v := p.GetError() + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + p.SetError(nil) + } + p.ExitRule() return localctx + goto errorExit // Trick to prevent compiler error if the label is not used } + // IMemberContext is an interface to support dynamic dispatch. 
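Each labeled alternative of unary, member and primary gets its own context type (MemberExprContext, LogicalNotContext, NegateContext, and so on) that now embeds the rule's base context by value and is populated via CopyAll. Listeners and visitors tell the alternatives apart by the concrete type they receive; a tiny illustration of that dispatch, with stand-in types rather than the generated ones:

package main

import "fmt"

// unaryCtx stands in for the base rule context of the unary rule.
type unaryCtx struct{ text string }

// One type per labeled alternative, each embedding the base context by value.
type memberExprCtx struct{ unaryCtx }
type logicalNotCtx struct {
	unaryCtx
	bangs int
}
type negateCtx struct {
	unaryCtx
	minuses int
}

// describe dispatches on the concrete alternative, the way a listener or
// visitor implementation would type-switch on the context it receives.
func describe(ctx interface{}) string {
	switch c := ctx.(type) {
	case *logicalNotCtx:
		return fmt.Sprintf("logical not (%d '!') of %s", c.bangs, c.text)
	case *negateCtx:
		return fmt.Sprintf("negation (%d '-') of %s", c.minuses, c.text)
	case *memberExprCtx:
		return "plain member expression " + c.text
	default:
		return "unknown alternative"
	}
}

func main() {
	fmt.Println(describe(&logicalNotCtx{unaryCtx{"a.b"}, 2}))
	fmt.Println(describe(&memberExprCtx{unaryCtx{"a.b"}}))
}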
type IMemberContext interface { antlr.ParserRuleContext @@ -2059,23 +2263,28 @@ type IMemberContext interface { } type MemberContext struct { - *antlr.BaseParserRuleContext + antlr.BaseParserRuleContext parser antlr.Parser } func NewEmptyMemberContext() *MemberContext { var p = new(MemberContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) p.RuleIndex = CELParserRULE_member return p } +func InitEmptyMemberContext(p *MemberContext) { + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = CELParserRULE_member +} + func (*MemberContext) IsMemberContext() {} func NewMemberContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *MemberContext { var p = new(MemberContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser p.RuleIndex = CELParserRULE_member @@ -2085,8 +2294,8 @@ func NewMemberContext(parser antlr.Parser, parent antlr.ParserRuleContext, invok func (s *MemberContext) GetParser() antlr.Parser { return s.parser } -func (s *MemberContext) CopyFrom(ctx *MemberContext) { - s.BaseParserRuleContext.CopyFrom(ctx.BaseParserRuleContext) +func (s *MemberContext) CopyAll(ctx *MemberContext) { + s.CopyFrom(&ctx.BaseParserRuleContext) } func (s *MemberContext) GetRuleContext() antlr.RuleContext { @@ -2097,38 +2306,46 @@ func (s *MemberContext) ToStringTree(ruleNames []string, recog antlr.Recognizer) return antlr.TreesStringTree(s, ruleNames, recog) } + + + + type MemberCallContext struct { - *MemberContext - op antlr.Token - id antlr.Token + MemberContext + op antlr.Token + id antlr.Token open antlr.Token - args IExprListContext + args IExprListContext } func NewMemberCallContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *MemberCallContext { var p = new(MemberCallContext) - p.MemberContext = NewEmptyMemberContext() + InitEmptyMemberContext(&p.MemberContext) p.parser = parser - p.CopyFrom(ctx.(*MemberContext)) + p.CopyAll(ctx.(*MemberContext)) return p } + func (s *MemberCallContext) GetOp() antlr.Token { return s.op } func (s *MemberCallContext) GetId() antlr.Token { return s.id } func (s *MemberCallContext) GetOpen() antlr.Token { return s.open } + func (s *MemberCallContext) SetOp(v antlr.Token) { s.op = v } func (s *MemberCallContext) SetId(v antlr.Token) { s.id = v } func (s *MemberCallContext) SetOpen(v antlr.Token) { s.open = v } + func (s *MemberCallContext) GetArgs() IExprListContext { return s.args } + func (s *MemberCallContext) SetArgs(v IExprListContext) { s.args = v } func (s *MemberCallContext) GetRuleContext() antlr.RuleContext { @@ -2136,10 +2353,10 @@ func (s *MemberCallContext) GetRuleContext() antlr.RuleContext { } func (s *MemberCallContext) Member() IMemberContext { - var t antlr.RuleContext + var t antlr.RuleContext; for _, ctx := range s.GetChildren() { if _, ok := ctx.(IMemberContext); ok { - t = ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } } @@ -2168,10 +2385,10 @@ func (s *MemberCallContext) LPAREN() antlr.TerminalNode { } func (s *MemberCallContext) ExprList() IExprListContext { - var t antlr.RuleContext + var t antlr.RuleContext; for _, ctx := range s.GetChildren() { if _, ok := ctx.(IExprListContext); ok { - t = ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } } @@ -2183,6 +2400,7 @@ func (s *MemberCallContext) ExprList() IExprListContext { return 
t.(IExprListContext) } + func (s *MemberCallContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterMemberCall(s) @@ -2205,29 +2423,32 @@ func (s *MemberCallContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { } } + type SelectContext struct { - *MemberContext - op antlr.Token + MemberContext + op antlr.Token opt antlr.Token - id antlr.Token + id antlr.Token } func NewSelectContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *SelectContext { var p = new(SelectContext) - p.MemberContext = NewEmptyMemberContext() + InitEmptyMemberContext(&p.MemberContext) p.parser = parser - p.CopyFrom(ctx.(*MemberContext)) + p.CopyAll(ctx.(*MemberContext)) return p } + func (s *SelectContext) GetOp() antlr.Token { return s.op } func (s *SelectContext) GetOpt() antlr.Token { return s.opt } func (s *SelectContext) GetId() antlr.Token { return s.id } + func (s *SelectContext) SetOp(v antlr.Token) { s.op = v } func (s *SelectContext) SetOpt(v antlr.Token) { s.opt = v } @@ -2239,10 +2460,10 @@ func (s *SelectContext) GetRuleContext() antlr.RuleContext { } func (s *SelectContext) Member() IMemberContext { - var t antlr.RuleContext + var t antlr.RuleContext; for _, ctx := range s.GetChildren() { if _, ok := ctx.(IMemberContext); ok { - t = ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } } @@ -2266,6 +2487,7 @@ func (s *SelectContext) QUESTIONMARK() antlr.TerminalNode { return s.GetToken(CELParserQUESTIONMARK, 0) } + func (s *SelectContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterSelect(s) @@ -2288,16 +2510,17 @@ func (s *SelectContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { } } + type PrimaryExprContext struct { - *MemberContext + MemberContext } func NewPrimaryExprContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *PrimaryExprContext { var p = new(PrimaryExprContext) - p.MemberContext = NewEmptyMemberContext() + InitEmptyMemberContext(&p.MemberContext) p.parser = parser - p.CopyFrom(ctx.(*MemberContext)) + p.CopyAll(ctx.(*MemberContext)) return p } @@ -2307,10 +2530,10 @@ func (s *PrimaryExprContext) GetRuleContext() antlr.RuleContext { } func (s *PrimaryExprContext) Primary() IPrimaryContext { - var t antlr.RuleContext + var t antlr.RuleContext; for _, ctx := range s.GetChildren() { if _, ok := ctx.(IPrimaryContext); ok { - t = ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } } @@ -2322,6 +2545,7 @@ func (s *PrimaryExprContext) Primary() IPrimaryContext { return t.(IPrimaryContext) } + func (s *PrimaryExprContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterPrimaryExpr(s) @@ -2344,33 +2568,38 @@ func (s *PrimaryExprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} } } + type IndexContext struct { - *MemberContext - op antlr.Token - opt antlr.Token - index IExprContext + MemberContext + op antlr.Token + opt antlr.Token + index IExprContext } func NewIndexContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *IndexContext { var p = new(IndexContext) - p.MemberContext = NewEmptyMemberContext() + InitEmptyMemberContext(&p.MemberContext) p.parser = parser - p.CopyFrom(ctx.(*MemberContext)) + p.CopyAll(ctx.(*MemberContext)) return p } + func (s *IndexContext) GetOp() antlr.Token { return s.op } func (s *IndexContext) GetOpt() antlr.Token { return s.opt } + func (s *IndexContext) SetOp(v antlr.Token) { s.op = v } func (s *IndexContext) SetOpt(v 
antlr.Token) { s.opt = v } + func (s *IndexContext) GetIndex() IExprContext { return s.index } + func (s *IndexContext) SetIndex(v IExprContext) { s.index = v } func (s *IndexContext) GetRuleContext() antlr.RuleContext { @@ -2378,10 +2607,10 @@ func (s *IndexContext) GetRuleContext() antlr.RuleContext { } func (s *IndexContext) Member() IMemberContext { - var t antlr.RuleContext + var t antlr.RuleContext; for _, ctx := range s.GetChildren() { if _, ok := ctx.(IMemberContext); ok { - t = ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } } @@ -2402,10 +2631,10 @@ func (s *IndexContext) LBRACKET() antlr.TerminalNode { } func (s *IndexContext) Expr() IExprContext { - var t antlr.RuleContext + var t antlr.RuleContext; for _, ctx := range s.GetChildren() { if _, ok := ctx.(IExprContext); ok { - t = ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } } @@ -2421,6 +2650,7 @@ func (s *IndexContext) QUESTIONMARK() antlr.TerminalNode { return s.GetToken(CELParserQUESTIONMARK, 0) } + func (s *IndexContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterIndex(s) @@ -2443,15 +2673,15 @@ func (s *IndexContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { } } + + func (p *CELParser) Member() (localctx IMemberContext) { return p.member(0) } func (p *CELParser) member(_p int) (localctx IMemberContext) { - this := p - _ = this - var _parentctx antlr.ParserRuleContext = p.GetParserRuleContext() + _parentState := p.GetState() localctx = NewMemberContext(p, p.GetParserRuleContext(), _parentState) var _prevctx IMemberContext = localctx @@ -2460,22 +2690,6 @@ func (p *CELParser) member(_p int) (localctx IMemberContext) { p.EnterRecursionRule(localctx, 14, CELParserRULE_member, _p) var _la int - defer func() { - p.UnrollRecursionContexts(_parentctx) - }() - - defer func() { - if err := recover(); err != nil { - if v, ok := err.(antlr.RecognitionException); ok { - localctx.SetException(v) - p.GetErrorHandler().ReportError(p, v) - p.GetErrorHandler().Recover(p, v) - } else { - panic(err) - } - } - }() - var _alt int p.EnterOuterAlt(localctx, 1) @@ -2491,8 +2705,13 @@ func (p *CELParser) member(_p int) (localctx IMemberContext) { p.GetParserRuleContext().SetStop(p.GetTokenStream().LT(-1)) p.SetState(126) p.GetErrorHandler().Sync(p) - _alt = p.GetInterpreter().AdaptivePredict(p.GetTokenStream(), 13, p.GetParserRuleContext()) - + if p.HasError() { + goto errorExit + } + _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 13, p.GetParserRuleContext()) + if p.HasError() { + goto errorExit + } for _alt != 2 && _alt != antlr.ATNInvalidAltNumber { if _alt == 1 { if p.GetParseListeners() != nil { @@ -2501,14 +2720,19 @@ func (p *CELParser) member(_p int) (localctx IMemberContext) { _prevctx = localctx p.SetState(124) p.GetErrorHandler().Sync(p) - switch p.GetInterpreter().AdaptivePredict(p.GetTokenStream(), 12, p.GetParserRuleContext()) { + if p.HasError() { + goto errorExit + } + + switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 12, p.GetParserRuleContext()) { case 1: localctx = NewSelectContext(p, NewMemberContext(p, _parentctx, _parentState)) p.PushNewRecursionContext(localctx, _startState, CELParserRULE_member) p.SetState(102) if !(p.Precpred(p.GetParserRuleContext(), 3)) { - panic(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 3)", "")) + p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 3)", "")) + goto 
errorExit } { p.SetState(103) @@ -2516,11 +2740,19 @@ func (p *CELParser) member(_p int) (localctx IMemberContext) { var _m = p.Match(CELParserDOT) localctx.(*SelectContext).op = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } p.SetState(105) p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } _la = p.GetTokenStream().LA(1) + if _la == CELParserQUESTIONMARK { { p.SetState(104) @@ -2528,6 +2760,10 @@ func (p *CELParser) member(_p int) (localctx IMemberContext) { var _m = p.Match(CELParserQUESTIONMARK) localctx.(*SelectContext).opt = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } } @@ -2537,15 +2773,21 @@ func (p *CELParser) member(_p int) (localctx IMemberContext) { var _m = p.Match(CELParserIDENTIFIER) localctx.(*SelectContext).id = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } + case 2: localctx = NewMemberCallContext(p, NewMemberContext(p, _parentctx, _parentState)) p.PushNewRecursionContext(localctx, _startState, CELParserRULE_member) p.SetState(108) if !(p.Precpred(p.GetParserRuleContext(), 2)) { - panic(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 2)", "")) + p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 2)", "")) + goto errorExit } { p.SetState(109) @@ -2553,6 +2795,10 @@ func (p *CELParser) member(_p int) (localctx IMemberContext) { var _m = p.Match(CELParserDOT) localctx.(*MemberCallContext).op = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } { p.SetState(110) @@ -2560,6 +2806,10 @@ func (p *CELParser) member(_p int) (localctx IMemberContext) { var _m = p.Match(CELParserIDENTIFIER) localctx.(*MemberCallContext).id = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } { p.SetState(111) @@ -2567,17 +2817,26 @@ func (p *CELParser) member(_p int) (localctx IMemberContext) { var _m = p.Match(CELParserLPAREN) localctx.(*MemberCallContext).open = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } p.SetState(113) p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } _la = p.GetTokenStream().LA(1) - if (int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&135762105344) != 0 { + + if ((int64(_la) & ^0x3f) == 0 && ((int64(1) << _la) & 135762105344) != 0) { { p.SetState(112) var _x = p.ExprList() + localctx.(*MemberCallContext).args = _x } @@ -2585,15 +2844,21 @@ func (p *CELParser) member(_p int) (localctx IMemberContext) { { p.SetState(115) p.Match(CELParserRPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } + case 3: localctx = NewIndexContext(p, NewMemberContext(p, _parentctx, _parentState)) p.PushNewRecursionContext(localctx, _startState, CELParserRULE_member) p.SetState(116) if !(p.Precpred(p.GetParserRuleContext(), 1)) { - panic(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 1)", "")) + p.SetError(antlr.NewFailedPredicateException(p, "p.Precpred(p.GetParserRuleContext(), 1)", "")) + goto errorExit } { p.SetState(117) @@ -2601,11 +2866,19 @@ func (p *CELParser) member(_p int) (localctx IMemberContext) { var _m = p.Match(CELParserLBRACKET) localctx.(*IndexContext).op = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } p.SetState(119) p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } _la = p.GetTokenStream().LA(1) + if _la == CELParserQUESTIONMARK { { p.SetState(118) @@ -2613,6 +2886,10 
@@ func (p *CELParser) member(_p int) (localctx IMemberContext) { var _m = p.Match(CELParserQUESTIONMARK) localctx.(*IndexContext).opt = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } } @@ -2621,24 +2898,50 @@ func (p *CELParser) member(_p int) (localctx IMemberContext) { var _x = p.Expr() + localctx.(*IndexContext).index = _x } { p.SetState(122) p.Match(CELParserRPRACKET) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } + case antlr.ATNInvalidAltNumber: + goto errorExit } } p.SetState(128) p.GetErrorHandler().Sync(p) - _alt = p.GetInterpreter().AdaptivePredict(p.GetTokenStream(), 13, p.GetParserRuleContext()) + if p.HasError() { + goto errorExit + } + _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 13, p.GetParserRuleContext()) + if p.HasError() { + goto errorExit + } } + + + errorExit: + if p.HasError() { + v := p.GetError() + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + p.SetError(nil) + } + p.UnrollRecursionContexts(_parentctx) return localctx + goto errorExit // Trick to prevent compiler error if the label is not used } + // IPrimaryContext is an interface to support dynamic dispatch. type IPrimaryContext interface { antlr.ParserRuleContext @@ -2650,23 +2953,28 @@ type IPrimaryContext interface { } type PrimaryContext struct { - *antlr.BaseParserRuleContext + antlr.BaseParserRuleContext parser antlr.Parser } func NewEmptyPrimaryContext() *PrimaryContext { var p = new(PrimaryContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) p.RuleIndex = CELParserRULE_primary return p } +func InitEmptyPrimaryContext(p *PrimaryContext) { + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = CELParserRULE_primary +} + func (*PrimaryContext) IsPrimaryContext() {} func NewPrimaryContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *PrimaryContext { var p = new(PrimaryContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser p.RuleIndex = CELParserRULE_primary @@ -2676,8 +2984,8 @@ func NewPrimaryContext(parser antlr.Parser, parent antlr.ParserRuleContext, invo func (s *PrimaryContext) GetParser() antlr.Parser { return s.parser } -func (s *PrimaryContext) CopyFrom(ctx *PrimaryContext) { - s.BaseParserRuleContext.CopyFrom(ctx.BaseParserRuleContext) +func (s *PrimaryContext) CopyAll(ctx *PrimaryContext) { + s.CopyFrom(&ctx.BaseParserRuleContext) } func (s *PrimaryContext) GetRuleContext() antlr.RuleContext { @@ -2688,28 +2996,35 @@ func (s *PrimaryContext) ToStringTree(ruleNames []string, recog antlr.Recognizer return antlr.TreesStringTree(s, ruleNames, recog) } + + + type CreateListContext struct { - *PrimaryContext - op antlr.Token - elems IListInitContext + PrimaryContext + op antlr.Token + elems IListInitContext } func NewCreateListContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *CreateListContext { var p = new(CreateListContext) - p.PrimaryContext = NewEmptyPrimaryContext() + InitEmptyPrimaryContext(&p.PrimaryContext) p.parser = parser - p.CopyFrom(ctx.(*PrimaryContext)) + p.CopyAll(ctx.(*PrimaryContext)) return p } + func (s *CreateListContext) GetOp() antlr.Token { return s.op } + func (s *CreateListContext) SetOp(v antlr.Token) { s.op = v } + func (s 
*CreateListContext) GetElems() IListInitContext { return s.elems } + func (s *CreateListContext) SetElems(v IListInitContext) { s.elems = v } func (s *CreateListContext) GetRuleContext() antlr.RuleContext { @@ -2729,10 +3044,10 @@ func (s *CreateListContext) COMMA() antlr.TerminalNode { } func (s *CreateListContext) ListInit() IListInitContext { - var t antlr.RuleContext + var t antlr.RuleContext; for _, ctx := range s.GetChildren() { if _, ok := ctx.(IListInitContext); ok { - t = ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } } @@ -2744,6 +3059,7 @@ func (s *CreateListContext) ListInit() IListInitContext { return t.(IListInitContext) } + func (s *CreateListContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterCreateList(s) @@ -2766,28 +3082,33 @@ func (s *CreateListContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { } } + type CreateStructContext struct { - *PrimaryContext - op antlr.Token - entries IMapInitializerListContext + PrimaryContext + op antlr.Token + entries IMapInitializerListContext } func NewCreateStructContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *CreateStructContext { var p = new(CreateStructContext) - p.PrimaryContext = NewEmptyPrimaryContext() + InitEmptyPrimaryContext(&p.PrimaryContext) p.parser = parser - p.CopyFrom(ctx.(*PrimaryContext)) + p.CopyAll(ctx.(*PrimaryContext)) return p } + func (s *CreateStructContext) GetOp() antlr.Token { return s.op } + func (s *CreateStructContext) SetOp(v antlr.Token) { s.op = v } + func (s *CreateStructContext) GetEntries() IMapInitializerListContext { return s.entries } + func (s *CreateStructContext) SetEntries(v IMapInitializerListContext) { s.entries = v } func (s *CreateStructContext) GetRuleContext() antlr.RuleContext { @@ -2807,10 +3128,10 @@ func (s *CreateStructContext) COMMA() antlr.TerminalNode { } func (s *CreateStructContext) MapInitializerList() IMapInitializerListContext { - var t antlr.RuleContext + var t antlr.RuleContext; for _, ctx := range s.GetChildren() { if _, ok := ctx.(IMapInitializerListContext); ok { - t = ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } } @@ -2822,6 +3143,7 @@ func (s *CreateStructContext) MapInitializerList() IMapInitializerListContext { return t.(IMapInitializerListContext) } + func (s *CreateStructContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterCreateStruct(s) @@ -2844,16 +3166,17 @@ func (s *CreateStructContext) Accept(visitor antlr.ParseTreeVisitor) interface{} } } + type ConstantLiteralContext struct { - *PrimaryContext + PrimaryContext } func NewConstantLiteralContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *ConstantLiteralContext { var p = new(ConstantLiteralContext) - p.PrimaryContext = NewEmptyPrimaryContext() + InitEmptyPrimaryContext(&p.PrimaryContext) p.parser = parser - p.CopyFrom(ctx.(*PrimaryContext)) + p.CopyAll(ctx.(*PrimaryContext)) return p } @@ -2863,10 +3186,10 @@ func (s *ConstantLiteralContext) GetRuleContext() antlr.RuleContext { } func (s *ConstantLiteralContext) Literal() ILiteralContext { - var t antlr.RuleContext + var t antlr.RuleContext; for _, ctx := range s.GetChildren() { if _, ok := ctx.(ILiteralContext); ok { - t = ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } } @@ -2878,6 +3201,7 @@ func (s *ConstantLiteralContext) Literal() ILiteralContext { return t.(ILiteralContext) } + func (s *ConstantLiteralContext) EnterRule(listener 
antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterConstantLiteral(s) @@ -2900,23 +3224,26 @@ func (s *ConstantLiteralContext) Accept(visitor antlr.ParseTreeVisitor) interfac } } + type NestedContext struct { - *PrimaryContext - e IExprContext + PrimaryContext + e IExprContext } func NewNestedContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *NestedContext { var p = new(NestedContext) - p.PrimaryContext = NewEmptyPrimaryContext() + InitEmptyPrimaryContext(&p.PrimaryContext) p.parser = parser - p.CopyFrom(ctx.(*PrimaryContext)) + p.CopyAll(ctx.(*PrimaryContext)) return p } + func (s *NestedContext) GetE() IExprContext { return s.e } + func (s *NestedContext) SetE(v IExprContext) { s.e = v } func (s *NestedContext) GetRuleContext() antlr.RuleContext { @@ -2932,10 +3259,10 @@ func (s *NestedContext) RPAREN() antlr.TerminalNode { } func (s *NestedContext) Expr() IExprContext { - var t antlr.RuleContext + var t antlr.RuleContext; for _, ctx := range s.GetChildren() { if _, ok := ctx.(IExprContext); ok { - t = ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } } @@ -2947,6 +3274,7 @@ func (s *NestedContext) Expr() IExprContext { return t.(IExprContext) } + func (s *NestedContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterNested(s) @@ -2969,27 +3297,29 @@ func (s *NestedContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { } } + type CreateMessageContext struct { - *PrimaryContext - leadingDot antlr.Token + PrimaryContext + leadingDot antlr.Token _IDENTIFIER antlr.Token - ids []antlr.Token - s16 antlr.Token - ops []antlr.Token - op antlr.Token - entries IFieldInitializerListContext + ids []antlr.Token + s16 antlr.Token + ops []antlr.Token + op antlr.Token + entries IFieldInitializerListContext } func NewCreateMessageContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *CreateMessageContext { var p = new(CreateMessageContext) - p.PrimaryContext = NewEmptyPrimaryContext() + InitEmptyPrimaryContext(&p.PrimaryContext) p.parser = parser - p.CopyFrom(ctx.(*PrimaryContext)) + p.CopyAll(ctx.(*PrimaryContext)) return p } + func (s *CreateMessageContext) GetLeadingDot() antlr.Token { return s.leadingDot } func (s *CreateMessageContext) Get_IDENTIFIER() antlr.Token { return s._IDENTIFIER } @@ -2998,6 +3328,7 @@ func (s *CreateMessageContext) GetS16() antlr.Token { return s.s16 } func (s *CreateMessageContext) GetOp() antlr.Token { return s.op } + func (s *CreateMessageContext) SetLeadingDot(v antlr.Token) { s.leadingDot = v } func (s *CreateMessageContext) Set_IDENTIFIER(v antlr.Token) { s._IDENTIFIER = v } @@ -3006,16 +3337,20 @@ func (s *CreateMessageContext) SetS16(v antlr.Token) { s.s16 = v } func (s *CreateMessageContext) SetOp(v antlr.Token) { s.op = v } + func (s *CreateMessageContext) GetIds() []antlr.Token { return s.ids } func (s *CreateMessageContext) GetOps() []antlr.Token { return s.ops } + func (s *CreateMessageContext) SetIds(v []antlr.Token) { s.ids = v } func (s *CreateMessageContext) SetOps(v []antlr.Token) { s.ops = v } + func (s *CreateMessageContext) GetEntries() IFieldInitializerListContext { return s.entries } + func (s *CreateMessageContext) SetEntries(v IFieldInitializerListContext) { s.entries = v } func (s *CreateMessageContext) GetRuleContext() antlr.RuleContext { @@ -3051,10 +3386,10 @@ func (s *CreateMessageContext) DOT(i int) antlr.TerminalNode { } func (s *CreateMessageContext) FieldInitializerList() IFieldInitializerListContext { - var 
t antlr.RuleContext + var t antlr.RuleContext; for _, ctx := range s.GetChildren() { if _, ok := ctx.(IFieldInitializerListContext); ok { - t = ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } } @@ -3066,6 +3401,7 @@ func (s *CreateMessageContext) FieldInitializerList() IFieldInitializerListConte return t.(IFieldInitializerListContext) } + func (s *CreateMessageContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterCreateMessage(s) @@ -3088,38 +3424,43 @@ func (s *CreateMessageContext) Accept(visitor antlr.ParseTreeVisitor) interface{ } } + type IdentOrGlobalCallContext struct { - *PrimaryContext + PrimaryContext leadingDot antlr.Token - id antlr.Token - op antlr.Token - args IExprListContext + id antlr.Token + op antlr.Token + args IExprListContext } func NewIdentOrGlobalCallContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *IdentOrGlobalCallContext { var p = new(IdentOrGlobalCallContext) - p.PrimaryContext = NewEmptyPrimaryContext() + InitEmptyPrimaryContext(&p.PrimaryContext) p.parser = parser - p.CopyFrom(ctx.(*PrimaryContext)) + p.CopyAll(ctx.(*PrimaryContext)) return p } + func (s *IdentOrGlobalCallContext) GetLeadingDot() antlr.Token { return s.leadingDot } func (s *IdentOrGlobalCallContext) GetId() antlr.Token { return s.id } func (s *IdentOrGlobalCallContext) GetOp() antlr.Token { return s.op } + func (s *IdentOrGlobalCallContext) SetLeadingDot(v antlr.Token) { s.leadingDot = v } func (s *IdentOrGlobalCallContext) SetId(v antlr.Token) { s.id = v } func (s *IdentOrGlobalCallContext) SetOp(v antlr.Token) { s.op = v } + func (s *IdentOrGlobalCallContext) GetArgs() IExprListContext { return s.args } + func (s *IdentOrGlobalCallContext) SetArgs(v IExprListContext) { s.args = v } func (s *IdentOrGlobalCallContext) GetRuleContext() antlr.RuleContext { @@ -3143,10 +3484,10 @@ func (s *IdentOrGlobalCallContext) LPAREN() antlr.TerminalNode { } func (s *IdentOrGlobalCallContext) ExprList() IExprListContext { - var t antlr.RuleContext + var t antlr.RuleContext; for _, ctx := range s.GetChildren() { if _, ok := ctx.(IExprListContext); ok { - t = ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } } @@ -3158,6 +3499,7 @@ func (s *IdentOrGlobalCallContext) ExprList() IExprListContext { return t.(IExprListContext) } + func (s *IdentOrGlobalCallContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterIdentOrGlobalCall(s) @@ -3180,40 +3522,31 @@ func (s *IdentOrGlobalCallContext) Accept(visitor antlr.ParseTreeVisitor) interf } } -func (p *CELParser) Primary() (localctx IPrimaryContext) { - this := p - _ = this + +func (p *CELParser) Primary() (localctx IPrimaryContext) { localctx = NewPrimaryContext(p, p.GetParserRuleContext(), p.GetState()) p.EnterRule(localctx, 16, CELParserRULE_primary) var _la int - defer func() { - p.ExitRule() - }() - - defer func() { - if err := recover(); err != nil { - if v, ok := err.(antlr.RecognitionException); ok { - localctx.SetException(v) - p.GetErrorHandler().ReportError(p, v) - p.GetErrorHandler().Recover(p, v) - } else { - panic(err) - } - } - }() - p.SetState(180) p.GetErrorHandler().Sync(p) - switch p.GetInterpreter().AdaptivePredict(p.GetTokenStream(), 25, p.GetParserRuleContext()) { + if p.HasError() { + goto errorExit + } + + switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 25, p.GetParserRuleContext()) { case 1: localctx = NewIdentOrGlobalCallContext(p, localctx) 
p.EnterOuterAlt(localctx, 1) p.SetState(130) p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } _la = p.GetTokenStream().LA(1) + if _la == CELParserDOT { { p.SetState(129) @@ -3221,6 +3554,10 @@ func (p *CELParser) Primary() (localctx IPrimaryContext) { var _m = p.Match(CELParserDOT) localctx.(*IdentOrGlobalCallContext).leadingDot = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } } @@ -3230,28 +3567,42 @@ func (p *CELParser) Primary() (localctx IPrimaryContext) { var _m = p.Match(CELParserIDENTIFIER) localctx.(*IdentOrGlobalCallContext).id = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } p.SetState(138) p.GetErrorHandler().Sync(p) - if p.GetInterpreter().AdaptivePredict(p.GetTokenStream(), 16, p.GetParserRuleContext()) == 1 { + + if p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 16, p.GetParserRuleContext()) == 1 { { p.SetState(133) var _m = p.Match(CELParserLPAREN) localctx.(*IdentOrGlobalCallContext).op = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } p.SetState(135) p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } _la = p.GetTokenStream().LA(1) - if (int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&135762105344) != 0 { + + if ((int64(_la) & ^0x3f) == 0 && ((int64(1) << _la) & 135762105344) != 0) { { p.SetState(134) var _x = p.ExprList() + localctx.(*IdentOrGlobalCallContext).args = _x } @@ -3259,29 +3610,46 @@ func (p *CELParser) Primary() (localctx IPrimaryContext) { { p.SetState(137) p.Match(CELParserRPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } + } else if p.HasError() { // JIM + goto errorExit } + case 2: localctx = NewNestedContext(p, localctx) p.EnterOuterAlt(localctx, 2) { p.SetState(140) p.Match(CELParserLPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } { p.SetState(141) var _x = p.Expr() + localctx.(*NestedContext).e = _x } { p.SetState(142) p.Match(CELParserRPAREN) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } + case 3: localctx = NewCreateListContext(p, localctx) p.EnterOuterAlt(localctx, 3) @@ -3291,37 +3659,59 @@ func (p *CELParser) Primary() (localctx IPrimaryContext) { var _m = p.Match(CELParserLBRACKET) localctx.(*CreateListContext).op = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } p.SetState(146) p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } _la = p.GetTokenStream().LA(1) - if (int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&135763153920) != 0 { + + if ((int64(_la) & ^0x3f) == 0 && ((int64(1) << _la) & 135763153920) != 0) { { p.SetState(145) var _x = p.ListInit() + localctx.(*CreateListContext).elems = _x } } p.SetState(149) p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } _la = p.GetTokenStream().LA(1) + if _la == CELParserCOMMA { { p.SetState(148) p.Match(CELParserCOMMA) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } } { p.SetState(151) p.Match(CELParserRPRACKET) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } + case 4: localctx = NewCreateStructContext(p, localctx) p.EnterOuterAlt(localctx, 4) @@ -3331,44 +3721,70 @@ func (p *CELParser) Primary() (localctx IPrimaryContext) { var _m = p.Match(CELParserLBRACE) localctx.(*CreateStructContext).op = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } p.SetState(154) 
p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } _la = p.GetTokenStream().LA(1) - if (int64(_la) & ^0x3f) == 0 && ((int64(1)<<_la)&135763153920) != 0 { + + if ((int64(_la) & ^0x3f) == 0 && ((int64(1) << _la) & 135763153920) != 0) { { p.SetState(153) var _x = p.MapInitializerList() + localctx.(*CreateStructContext).entries = _x } } p.SetState(157) p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } _la = p.GetTokenStream().LA(1) + if _la == CELParserCOMMA { { p.SetState(156) p.Match(CELParserCOMMA) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } } { p.SetState(159) p.Match(CELParserRBRACE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } + case 5: localctx = NewCreateMessageContext(p, localctx) p.EnterOuterAlt(localctx, 5) p.SetState(161) p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } _la = p.GetTokenStream().LA(1) + if _la == CELParserDOT { { p.SetState(160) @@ -3376,6 +3792,10 @@ func (p *CELParser) Primary() (localctx IPrimaryContext) { var _m = p.Match(CELParserDOT) localctx.(*CreateMessageContext).leadingDot = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } } @@ -3385,12 +3805,20 @@ func (p *CELParser) Primary() (localctx IPrimaryContext) { var _m = p.Match(CELParserIDENTIFIER) localctx.(*CreateMessageContext)._IDENTIFIER = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } localctx.(*CreateMessageContext).ids = append(localctx.(*CreateMessageContext).ids, localctx.(*CreateMessageContext)._IDENTIFIER) p.SetState(168) p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } _la = p.GetTokenStream().LA(1) + for _la == CELParserDOT { { p.SetState(164) @@ -3398,6 +3826,10 @@ func (p *CELParser) Primary() (localctx IPrimaryContext) { var _m = p.Match(CELParserDOT) localctx.(*CreateMessageContext).s16 = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } localctx.(*CreateMessageContext).ops = append(localctx.(*CreateMessageContext).ops, localctx.(*CreateMessageContext).s16) { @@ -3406,11 +3838,19 @@ func (p *CELParser) Primary() (localctx IPrimaryContext) { var _m = p.Match(CELParserIDENTIFIER) localctx.(*CreateMessageContext)._IDENTIFIER = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } localctx.(*CreateMessageContext).ids = append(localctx.(*CreateMessageContext).ids, localctx.(*CreateMessageContext)._IDENTIFIER) + p.SetState(170) p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } _la = p.GetTokenStream().LA(1) } { @@ -3419,37 +3859,59 @@ func (p *CELParser) Primary() (localctx IPrimaryContext) { var _m = p.Match(CELParserLBRACE) localctx.(*CreateMessageContext).op = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } p.SetState(173) p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } _la = p.GetTokenStream().LA(1) + if _la == CELParserQUESTIONMARK || _la == CELParserIDENTIFIER { { p.SetState(172) var _x = p.FieldInitializerList() + localctx.(*CreateMessageContext).entries = _x } } p.SetState(176) p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } _la = p.GetTokenStream().LA(1) + if _la == CELParserCOMMA { { p.SetState(175) p.Match(CELParserCOMMA) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } } { p.SetState(178) p.Match(CELParserRBRACE) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } + 
case 6: localctx = NewConstantLiteralContext(p, localctx) p.EnterOuterAlt(localctx, 6) @@ -3458,11 +3920,25 @@ func (p *CELParser) Primary() (localctx IPrimaryContext) { p.Literal() } + case antlr.ATNInvalidAltNumber: + goto errorExit } + +errorExit: + if p.HasError() { + v := p.GetError() + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + p.SetError(nil) + } + p.ExitRule() return localctx + goto errorExit // Trick to prevent compiler error if the label is not used } + // IExprListContext is an interface to support dynamic dispatch. type IExprListContext interface { antlr.ParserRuleContext @@ -3473,14 +3949,18 @@ type IExprListContext interface { // Get_expr returns the _expr rule contexts. Get_expr() IExprContext + // Set_expr sets the _expr rule contexts. Set_expr(IExprContext) + // GetE returns the e rule context list. GetE() []IExprContext + // SetE sets the e rule context list. - SetE([]IExprContext) + SetE([]IExprContext) + // Getter signatures AllExpr() []IExprContext @@ -3493,25 +3973,30 @@ type IExprListContext interface { } type ExprListContext struct { - *antlr.BaseParserRuleContext + antlr.BaseParserRuleContext parser antlr.Parser - _expr IExprContext - e []IExprContext + _expr IExprContext + e []IExprContext } func NewEmptyExprListContext() *ExprListContext { var p = new(ExprListContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) p.RuleIndex = CELParserRULE_exprList return p } +func InitEmptyExprListContext(p *ExprListContext) { + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = CELParserRULE_exprList +} + func (*ExprListContext) IsExprListContext() {} func NewExprListContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ExprListContext { var p = new(ExprListContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser p.RuleIndex = CELParserRULE_exprList @@ -3523,12 +4008,16 @@ func (s *ExprListContext) GetParser() antlr.Parser { return s.parser } func (s *ExprListContext) Get_expr() IExprContext { return s._expr } + func (s *ExprListContext) Set_expr(v IExprContext) { s._expr = v } + func (s *ExprListContext) GetE() []IExprContext { return s.e } + func (s *ExprListContext) SetE(v []IExprContext) { s.e = v } + func (s *ExprListContext) AllExpr() []IExprContext { children := s.GetChildren() len := 0 @@ -3551,12 +4040,12 @@ func (s *ExprListContext) AllExpr() []IExprContext { } func (s *ExprListContext) Expr(i int) IExprContext { - var t antlr.RuleContext + var t antlr.RuleContext; j := 0 for _, ctx := range s.GetChildren() { if _, ok := ctx.(IExprContext); ok { if j == i { - t = ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } j++ @@ -3586,6 +4075,7 @@ func (s *ExprListContext) ToStringTree(ruleNames []string, recog antlr.Recognize return antlr.TreesStringTree(s, ruleNames, recog) } + func (s *ExprListContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterExprList(s) @@ -3608,65 +4098,76 @@ func (s *ExprListContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { } } -func (p *CELParser) ExprList() (localctx IExprListContext) { - this := p - _ = this + + +func (p *CELParser) ExprList() (localctx IExprListContext) { localctx = NewExprListContext(p, 
p.GetParserRuleContext(), p.GetState()) p.EnterRule(localctx, 18, CELParserRULE_exprList) var _la int - defer func() { - p.ExitRule() - }() - - defer func() { - if err := recover(); err != nil { - if v, ok := err.(antlr.RecognitionException); ok { - localctx.SetException(v) - p.GetErrorHandler().ReportError(p, v) - p.GetErrorHandler().Recover(p, v) - } else { - panic(err) - } - } - }() - p.EnterOuterAlt(localctx, 1) { p.SetState(182) var _x = p.Expr() + localctx.(*ExprListContext)._expr = _x } localctx.(*ExprListContext).e = append(localctx.(*ExprListContext).e, localctx.(*ExprListContext)._expr) p.SetState(187) p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } _la = p.GetTokenStream().LA(1) + for _la == CELParserCOMMA { { p.SetState(183) p.Match(CELParserCOMMA) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } { p.SetState(184) var _x = p.Expr() + localctx.(*ExprListContext)._expr = _x } localctx.(*ExprListContext).e = append(localctx.(*ExprListContext).e, localctx.(*ExprListContext)._expr) + p.SetState(189) p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } _la = p.GetTokenStream().LA(1) } + + +errorExit: + if p.HasError() { + v := p.GetError() + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + p.SetError(nil) + } + p.ExitRule() return localctx + goto errorExit // Trick to prevent compiler error if the label is not used } + // IListInitContext is an interface to support dynamic dispatch. type IListInitContext interface { antlr.ParserRuleContext @@ -3677,14 +4178,18 @@ type IListInitContext interface { // Get_optExpr returns the _optExpr rule contexts. Get_optExpr() IOptExprContext + // Set_optExpr sets the _optExpr rule contexts. Set_optExpr(IOptExprContext) + // GetElems returns the elems rule context list. GetElems() []IOptExprContext + // SetElems sets the elems rule context list. 
- SetElems([]IOptExprContext) + SetElems([]IOptExprContext) + // Getter signatures AllOptExpr() []IOptExprContext @@ -3697,25 +4202,30 @@ type IListInitContext interface { } type ListInitContext struct { - *antlr.BaseParserRuleContext - parser antlr.Parser - _optExpr IOptExprContext - elems []IOptExprContext + antlr.BaseParserRuleContext + parser antlr.Parser + _optExpr IOptExprContext + elems []IOptExprContext } func NewEmptyListInitContext() *ListInitContext { var p = new(ListInitContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) p.RuleIndex = CELParserRULE_listInit return p } +func InitEmptyListInitContext(p *ListInitContext) { + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = CELParserRULE_listInit +} + func (*ListInitContext) IsListInitContext() {} func NewListInitContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *ListInitContext { var p = new(ListInitContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser p.RuleIndex = CELParserRULE_listInit @@ -3727,12 +4237,16 @@ func (s *ListInitContext) GetParser() antlr.Parser { return s.parser } func (s *ListInitContext) Get_optExpr() IOptExprContext { return s._optExpr } + func (s *ListInitContext) Set_optExpr(v IOptExprContext) { s._optExpr = v } + func (s *ListInitContext) GetElems() []IOptExprContext { return s.elems } + func (s *ListInitContext) SetElems(v []IOptExprContext) { s.elems = v } + func (s *ListInitContext) AllOptExpr() []IOptExprContext { children := s.GetChildren() len := 0 @@ -3755,12 +4269,12 @@ func (s *ListInitContext) AllOptExpr() []IOptExprContext { } func (s *ListInitContext) OptExpr(i int) IOptExprContext { - var t antlr.RuleContext + var t antlr.RuleContext; j := 0 for _, ctx := range s.GetChildren() { if _, ok := ctx.(IOptExprContext); ok { if j == i { - t = ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } j++ @@ -3790,6 +4304,7 @@ func (s *ListInitContext) ToStringTree(ruleNames []string, recog antlr.Recognize return antlr.TreesStringTree(s, ruleNames, recog) } + func (s *ListInitContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterListInit(s) @@ -3812,29 +4327,12 @@ func (s *ListInitContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { } } -func (p *CELParser) ListInit() (localctx IListInitContext) { - this := p - _ = this - localctx = NewListInitContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 20, CELParserRULE_listInit) - defer func() { - p.ExitRule() - }() - - defer func() { - if err := recover(); err != nil { - if v, ok := err.(antlr.RecognitionException); ok { - localctx.SetException(v) - p.GetErrorHandler().ReportError(p, v) - p.GetErrorHandler().Recover(p, v) - } else { - panic(err) - } - } - }() +func (p *CELParser) ListInit() (localctx IListInitContext) { + localctx = NewListInitContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 20, CELParserRULE_listInit) var _alt int p.EnterOuterAlt(localctx, 1) @@ -3843,37 +4341,68 @@ func (p *CELParser) ListInit() (localctx IListInitContext) { var _x = p.OptExpr() + localctx.(*ListInitContext)._optExpr = _x } localctx.(*ListInitContext).elems = append(localctx.(*ListInitContext).elems, localctx.(*ListInitContext)._optExpr) p.SetState(195) 
p.GetErrorHandler().Sync(p) - _alt = p.GetInterpreter().AdaptivePredict(p.GetTokenStream(), 27, p.GetParserRuleContext()) - + if p.HasError() { + goto errorExit + } + _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 27, p.GetParserRuleContext()) + if p.HasError() { + goto errorExit + } for _alt != 2 && _alt != antlr.ATNInvalidAltNumber { if _alt == 1 { { p.SetState(191) p.Match(CELParserCOMMA) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } { p.SetState(192) var _x = p.OptExpr() + localctx.(*ListInitContext)._optExpr = _x } localctx.(*ListInitContext).elems = append(localctx.(*ListInitContext).elems, localctx.(*ListInitContext)._optExpr) + } p.SetState(197) p.GetErrorHandler().Sync(p) - _alt = p.GetInterpreter().AdaptivePredict(p.GetTokenStream(), 27, p.GetParserRuleContext()) + if p.HasError() { + goto errorExit + } + _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 27, p.GetParserRuleContext()) + if p.HasError() { + goto errorExit + } } + + +errorExit: + if p.HasError() { + v := p.GetError() + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + p.SetError(nil) + } + p.ExitRule() return localctx + goto errorExit // Trick to prevent compiler error if the label is not used } + // IFieldInitializerListContext is an interface to support dynamic dispatch. type IFieldInitializerListContext interface { antlr.ParserRuleContext @@ -3882,40 +4411,48 @@ type IFieldInitializerListContext interface { GetParser() antlr.Parser // GetS21 returns the s21 token. - GetS21() antlr.Token + GetS21() antlr.Token + // SetS21 sets the s21 token. - SetS21(antlr.Token) + SetS21(antlr.Token) + // GetCols returns the cols token list. GetCols() []antlr.Token + // SetCols sets the cols token list. SetCols([]antlr.Token) + // Get_optField returns the _optField rule contexts. Get_optField() IOptFieldContext // Get_expr returns the _expr rule contexts. Get_expr() IExprContext + // Set_optField sets the _optField rule contexts. Set_optField(IOptFieldContext) // Set_expr sets the _expr rule contexts. Set_expr(IExprContext) + // GetFields returns the fields rule context list. GetFields() []IOptFieldContext // GetValues returns the values rule context list. GetValues() []IExprContext + // SetFields sets the fields rule context list. - SetFields([]IOptFieldContext) + SetFields([]IOptFieldContext) // SetValues sets the values rule context list. 
- SetValues([]IExprContext) + SetValues([]IExprContext) + // Getter signatures AllOptField() []IOptFieldContext @@ -3932,29 +4469,34 @@ type IFieldInitializerListContext interface { } type FieldInitializerListContext struct { - *antlr.BaseParserRuleContext - parser antlr.Parser - _optField IOptFieldContext - fields []IOptFieldContext - s21 antlr.Token - cols []antlr.Token - _expr IExprContext - values []IExprContext + antlr.BaseParserRuleContext + parser antlr.Parser + _optField IOptFieldContext + fields []IOptFieldContext + s21 antlr.Token + cols []antlr.Token + _expr IExprContext + values []IExprContext } func NewEmptyFieldInitializerListContext() *FieldInitializerListContext { var p = new(FieldInitializerListContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) p.RuleIndex = CELParserRULE_fieldInitializerList return p } +func InitEmptyFieldInitializerListContext(p *FieldInitializerListContext) { + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = CELParserRULE_fieldInitializerList +} + func (*FieldInitializerListContext) IsFieldInitializerListContext() {} func NewFieldInitializerListContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *FieldInitializerListContext { var p = new(FieldInitializerListContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser p.RuleIndex = CELParserRULE_fieldInitializerList @@ -3966,28 +4508,36 @@ func (s *FieldInitializerListContext) GetParser() antlr.Parser { return s.parser func (s *FieldInitializerListContext) GetS21() antlr.Token { return s.s21 } + func (s *FieldInitializerListContext) SetS21(v antlr.Token) { s.s21 = v } + func (s *FieldInitializerListContext) GetCols() []antlr.Token { return s.cols } + func (s *FieldInitializerListContext) SetCols(v []antlr.Token) { s.cols = v } + func (s *FieldInitializerListContext) Get_optField() IOptFieldContext { return s._optField } func (s *FieldInitializerListContext) Get_expr() IExprContext { return s._expr } + func (s *FieldInitializerListContext) Set_optField(v IOptFieldContext) { s._optField = v } func (s *FieldInitializerListContext) Set_expr(v IExprContext) { s._expr = v } + func (s *FieldInitializerListContext) GetFields() []IOptFieldContext { return s.fields } func (s *FieldInitializerListContext) GetValues() []IExprContext { return s.values } + func (s *FieldInitializerListContext) SetFields(v []IOptFieldContext) { s.fields = v } func (s *FieldInitializerListContext) SetValues(v []IExprContext) { s.values = v } + func (s *FieldInitializerListContext) AllOptField() []IOptFieldContext { children := s.GetChildren() len := 0 @@ -4010,12 +4560,12 @@ func (s *FieldInitializerListContext) AllOptField() []IOptFieldContext { } func (s *FieldInitializerListContext) OptField(i int) IOptFieldContext { - var t antlr.RuleContext + var t antlr.RuleContext; j := 0 for _, ctx := range s.GetChildren() { if _, ok := ctx.(IOptFieldContext); ok { if j == i { - t = ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } j++ @@ -4059,12 +4609,12 @@ func (s *FieldInitializerListContext) AllExpr() []IExprContext { } func (s *FieldInitializerListContext) Expr(i int) IExprContext { - var t antlr.RuleContext + var t antlr.RuleContext; j := 0 for _, ctx := range s.GetChildren() { if _, ok := ctx.(IExprContext); ok { if j == i { - t = 
ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } j++ @@ -4094,6 +4644,7 @@ func (s *FieldInitializerListContext) ToStringTree(ruleNames []string, recog ant return antlr.TreesStringTree(s, ruleNames, recog) } + func (s *FieldInitializerListContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterFieldInitializerList(s) @@ -4116,29 +4667,12 @@ func (s *FieldInitializerListContext) Accept(visitor antlr.ParseTreeVisitor) int } } -func (p *CELParser) FieldInitializerList() (localctx IFieldInitializerListContext) { - this := p - _ = this - localctx = NewFieldInitializerListContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 22, CELParserRULE_fieldInitializerList) - defer func() { - p.ExitRule() - }() - - defer func() { - if err := recover(); err != nil { - if v, ok := err.(antlr.RecognitionException); ok { - localctx.SetException(v) - p.GetErrorHandler().ReportError(p, v) - p.GetErrorHandler().Recover(p, v) - } else { - panic(err) - } - } - }() +func (p *CELParser) FieldInitializerList() (localctx IFieldInitializerListContext) { + localctx = NewFieldInitializerListContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 22, CELParserRULE_fieldInitializerList) var _alt int p.EnterOuterAlt(localctx, 1) @@ -4147,6 +4681,7 @@ func (p *CELParser) FieldInitializerList() (localctx IFieldInitializerListContex var _x = p.OptField() + localctx.(*FieldInitializerListContext)._optField = _x } localctx.(*FieldInitializerListContext).fields = append(localctx.(*FieldInitializerListContext).fields, localctx.(*FieldInitializerListContext)._optField) @@ -4156,6 +4691,10 @@ func (p *CELParser) FieldInitializerList() (localctx IFieldInitializerListContex var _m = p.Match(CELParserCOLON) localctx.(*FieldInitializerListContext).s21 = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } localctx.(*FieldInitializerListContext).cols = append(localctx.(*FieldInitializerListContext).cols, localctx.(*FieldInitializerListContext).s21) { @@ -4163,24 +4702,35 @@ func (p *CELParser) FieldInitializerList() (localctx IFieldInitializerListContex var _x = p.Expr() + localctx.(*FieldInitializerListContext)._expr = _x } localctx.(*FieldInitializerListContext).values = append(localctx.(*FieldInitializerListContext).values, localctx.(*FieldInitializerListContext)._expr) p.SetState(208) p.GetErrorHandler().Sync(p) - _alt = p.GetInterpreter().AdaptivePredict(p.GetTokenStream(), 28, p.GetParserRuleContext()) - + if p.HasError() { + goto errorExit + } + _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 28, p.GetParserRuleContext()) + if p.HasError() { + goto errorExit + } for _alt != 2 && _alt != antlr.ATNInvalidAltNumber { if _alt == 1 { { p.SetState(201) p.Match(CELParserCOMMA) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } { p.SetState(202) var _x = p.OptField() + localctx.(*FieldInitializerListContext)._optField = _x } localctx.(*FieldInitializerListContext).fields = append(localctx.(*FieldInitializerListContext).fields, localctx.(*FieldInitializerListContext)._optField) @@ -4190,6 +4740,10 @@ func (p *CELParser) FieldInitializerList() (localctx IFieldInitializerListContex var _m = p.Match(CELParserCOLON) localctx.(*FieldInitializerListContext).s21 = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } localctx.(*FieldInitializerListContext).cols = append(localctx.(*FieldInitializerListContext).cols, 
localctx.(*FieldInitializerListContext).s21) { @@ -4197,19 +4751,40 @@ func (p *CELParser) FieldInitializerList() (localctx IFieldInitializerListContex var _x = p.Expr() + localctx.(*FieldInitializerListContext)._expr = _x } localctx.(*FieldInitializerListContext).values = append(localctx.(*FieldInitializerListContext).values, localctx.(*FieldInitializerListContext)._expr) + } p.SetState(210) p.GetErrorHandler().Sync(p) - _alt = p.GetInterpreter().AdaptivePredict(p.GetTokenStream(), 28, p.GetParserRuleContext()) + if p.HasError() { + goto errorExit + } + _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 28, p.GetParserRuleContext()) + if p.HasError() { + goto errorExit + } } + + +errorExit: + if p.HasError() { + v := p.GetError() + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + p.SetError(nil) + } + p.ExitRule() return localctx + goto errorExit // Trick to prevent compiler error if the label is not used } + // IOptFieldContext is an interface to support dynamic dispatch. type IOptFieldContext interface { antlr.ParserRuleContext @@ -4218,10 +4793,12 @@ type IOptFieldContext interface { GetParser() antlr.Parser // GetOpt returns the opt token. - GetOpt() antlr.Token + GetOpt() antlr.Token + // SetOpt sets the opt token. - SetOpt(antlr.Token) + SetOpt(antlr.Token) + // Getter signatures IDENTIFIER() antlr.TerminalNode @@ -4232,24 +4809,29 @@ type IOptFieldContext interface { } type OptFieldContext struct { - *antlr.BaseParserRuleContext + antlr.BaseParserRuleContext parser antlr.Parser - opt antlr.Token + opt antlr.Token } func NewEmptyOptFieldContext() *OptFieldContext { var p = new(OptFieldContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) p.RuleIndex = CELParserRULE_optField return p } +func InitEmptyOptFieldContext(p *OptFieldContext) { + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = CELParserRULE_optField +} + func (*OptFieldContext) IsOptFieldContext() {} func NewOptFieldContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *OptFieldContext { var p = new(OptFieldContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser p.RuleIndex = CELParserRULE_optField @@ -4261,8 +4843,10 @@ func (s *OptFieldContext) GetParser() antlr.Parser { return s.parser } func (s *OptFieldContext) GetOpt() antlr.Token { return s.opt } + func (s *OptFieldContext) SetOpt(v antlr.Token) { s.opt = v } + func (s *OptFieldContext) IDENTIFIER() antlr.TerminalNode { return s.GetToken(CELParserIDENTIFIER, 0) } @@ -4279,6 +4863,7 @@ func (s *OptFieldContext) ToStringTree(ruleNames []string, recog antlr.Recognize return antlr.TreesStringTree(s, ruleNames, recog) } + func (s *OptFieldContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterOptField(s) @@ -4301,35 +4886,23 @@ func (s *OptFieldContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { } } -func (p *CELParser) OptField() (localctx IOptFieldContext) { - this := p - _ = this + + +func (p *CELParser) OptField() (localctx IOptFieldContext) { localctx = NewOptFieldContext(p, p.GetParserRuleContext(), p.GetState()) p.EnterRule(localctx, 24, CELParserRULE_optField) var _la int - defer func() { - p.ExitRule() - }() - - defer func() { - 
if err := recover(); err != nil { - if v, ok := err.(antlr.RecognitionException); ok { - localctx.SetException(v) - p.GetErrorHandler().ReportError(p, v) - p.GetErrorHandler().Recover(p, v) - } else { - panic(err) - } - } - }() - p.EnterOuterAlt(localctx, 1) p.SetState(212) p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } _la = p.GetTokenStream().LA(1) + if _la == CELParserQUESTIONMARK { { p.SetState(211) @@ -4337,17 +4910,38 @@ func (p *CELParser) OptField() (localctx IOptFieldContext) { var _m = p.Match(CELParserQUESTIONMARK) localctx.(*OptFieldContext).opt = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } } { p.SetState(214) p.Match(CELParserIDENTIFIER) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } + + +errorExit: + if p.HasError() { + v := p.GetError() + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + p.SetError(nil) + } + p.ExitRule() return localctx + goto errorExit // Trick to prevent compiler error if the label is not used } + // IMapInitializerListContext is an interface to support dynamic dispatch. type IMapInitializerListContext interface { antlr.ParserRuleContext @@ -4356,40 +4950,48 @@ type IMapInitializerListContext interface { GetParser() antlr.Parser // GetS21 returns the s21 token. - GetS21() antlr.Token + GetS21() antlr.Token + // SetS21 sets the s21 token. - SetS21(antlr.Token) + SetS21(antlr.Token) + // GetCols returns the cols token list. GetCols() []antlr.Token + // SetCols sets the cols token list. SetCols([]antlr.Token) + // Get_optExpr returns the _optExpr rule contexts. Get_optExpr() IOptExprContext // Get_expr returns the _expr rule contexts. Get_expr() IExprContext + // Set_optExpr sets the _optExpr rule contexts. Set_optExpr(IOptExprContext) // Set_expr sets the _expr rule contexts. Set_expr(IExprContext) + // GetKeys returns the keys rule context list. GetKeys() []IOptExprContext // GetValues returns the values rule context list. GetValues() []IExprContext + // SetKeys sets the keys rule context list. - SetKeys([]IOptExprContext) + SetKeys([]IOptExprContext) // SetValues sets the values rule context list. 
- SetValues([]IExprContext) + SetValues([]IExprContext) + // Getter signatures AllOptExpr() []IOptExprContext @@ -4406,29 +5008,34 @@ type IMapInitializerListContext interface { } type MapInitializerListContext struct { - *antlr.BaseParserRuleContext - parser antlr.Parser - _optExpr IOptExprContext - keys []IOptExprContext - s21 antlr.Token - cols []antlr.Token - _expr IExprContext - values []IExprContext + antlr.BaseParserRuleContext + parser antlr.Parser + _optExpr IOptExprContext + keys []IOptExprContext + s21 antlr.Token + cols []antlr.Token + _expr IExprContext + values []IExprContext } func NewEmptyMapInitializerListContext() *MapInitializerListContext { var p = new(MapInitializerListContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) p.RuleIndex = CELParserRULE_mapInitializerList return p } +func InitEmptyMapInitializerListContext(p *MapInitializerListContext) { + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = CELParserRULE_mapInitializerList +} + func (*MapInitializerListContext) IsMapInitializerListContext() {} func NewMapInitializerListContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *MapInitializerListContext { var p = new(MapInitializerListContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser p.RuleIndex = CELParserRULE_mapInitializerList @@ -4440,28 +5047,36 @@ func (s *MapInitializerListContext) GetParser() antlr.Parser { return s.parser } func (s *MapInitializerListContext) GetS21() antlr.Token { return s.s21 } + func (s *MapInitializerListContext) SetS21(v antlr.Token) { s.s21 = v } + func (s *MapInitializerListContext) GetCols() []antlr.Token { return s.cols } + func (s *MapInitializerListContext) SetCols(v []antlr.Token) { s.cols = v } + func (s *MapInitializerListContext) Get_optExpr() IOptExprContext { return s._optExpr } func (s *MapInitializerListContext) Get_expr() IExprContext { return s._expr } + func (s *MapInitializerListContext) Set_optExpr(v IOptExprContext) { s._optExpr = v } func (s *MapInitializerListContext) Set_expr(v IExprContext) { s._expr = v } + func (s *MapInitializerListContext) GetKeys() []IOptExprContext { return s.keys } func (s *MapInitializerListContext) GetValues() []IExprContext { return s.values } + func (s *MapInitializerListContext) SetKeys(v []IOptExprContext) { s.keys = v } func (s *MapInitializerListContext) SetValues(v []IExprContext) { s.values = v } + func (s *MapInitializerListContext) AllOptExpr() []IOptExprContext { children := s.GetChildren() len := 0 @@ -4484,12 +5099,12 @@ func (s *MapInitializerListContext) AllOptExpr() []IOptExprContext { } func (s *MapInitializerListContext) OptExpr(i int) IOptExprContext { - var t antlr.RuleContext + var t antlr.RuleContext; j := 0 for _, ctx := range s.GetChildren() { if _, ok := ctx.(IOptExprContext); ok { if j == i { - t = ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } j++ @@ -4533,12 +5148,12 @@ func (s *MapInitializerListContext) AllExpr() []IExprContext { } func (s *MapInitializerListContext) Expr(i int) IExprContext { - var t antlr.RuleContext + var t antlr.RuleContext; j := 0 for _, ctx := range s.GetChildren() { if _, ok := ctx.(IExprContext); ok { if j == i { - t = ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } j++ @@ -4568,6 +5183,7 @@ func (s 
*MapInitializerListContext) ToStringTree(ruleNames []string, recog antlr return antlr.TreesStringTree(s, ruleNames, recog) } + func (s *MapInitializerListContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterMapInitializerList(s) @@ -4590,29 +5206,12 @@ func (s *MapInitializerListContext) Accept(visitor antlr.ParseTreeVisitor) inter } } -func (p *CELParser) MapInitializerList() (localctx IMapInitializerListContext) { - this := p - _ = this - localctx = NewMapInitializerListContext(p, p.GetParserRuleContext(), p.GetState()) - p.EnterRule(localctx, 26, CELParserRULE_mapInitializerList) - defer func() { - p.ExitRule() - }() - - defer func() { - if err := recover(); err != nil { - if v, ok := err.(antlr.RecognitionException); ok { - localctx.SetException(v) - p.GetErrorHandler().ReportError(p, v) - p.GetErrorHandler().Recover(p, v) - } else { - panic(err) - } - } - }() +func (p *CELParser) MapInitializerList() (localctx IMapInitializerListContext) { + localctx = NewMapInitializerListContext(p, p.GetParserRuleContext(), p.GetState()) + p.EnterRule(localctx, 26, CELParserRULE_mapInitializerList) var _alt int p.EnterOuterAlt(localctx, 1) @@ -4621,6 +5220,7 @@ func (p *CELParser) MapInitializerList() (localctx IMapInitializerListContext) { var _x = p.OptExpr() + localctx.(*MapInitializerListContext)._optExpr = _x } localctx.(*MapInitializerListContext).keys = append(localctx.(*MapInitializerListContext).keys, localctx.(*MapInitializerListContext)._optExpr) @@ -4630,6 +5230,10 @@ func (p *CELParser) MapInitializerList() (localctx IMapInitializerListContext) { var _m = p.Match(CELParserCOLON) localctx.(*MapInitializerListContext).s21 = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } localctx.(*MapInitializerListContext).cols = append(localctx.(*MapInitializerListContext).cols, localctx.(*MapInitializerListContext).s21) { @@ -4637,24 +5241,35 @@ func (p *CELParser) MapInitializerList() (localctx IMapInitializerListContext) { var _x = p.Expr() + localctx.(*MapInitializerListContext)._expr = _x } localctx.(*MapInitializerListContext).values = append(localctx.(*MapInitializerListContext).values, localctx.(*MapInitializerListContext)._expr) p.SetState(226) p.GetErrorHandler().Sync(p) - _alt = p.GetInterpreter().AdaptivePredict(p.GetTokenStream(), 30, p.GetParserRuleContext()) - + if p.HasError() { + goto errorExit + } + _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 30, p.GetParserRuleContext()) + if p.HasError() { + goto errorExit + } for _alt != 2 && _alt != antlr.ATNInvalidAltNumber { if _alt == 1 { { p.SetState(219) p.Match(CELParserCOMMA) + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } { p.SetState(220) var _x = p.OptExpr() + localctx.(*MapInitializerListContext)._optExpr = _x } localctx.(*MapInitializerListContext).keys = append(localctx.(*MapInitializerListContext).keys, localctx.(*MapInitializerListContext)._optExpr) @@ -4664,6 +5279,10 @@ func (p *CELParser) MapInitializerList() (localctx IMapInitializerListContext) { var _m = p.Match(CELParserCOLON) localctx.(*MapInitializerListContext).s21 = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } localctx.(*MapInitializerListContext).cols = append(localctx.(*MapInitializerListContext).cols, localctx.(*MapInitializerListContext).s21) { @@ -4671,19 +5290,40 @@ func (p *CELParser) MapInitializerList() (localctx IMapInitializerListContext) { var _x = p.Expr() + 
localctx.(*MapInitializerListContext)._expr = _x } localctx.(*MapInitializerListContext).values = append(localctx.(*MapInitializerListContext).values, localctx.(*MapInitializerListContext)._expr) + } p.SetState(228) p.GetErrorHandler().Sync(p) - _alt = p.GetInterpreter().AdaptivePredict(p.GetTokenStream(), 30, p.GetParserRuleContext()) + if p.HasError() { + goto errorExit + } + _alt = p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 30, p.GetParserRuleContext()) + if p.HasError() { + goto errorExit + } } + + +errorExit: + if p.HasError() { + v := p.GetError() + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + p.SetError(nil) + } + p.ExitRule() return localctx + goto errorExit // Trick to prevent compiler error if the label is not used } + // IOptExprContext is an interface to support dynamic dispatch. type IOptExprContext interface { antlr.ParserRuleContext @@ -4692,17 +5332,21 @@ type IOptExprContext interface { GetParser() antlr.Parser // GetOpt returns the opt token. - GetOpt() antlr.Token + GetOpt() antlr.Token + // SetOpt sets the opt token. - SetOpt(antlr.Token) + SetOpt(antlr.Token) + // GetE returns the e rule contexts. GetE() IExprContext + // SetE sets the e rule contexts. SetE(IExprContext) + // Getter signatures Expr() IExprContext QUESTIONMARK() antlr.TerminalNode @@ -4712,25 +5356,30 @@ type IOptExprContext interface { } type OptExprContext struct { - *antlr.BaseParserRuleContext + antlr.BaseParserRuleContext parser antlr.Parser - opt antlr.Token - e IExprContext + opt antlr.Token + e IExprContext } func NewEmptyOptExprContext() *OptExprContext { var p = new(OptExprContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) p.RuleIndex = CELParserRULE_optExpr return p } +func InitEmptyOptExprContext(p *OptExprContext) { + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = CELParserRULE_optExpr +} + func (*OptExprContext) IsOptExprContext() {} func NewOptExprContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *OptExprContext { var p = new(OptExprContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser p.RuleIndex = CELParserRULE_optExpr @@ -4742,17 +5391,21 @@ func (s *OptExprContext) GetParser() antlr.Parser { return s.parser } func (s *OptExprContext) GetOpt() antlr.Token { return s.opt } + func (s *OptExprContext) SetOpt(v antlr.Token) { s.opt = v } + func (s *OptExprContext) GetE() IExprContext { return s.e } + func (s *OptExprContext) SetE(v IExprContext) { s.e = v } + func (s *OptExprContext) Expr() IExprContext { - var t antlr.RuleContext + var t antlr.RuleContext; for _, ctx := range s.GetChildren() { if _, ok := ctx.(IExprContext); ok { - t = ctx.(antlr.RuleContext) + t = ctx.(antlr.RuleContext); break } } @@ -4776,6 +5429,7 @@ func (s *OptExprContext) ToStringTree(ruleNames []string, recog antlr.Recognizer return antlr.TreesStringTree(s, ruleNames, recog) } + func (s *OptExprContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterOptExpr(s) @@ -4798,35 +5452,23 @@ func (s *OptExprContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { } } -func (p *CELParser) OptExpr() (localctx IOptExprContext) { - this := p - _ = this + + +func (p *CELParser) 
OptExpr() (localctx IOptExprContext) { localctx = NewOptExprContext(p, p.GetParserRuleContext(), p.GetState()) p.EnterRule(localctx, 28, CELParserRULE_optExpr) var _la int - defer func() { - p.ExitRule() - }() - - defer func() { - if err := recover(); err != nil { - if v, ok := err.(antlr.RecognitionException); ok { - localctx.SetException(v) - p.GetErrorHandler().ReportError(p, v) - p.GetErrorHandler().Recover(p, v) - } else { - panic(err) - } - } - }() - p.EnterOuterAlt(localctx, 1) p.SetState(230) p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } _la = p.GetTokenStream().LA(1) + if _la == CELParserQUESTIONMARK { { p.SetState(229) @@ -4834,6 +5476,10 @@ func (p *CELParser) OptExpr() (localctx IOptExprContext) { var _m = p.Match(CELParserQUESTIONMARK) localctx.(*OptExprContext).opt = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } } @@ -4842,12 +5488,26 @@ func (p *CELParser) OptExpr() (localctx IOptExprContext) { var _x = p.Expr() + localctx.(*OptExprContext).e = _x } + + +errorExit: + if p.HasError() { + v := p.GetError() + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + p.SetError(nil) + } + p.ExitRule() return localctx + goto errorExit // Trick to prevent compiler error if the label is not used } + // ILiteralContext is an interface to support dynamic dispatch. type ILiteralContext interface { antlr.ParserRuleContext @@ -4859,23 +5519,28 @@ type ILiteralContext interface { } type LiteralContext struct { - *antlr.BaseParserRuleContext + antlr.BaseParserRuleContext parser antlr.Parser } func NewEmptyLiteralContext() *LiteralContext { var p = new(LiteralContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(nil, -1) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) p.RuleIndex = CELParserRULE_literal return p } +func InitEmptyLiteralContext(p *LiteralContext) { + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, nil, -1) + p.RuleIndex = CELParserRULE_literal +} + func (*LiteralContext) IsLiteralContext() {} func NewLiteralContext(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int) *LiteralContext { var p = new(LiteralContext) - p.BaseParserRuleContext = antlr.NewBaseParserRuleContext(parent, invokingState) + antlr.InitBaseParserRuleContext(&p.BaseParserRuleContext, parent, invokingState) p.parser = parser p.RuleIndex = CELParserRULE_literal @@ -4885,8 +5550,8 @@ func NewLiteralContext(parser antlr.Parser, parent antlr.ParserRuleContext, invo func (s *LiteralContext) GetParser() antlr.Parser { return s.parser } -func (s *LiteralContext) CopyFrom(ctx *LiteralContext) { - s.BaseParserRuleContext.CopyFrom(ctx.BaseParserRuleContext) +func (s *LiteralContext) CopyAll(ctx *LiteralContext) { + s.CopyFrom(&ctx.BaseParserRuleContext) } func (s *LiteralContext) GetRuleContext() antlr.RuleContext { @@ -4897,23 +5562,28 @@ func (s *LiteralContext) ToStringTree(ruleNames []string, recog antlr.Recognizer return antlr.TreesStringTree(s, ruleNames, recog) } + + + type BytesContext struct { - *LiteralContext + LiteralContext tok antlr.Token } func NewBytesContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *BytesContext { var p = new(BytesContext) - p.LiteralContext = NewEmptyLiteralContext() + InitEmptyLiteralContext(&p.LiteralContext) p.parser = parser - p.CopyFrom(ctx.(*LiteralContext)) + p.CopyAll(ctx.(*LiteralContext)) return p } + func (s *BytesContext) GetTok() antlr.Token { return s.tok } + func (s *BytesContext) SetTok(v 
antlr.Token) { s.tok = v } func (s *BytesContext) GetRuleContext() antlr.RuleContext { @@ -4924,6 +5594,7 @@ func (s *BytesContext) BYTES() antlr.TerminalNode { return s.GetToken(CELParserBYTES, 0) } + func (s *BytesContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterBytes(s) @@ -4946,23 +5617,26 @@ func (s *BytesContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { } } + type UintContext struct { - *LiteralContext + LiteralContext tok antlr.Token } func NewUintContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *UintContext { var p = new(UintContext) - p.LiteralContext = NewEmptyLiteralContext() + InitEmptyLiteralContext(&p.LiteralContext) p.parser = parser - p.CopyFrom(ctx.(*LiteralContext)) + p.CopyAll(ctx.(*LiteralContext)) return p } + func (s *UintContext) GetTok() antlr.Token { return s.tok } + func (s *UintContext) SetTok(v antlr.Token) { s.tok = v } func (s *UintContext) GetRuleContext() antlr.RuleContext { @@ -4973,6 +5647,7 @@ func (s *UintContext) NUM_UINT() antlr.TerminalNode { return s.GetToken(CELParserNUM_UINT, 0) } + func (s *UintContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterUint(s) @@ -4995,23 +5670,26 @@ func (s *UintContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { } } + type NullContext struct { - *LiteralContext + LiteralContext tok antlr.Token } func NewNullContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *NullContext { var p = new(NullContext) - p.LiteralContext = NewEmptyLiteralContext() + InitEmptyLiteralContext(&p.LiteralContext) p.parser = parser - p.CopyFrom(ctx.(*LiteralContext)) + p.CopyAll(ctx.(*LiteralContext)) return p } + func (s *NullContext) GetTok() antlr.Token { return s.tok } + func (s *NullContext) SetTok(v antlr.Token) { s.tok = v } func (s *NullContext) GetRuleContext() antlr.RuleContext { @@ -5022,6 +5700,7 @@ func (s *NullContext) NUL() antlr.TerminalNode { return s.GetToken(CELParserNUL, 0) } + func (s *NullContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterNull(s) @@ -5044,23 +5723,26 @@ func (s *NullContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { } } + type BoolFalseContext struct { - *LiteralContext + LiteralContext tok antlr.Token } func NewBoolFalseContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *BoolFalseContext { var p = new(BoolFalseContext) - p.LiteralContext = NewEmptyLiteralContext() + InitEmptyLiteralContext(&p.LiteralContext) p.parser = parser - p.CopyFrom(ctx.(*LiteralContext)) + p.CopyAll(ctx.(*LiteralContext)) return p } + func (s *BoolFalseContext) GetTok() antlr.Token { return s.tok } + func (s *BoolFalseContext) SetTok(v antlr.Token) { s.tok = v } func (s *BoolFalseContext) GetRuleContext() antlr.RuleContext { @@ -5071,6 +5753,7 @@ func (s *BoolFalseContext) CEL_FALSE() antlr.TerminalNode { return s.GetToken(CELParserCEL_FALSE, 0) } + func (s *BoolFalseContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterBoolFalse(s) @@ -5093,23 +5776,26 @@ func (s *BoolFalseContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { } } + type StringContext struct { - *LiteralContext + LiteralContext tok antlr.Token } func NewStringContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *StringContext { var p = new(StringContext) - p.LiteralContext = NewEmptyLiteralContext() + 
InitEmptyLiteralContext(&p.LiteralContext) p.parser = parser - p.CopyFrom(ctx.(*LiteralContext)) + p.CopyAll(ctx.(*LiteralContext)) return p } + func (s *StringContext) GetTok() antlr.Token { return s.tok } + func (s *StringContext) SetTok(v antlr.Token) { s.tok = v } func (s *StringContext) GetRuleContext() antlr.RuleContext { @@ -5120,6 +5806,7 @@ func (s *StringContext) STRING() antlr.TerminalNode { return s.GetToken(CELParserSTRING, 0) } + func (s *StringContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterString(s) @@ -5142,26 +5829,29 @@ func (s *StringContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { } } + type DoubleContext struct { - *LiteralContext + LiteralContext sign antlr.Token - tok antlr.Token + tok antlr.Token } func NewDoubleContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *DoubleContext { var p = new(DoubleContext) - p.LiteralContext = NewEmptyLiteralContext() + InitEmptyLiteralContext(&p.LiteralContext) p.parser = parser - p.CopyFrom(ctx.(*LiteralContext)) + p.CopyAll(ctx.(*LiteralContext)) return p } + func (s *DoubleContext) GetSign() antlr.Token { return s.sign } func (s *DoubleContext) GetTok() antlr.Token { return s.tok } + func (s *DoubleContext) SetSign(v antlr.Token) { s.sign = v } func (s *DoubleContext) SetTok(v antlr.Token) { s.tok = v } @@ -5178,6 +5868,7 @@ func (s *DoubleContext) MINUS() antlr.TerminalNode { return s.GetToken(CELParserMINUS, 0) } + func (s *DoubleContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterDouble(s) @@ -5200,23 +5891,26 @@ func (s *DoubleContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { } } + type BoolTrueContext struct { - *LiteralContext + LiteralContext tok antlr.Token } func NewBoolTrueContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *BoolTrueContext { var p = new(BoolTrueContext) - p.LiteralContext = NewEmptyLiteralContext() + InitEmptyLiteralContext(&p.LiteralContext) p.parser = parser - p.CopyFrom(ctx.(*LiteralContext)) + p.CopyAll(ctx.(*LiteralContext)) return p } + func (s *BoolTrueContext) GetTok() antlr.Token { return s.tok } + func (s *BoolTrueContext) SetTok(v antlr.Token) { s.tok = v } func (s *BoolTrueContext) GetRuleContext() antlr.RuleContext { @@ -5227,6 +5921,7 @@ func (s *BoolTrueContext) CEL_TRUE() antlr.TerminalNode { return s.GetToken(CELParserCEL_TRUE, 0) } + func (s *BoolTrueContext) EnterRule(listener antlr.ParseTreeListener) { if listenerT, ok := listener.(CELListener); ok { listenerT.EnterBoolTrue(s) @@ -5249,26 +5944,29 @@ func (s *BoolTrueContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { } } + type IntContext struct { - *LiteralContext + LiteralContext sign antlr.Token - tok antlr.Token + tok antlr.Token } func NewIntContext(parser antlr.Parser, ctx antlr.ParserRuleContext) *IntContext { var p = new(IntContext) - p.LiteralContext = NewEmptyLiteralContext() + InitEmptyLiteralContext(&p.LiteralContext) p.parser = parser - p.CopyFrom(ctx.(*LiteralContext)) + p.CopyAll(ctx.(*LiteralContext)) return p } + func (s *IntContext) GetSign() antlr.Token { return s.sign } func (s *IntContext) GetTok() antlr.Token { return s.tok } + func (s *IntContext) SetSign(v antlr.Token) { s.sign = v } func (s *IntContext) SetTok(v antlr.Token) { s.tok = v } @@ -5285,6 +5983,7 @@ func (s *IntContext) MINUS() antlr.TerminalNode { return s.GetToken(CELParserMINUS, 0) } + func (s *IntContext) EnterRule(listener antlr.ParseTreeListener) { if 
listenerT, ok := listener.(CELListener); ok { listenerT.EnterInt(s) @@ -5307,40 +6006,31 @@ func (s *IntContext) Accept(visitor antlr.ParseTreeVisitor) interface{} { } } -func (p *CELParser) Literal() (localctx ILiteralContext) { - this := p - _ = this + +func (p *CELParser) Literal() (localctx ILiteralContext) { localctx = NewLiteralContext(p, p.GetParserRuleContext(), p.GetState()) p.EnterRule(localctx, 30, CELParserRULE_literal) var _la int - defer func() { - p.ExitRule() - }() - - defer func() { - if err := recover(); err != nil { - if v, ok := err.(antlr.RecognitionException); ok { - localctx.SetException(v) - p.GetErrorHandler().ReportError(p, v) - p.GetErrorHandler().Recover(p, v) - } else { - panic(err) - } - } - }() - p.SetState(248) p.GetErrorHandler().Sync(p) - switch p.GetInterpreter().AdaptivePredict(p.GetTokenStream(), 34, p.GetParserRuleContext()) { + if p.HasError() { + goto errorExit + } + + switch p.GetInterpreter().AdaptivePredict(p.BaseParser, p.GetTokenStream(), 34, p.GetParserRuleContext()) { case 1: localctx = NewIntContext(p, localctx) p.EnterOuterAlt(localctx, 1) p.SetState(235) p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } _la = p.GetTokenStream().LA(1) + if _la == CELParserMINUS { { p.SetState(234) @@ -5348,6 +6038,10 @@ func (p *CELParser) Literal() (localctx ILiteralContext) { var _m = p.Match(CELParserMINUS) localctx.(*IntContext).sign = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } } @@ -5357,8 +6051,13 @@ func (p *CELParser) Literal() (localctx ILiteralContext) { var _m = p.Match(CELParserNUM_INT) localctx.(*IntContext).tok = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } + case 2: localctx = NewUintContext(p, localctx) p.EnterOuterAlt(localctx, 2) @@ -5368,15 +6067,24 @@ func (p *CELParser) Literal() (localctx ILiteralContext) { var _m = p.Match(CELParserNUM_UINT) localctx.(*UintContext).tok = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } + case 3: localctx = NewDoubleContext(p, localctx) p.EnterOuterAlt(localctx, 3) p.SetState(240) p.GetErrorHandler().Sync(p) + if p.HasError() { + goto errorExit + } _la = p.GetTokenStream().LA(1) + if _la == CELParserMINUS { { p.SetState(239) @@ -5384,6 +6092,10 @@ func (p *CELParser) Literal() (localctx ILiteralContext) { var _m = p.Match(CELParserMINUS) localctx.(*DoubleContext).sign = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } } @@ -5393,8 +6105,13 @@ func (p *CELParser) Literal() (localctx ILiteralContext) { var _m = p.Match(CELParserNUM_FLOAT) localctx.(*DoubleContext).tok = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } + case 4: localctx = NewStringContext(p, localctx) p.EnterOuterAlt(localctx, 4) @@ -5404,8 +6121,13 @@ func (p *CELParser) Literal() (localctx ILiteralContext) { var _m = p.Match(CELParserSTRING) localctx.(*StringContext).tok = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } + case 5: localctx = NewBytesContext(p, localctx) p.EnterOuterAlt(localctx, 5) @@ -5415,8 +6137,13 @@ func (p *CELParser) Literal() (localctx ILiteralContext) { var _m = p.Match(CELParserBYTES) localctx.(*BytesContext).tok = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } + case 6: localctx = NewBoolTrueContext(p, localctx) p.EnterOuterAlt(localctx, 6) @@ -5426,8 +6153,13 @@ func (p *CELParser) Literal() (localctx ILiteralContext) { var _m = 
p.Match(CELParserCEL_TRUE) localctx.(*BoolTrueContext).tok = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } + case 7: localctx = NewBoolFalseContext(p, localctx) p.EnterOuterAlt(localctx, 7) @@ -5437,8 +6169,13 @@ func (p *CELParser) Literal() (localctx ILiteralContext) { var _m = p.Match(CELParserCEL_FALSE) localctx.(*BoolFalseContext).tok = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } + case 8: localctx = NewNullContext(p, localctx) p.EnterOuterAlt(localctx, 8) @@ -5448,35 +6185,48 @@ func (p *CELParser) Literal() (localctx ILiteralContext) { var _m = p.Match(CELParserNUL) localctx.(*NullContext).tok = _m + if p.HasError() { + // Recognition error - abort rule + goto errorExit + } } + case antlr.ATNInvalidAltNumber: + goto errorExit } + +errorExit: + if p.HasError() { + v := p.GetError() + localctx.SetException(v) + p.GetErrorHandler().ReportError(p, v) + p.GetErrorHandler().Recover(p, v) + p.SetError(nil) + } + p.ExitRule() return localctx + goto errorExit // Trick to prevent compiler error if the label is not used } + func (p *CELParser) Sempred(localctx antlr.RuleContext, ruleIndex, predIndex int) bool { switch ruleIndex { case 4: - var t *RelationContext = nil - if localctx != nil { - t = localctx.(*RelationContext) - } - return p.Relation_Sempred(t, predIndex) + var t *RelationContext = nil + if localctx != nil { t = localctx.(*RelationContext) } + return p.Relation_Sempred(t, predIndex) case 5: - var t *CalcContext = nil - if localctx != nil { - t = localctx.(*CalcContext) - } - return p.Calc_Sempred(t, predIndex) + var t *CalcContext = nil + if localctx != nil { t = localctx.(*CalcContext) } + return p.Calc_Sempred(t, predIndex) case 7: - var t *MemberContext = nil - if localctx != nil { - t = localctx.(*MemberContext) - } - return p.Member_Sempred(t, predIndex) + var t *MemberContext = nil + if localctx != nil { t = localctx.(*MemberContext) } + return p.Member_Sempred(t, predIndex) + default: panic("No predicate with index: " + fmt.Sprint(ruleIndex)) @@ -5484,12 +6234,9 @@ func (p *CELParser) Sempred(localctx antlr.RuleContext, ruleIndex, predIndex int } func (p *CELParser) Relation_Sempred(localctx antlr.RuleContext, predIndex int) bool { - this := p - _ = this - switch predIndex { case 0: - return p.Precpred(p.GetParserRuleContext(), 1) + return p.Precpred(p.GetParserRuleContext(), 1) default: panic("No predicate with index: " + fmt.Sprint(predIndex)) @@ -5497,15 +6244,12 @@ func (p *CELParser) Relation_Sempred(localctx antlr.RuleContext, predIndex int) } func (p *CELParser) Calc_Sempred(localctx antlr.RuleContext, predIndex int) bool { - this := p - _ = this - switch predIndex { case 1: - return p.Precpred(p.GetParserRuleContext(), 2) + return p.Precpred(p.GetParserRuleContext(), 2) case 2: - return p.Precpred(p.GetParserRuleContext(), 1) + return p.Precpred(p.GetParserRuleContext(), 1) default: panic("No predicate with index: " + fmt.Sprint(predIndex)) @@ -5513,20 +6257,18 @@ func (p *CELParser) Calc_Sempred(localctx antlr.RuleContext, predIndex int) bool } func (p *CELParser) Member_Sempred(localctx antlr.RuleContext, predIndex int) bool { - this := p - _ = this - switch predIndex { case 3: - return p.Precpred(p.GetParserRuleContext(), 3) + return p.Precpred(p.GetParserRuleContext(), 3) case 4: - return p.Precpred(p.GetParserRuleContext(), 2) + return p.Precpred(p.GetParserRuleContext(), 2) case 5: - return p.Precpred(p.GetParserRuleContext(), 1) + return p.Precpred(p.GetParserRuleContext(), 1) default: 
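// Note on the regenerated error handling above: ANTLR 4.13 emits explicit
// p.HasError() checks after each Sync, Match, and AdaptivePredict call, all of
// which branch to a shared errorExit label, replacing the 4.12-era defer/recover
// blocks that trapped antlr.RecognitionException. The unconditional
// "goto errorExit" that follows "return localctx" is intentionally unreachable;
// it only guarantees the label is referenced so the generated file compiles even
// when a rule body contains no jump to it.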
panic("No predicate with index: " + fmt.Sprint(predIndex)) } } + diff --git a/vendor/github.com/google/cel-go/parser/gen/cel_visitor.go b/vendor/github.com/google/cel-go/parser/gen/cel_visitor.go index 2c54e2cb0..d2fbd563a 100644 --- a/vendor/github.com/google/cel-go/parser/gen/cel_visitor.go +++ b/vendor/github.com/google/cel-go/parser/gen/cel_visitor.go @@ -1,7 +1,8 @@ -// Code generated from /Users/tswadell/go/src/github.com/google/cel-go/parser/gen/CEL.g4 by ANTLR 4.12.0. DO NOT EDIT. +// Code generated from /usr/local/google/home/tswadell/go/src/github.com/google/cel-go/parser/gen/CEL.g4 by ANTLR 4.13.1. DO NOT EDIT. package gen // CEL -import "github.com/antlr/antlr4/runtime/Go/antlr/v4" +import "github.com/antlr4-go/antlr/v4" + // A complete Visitor for a parse tree produced by CELParser. type CELVisitor interface { @@ -105,4 +106,5 @@ type CELVisitor interface { // Visit a parse tree produced by CELParser#Null. VisitNull(ctx *NullContext) interface{} -} + +} \ No newline at end of file diff --git a/vendor/github.com/google/cel-go/parser/gen/generate.sh b/vendor/github.com/google/cel-go/parser/gen/generate.sh index 389107c6a..27a9559f7 100644 --- a/vendor/github.com/google/cel-go/parser/gen/generate.sh +++ b/vendor/github.com/google/cel-go/parser/gen/generate.sh @@ -27,7 +27,7 @@ DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" # Generate AntLR artifacts. -java -Xmx500M -cp ${DIR}/antlr-4.12.0-complete.jar org.antlr.v4.Tool \ +java -Xmx500M -cp ${DIR}/antlr-4.13.1-complete.jar org.antlr.v4.Tool \ -Dlanguage=Go \ -package gen \ -o ${DIR} \ diff --git a/vendor/github.com/google/cel-go/parser/helper.go b/vendor/github.com/google/cel-go/parser/helper.go index 8f8f478ed..182ff034c 100644 --- a/vendor/github.com/google/cel-go/parser/helper.go +++ b/vendor/github.com/google/cel-go/parser/helper.go @@ -17,347 +17,288 @@ package parser import ( "sync" - antlr "github.com/antlr/antlr4/runtime/Go/antlr/v4" + antlr "github.com/antlr4-go/antlr/v4" "github.com/google/cel-go/common" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" + "github.com/google/cel-go/common/ast" + "github.com/google/cel-go/common/types" + "github.com/google/cel-go/common/types/ref" ) type parserHelper struct { - source common.Source - nextID int64 - positions map[int64]int32 - macroCalls map[int64]*exprpb.Expr + exprFactory ast.ExprFactory + source common.Source + sourceInfo *ast.SourceInfo + nextID int64 } -func newParserHelper(source common.Source) *parserHelper { +func newParserHelper(source common.Source, fac ast.ExprFactory) *parserHelper { return &parserHelper{ - source: source, - nextID: 1, - positions: make(map[int64]int32), - macroCalls: make(map[int64]*exprpb.Expr), + exprFactory: fac, + source: source, + sourceInfo: ast.NewSourceInfo(source), + nextID: 1, } } -func (p *parserHelper) getSourceInfo() *exprpb.SourceInfo { - return &exprpb.SourceInfo{ - Location: p.source.Description(), - Positions: p.positions, - LineOffsets: p.source.LineOffsets(), - MacroCalls: p.macroCalls} +func (p *parserHelper) getSourceInfo() *ast.SourceInfo { + return p.sourceInfo } -func (p *parserHelper) newLiteral(ctx any, value *exprpb.Constant) *exprpb.Expr { - exprNode := p.newExpr(ctx) - exprNode.ExprKind = &exprpb.Expr_ConstExpr{ConstExpr: value} - return exprNode +func (p *parserHelper) newLiteral(ctx any, value ref.Val) ast.Expr { + return p.exprFactory.NewLiteral(p.newID(ctx), value) } -func (p *parserHelper) newLiteralBool(ctx any, value bool) *exprpb.Expr { - return p.newLiteral(ctx, - 
&exprpb.Constant{ConstantKind: &exprpb.Constant_BoolValue{BoolValue: value}}) +func (p *parserHelper) newLiteralBool(ctx any, value bool) ast.Expr { + return p.newLiteral(ctx, types.Bool(value)) } -func (p *parserHelper) newLiteralString(ctx any, value string) *exprpb.Expr { - return p.newLiteral(ctx, - &exprpb.Constant{ConstantKind: &exprpb.Constant_StringValue{StringValue: value}}) +func (p *parserHelper) newLiteralString(ctx any, value string) ast.Expr { + return p.newLiteral(ctx, types.String(value)) } -func (p *parserHelper) newLiteralBytes(ctx any, value []byte) *exprpb.Expr { - return p.newLiteral(ctx, - &exprpb.Constant{ConstantKind: &exprpb.Constant_BytesValue{BytesValue: value}}) +func (p *parserHelper) newLiteralBytes(ctx any, value []byte) ast.Expr { + return p.newLiteral(ctx, types.Bytes(value)) } -func (p *parserHelper) newLiteralInt(ctx any, value int64) *exprpb.Expr { - return p.newLiteral(ctx, - &exprpb.Constant{ConstantKind: &exprpb.Constant_Int64Value{Int64Value: value}}) +func (p *parserHelper) newLiteralInt(ctx any, value int64) ast.Expr { + return p.newLiteral(ctx, types.Int(value)) } -func (p *parserHelper) newLiteralUint(ctx any, value uint64) *exprpb.Expr { - return p.newLiteral(ctx, &exprpb.Constant{ConstantKind: &exprpb.Constant_Uint64Value{Uint64Value: value}}) +func (p *parserHelper) newLiteralUint(ctx any, value uint64) ast.Expr { + return p.newLiteral(ctx, types.Uint(value)) } -func (p *parserHelper) newLiteralDouble(ctx any, value float64) *exprpb.Expr { - return p.newLiteral(ctx, - &exprpb.Constant{ConstantKind: &exprpb.Constant_DoubleValue{DoubleValue: value}}) +func (p *parserHelper) newLiteralDouble(ctx any, value float64) ast.Expr { + return p.newLiteral(ctx, types.Double(value)) } -func (p *parserHelper) newIdent(ctx any, name string) *exprpb.Expr { - exprNode := p.newExpr(ctx) - exprNode.ExprKind = &exprpb.Expr_IdentExpr{IdentExpr: &exprpb.Expr_Ident{Name: name}} - return exprNode +func (p *parserHelper) newIdent(ctx any, name string) ast.Expr { + return p.exprFactory.NewIdent(p.newID(ctx), name) } -func (p *parserHelper) newSelect(ctx any, operand *exprpb.Expr, field string) *exprpb.Expr { - exprNode := p.newExpr(ctx) - exprNode.ExprKind = &exprpb.Expr_SelectExpr{ - SelectExpr: &exprpb.Expr_Select{Operand: operand, Field: field}} - return exprNode +func (p *parserHelper) newSelect(ctx any, operand ast.Expr, field string) ast.Expr { + return p.exprFactory.NewSelect(p.newID(ctx), operand, field) } -func (p *parserHelper) newPresenceTest(ctx any, operand *exprpb.Expr, field string) *exprpb.Expr { - exprNode := p.newExpr(ctx) - exprNode.ExprKind = &exprpb.Expr_SelectExpr{ - SelectExpr: &exprpb.Expr_Select{Operand: operand, Field: field, TestOnly: true}} - return exprNode +func (p *parserHelper) newPresenceTest(ctx any, operand ast.Expr, field string) ast.Expr { + return p.exprFactory.NewPresenceTest(p.newID(ctx), operand, field) } -func (p *parserHelper) newGlobalCall(ctx any, function string, args ...*exprpb.Expr) *exprpb.Expr { - exprNode := p.newExpr(ctx) - exprNode.ExprKind = &exprpb.Expr_CallExpr{ - CallExpr: &exprpb.Expr_Call{Function: function, Args: args}} - return exprNode +func (p *parserHelper) newGlobalCall(ctx any, function string, args ...ast.Expr) ast.Expr { + return p.exprFactory.NewCall(p.newID(ctx), function, args...) 
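// The parserHelper constructors in this hunk now delegate to an ast.ExprFactory
// and wrap plain Go values in ref.Val types from common/types instead of
// assembling exprpb.Constant protos by hand. A minimal sketch of the new call
// shape, where fac and id stand in for the factory and expression-id bookkeeping
// shown above (illustrative names, not part of the patch):
//
//     boolLit := fac.NewLiteral(id, types.Bool(true))    // was &exprpb.Constant{...BoolValue...}
//     strLit  := fac.NewLiteral(id, types.String("hi"))  // was &exprpb.Constant{...StringValue...}
//     call    := fac.NewCall(id, "size", strLit)         // mirrors newGlobalCall above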
} -func (p *parserHelper) newReceiverCall(ctx any, function string, target *exprpb.Expr, args ...*exprpb.Expr) *exprpb.Expr { - exprNode := p.newExpr(ctx) - exprNode.ExprKind = &exprpb.Expr_CallExpr{ - CallExpr: &exprpb.Expr_Call{Function: function, Target: target, Args: args}} - return exprNode +func (p *parserHelper) newReceiverCall(ctx any, function string, target ast.Expr, args ...ast.Expr) ast.Expr { + return p.exprFactory.NewMemberCall(p.newID(ctx), function, target, args...) } -func (p *parserHelper) newList(ctx any, elements []*exprpb.Expr, optionals ...int32) *exprpb.Expr { - exprNode := p.newExpr(ctx) - exprNode.ExprKind = &exprpb.Expr_ListExpr{ - ListExpr: &exprpb.Expr_CreateList{ - Elements: elements, - OptionalIndices: optionals, - }} - return exprNode +func (p *parserHelper) newList(ctx any, elements []ast.Expr, optionals ...int32) ast.Expr { + return p.exprFactory.NewList(p.newID(ctx), elements, optionals) } -func (p *parserHelper) newMap(ctx any, entries ...*exprpb.Expr_CreateStruct_Entry) *exprpb.Expr { - exprNode := p.newExpr(ctx) - exprNode.ExprKind = &exprpb.Expr_StructExpr{ - StructExpr: &exprpb.Expr_CreateStruct{Entries: entries}} - return exprNode +func (p *parserHelper) newMap(ctx any, entries ...ast.EntryExpr) ast.Expr { + return p.exprFactory.NewMap(p.newID(ctx), entries) } -func (p *parserHelper) newMapEntry(entryID int64, key *exprpb.Expr, value *exprpb.Expr, optional bool) *exprpb.Expr_CreateStruct_Entry { - return &exprpb.Expr_CreateStruct_Entry{ - Id: entryID, - KeyKind: &exprpb.Expr_CreateStruct_Entry_MapKey{MapKey: key}, - Value: value, - OptionalEntry: optional, - } +func (p *parserHelper) newMapEntry(entryID int64, key ast.Expr, value ast.Expr, optional bool) ast.EntryExpr { + return p.exprFactory.NewMapEntry(entryID, key, value, optional) } -func (p *parserHelper) newObject(ctx any, typeName string, entries ...*exprpb.Expr_CreateStruct_Entry) *exprpb.Expr { - exprNode := p.newExpr(ctx) - exprNode.ExprKind = &exprpb.Expr_StructExpr{ - StructExpr: &exprpb.Expr_CreateStruct{ - MessageName: typeName, - Entries: entries, - }, - } - return exprNode +func (p *parserHelper) newObject(ctx any, typeName string, fields ...ast.EntryExpr) ast.Expr { + return p.exprFactory.NewStruct(p.newID(ctx), typeName, fields) } -func (p *parserHelper) newObjectField(fieldID int64, field string, value *exprpb.Expr, optional bool) *exprpb.Expr_CreateStruct_Entry { - return &exprpb.Expr_CreateStruct_Entry{ - Id: fieldID, - KeyKind: &exprpb.Expr_CreateStruct_Entry_FieldKey{FieldKey: field}, - Value: value, - OptionalEntry: optional, - } +func (p *parserHelper) newObjectField(fieldID int64, field string, value ast.Expr, optional bool) ast.EntryExpr { + return p.exprFactory.NewStructField(fieldID, field, value, optional) } -func (p *parserHelper) newComprehension(ctx any, iterVar string, - iterRange *exprpb.Expr, +func (p *parserHelper) newComprehension(ctx any, + iterRange ast.Expr, + iterVar string, accuVar string, - accuInit *exprpb.Expr, - condition *exprpb.Expr, - step *exprpb.Expr, - result *exprpb.Expr) *exprpb.Expr { - exprNode := p.newExpr(ctx) - exprNode.ExprKind = &exprpb.Expr_ComprehensionExpr{ - ComprehensionExpr: &exprpb.Expr_Comprehension{ - AccuVar: accuVar, - AccuInit: accuInit, - IterVar: iterVar, - IterRange: iterRange, - LoopCondition: condition, - LoopStep: step, - Result: result}} - return exprNode -} - -func (p *parserHelper) newExpr(ctx any) *exprpb.Expr { - id, isID := ctx.(int64) - if isID { - return &exprpb.Expr{Id: id} + accuInit ast.Expr, + condition 
ast.Expr, + step ast.Expr, + result ast.Expr) ast.Expr { + return p.exprFactory.NewComprehension( + p.newID(ctx), iterRange, iterVar, accuVar, accuInit, condition, step, result) +} + +func (p *parserHelper) newID(ctx any) int64 { + if id, isID := ctx.(int64); isID { + return id } - return &exprpb.Expr{Id: p.id(ctx)} + return p.id(ctx) +} + +func (p *parserHelper) newExpr(ctx any) ast.Expr { + return p.exprFactory.NewUnspecifiedExpr(p.newID(ctx)) } func (p *parserHelper) id(ctx any) int64 { - var location common.Location - switch ctx.(type) { + var offset ast.OffsetRange + switch c := ctx.(type) { case antlr.ParserRuleContext: - token := (ctx.(antlr.ParserRuleContext)).GetStart() - location = p.source.NewLocation(token.GetLine(), token.GetColumn()) + start, stop := c.GetStart(), c.GetStop() + if stop == nil { + stop = start + } + offset.Start = p.sourceInfo.ComputeOffset(int32(start.GetLine()), int32(start.GetColumn())) + offset.Stop = p.sourceInfo.ComputeOffset(int32(stop.GetLine()), int32(stop.GetColumn())) case antlr.Token: - token := ctx.(antlr.Token) - location = p.source.NewLocation(token.GetLine(), token.GetColumn()) + offset.Start = p.sourceInfo.ComputeOffset(int32(c.GetLine()), int32(c.GetColumn())) + offset.Stop = offset.Start case common.Location: - location = ctx.(common.Location) + offset.Start = p.sourceInfo.ComputeOffset(int32(c.Line()), int32(c.Column())) + offset.Stop = offset.Start + case ast.OffsetRange: + offset = c default: // This should only happen if the ctx is nil return -1 } id := p.nextID - p.positions[id], _ = p.source.LocationOffset(location) + p.sourceInfo.SetOffsetRange(id, offset) p.nextID++ return id } func (p *parserHelper) getLocation(id int64) common.Location { - characterOffset := p.positions[id] - location, _ := p.source.OffsetLocation(characterOffset) - return location + return p.sourceInfo.GetStartLocation(id) } // buildMacroCallArg iterates the expression and returns a new expression // where all macros have been replaced by their IDs in MacroCalls -func (p *parserHelper) buildMacroCallArg(expr *exprpb.Expr) *exprpb.Expr { - if _, found := p.macroCalls[expr.GetId()]; found { - return &exprpb.Expr{Id: expr.GetId()} +func (p *parserHelper) buildMacroCallArg(expr ast.Expr) ast.Expr { + if _, found := p.sourceInfo.GetMacroCall(expr.ID()); found { + return p.exprFactory.NewUnspecifiedExpr(expr.ID()) } - switch expr.GetExprKind().(type) { - case *exprpb.Expr_CallExpr: + switch expr.Kind() { + case ast.CallKind: // Iterate the AST from `expr` recursively looking for macros. Because we are at most // starting from the top level macro, this recursion is bounded by the size of the AST. This // means that the depth check on the AST during parsing will catch recursion overflows // before we get to here. - macroTarget := expr.GetCallExpr().GetTarget() - if macroTarget != nil { - macroTarget = p.buildMacroCallArg(macroTarget) - } - macroArgs := make([]*exprpb.Expr, len(expr.GetCallExpr().GetArgs())) - for index, arg := range expr.GetCallExpr().GetArgs() { + call := expr.AsCall() + macroArgs := make([]ast.Expr, len(call.Args())) + for index, arg := range call.Args() { macroArgs[index] = p.buildMacroCallArg(arg) } - return &exprpb.Expr{ - Id: expr.GetId(), - ExprKind: &exprpb.Expr_CallExpr{ - CallExpr: &exprpb.Expr_Call{ - Target: macroTarget, - Function: expr.GetCallExpr().GetFunction(), - Args: macroArgs, - }, - }, + if !call.IsMemberFunction() { + return p.exprFactory.NewCall(expr.ID(), call.FunctionName(), macroArgs...) 
} - case *exprpb.Expr_ListExpr: - listExpr := expr.GetListExpr() - macroListArgs := make([]*exprpb.Expr, len(listExpr.GetElements())) - for i, elem := range listExpr.GetElements() { + macroTarget := p.buildMacroCallArg(call.Target()) + return p.exprFactory.NewMemberCall(expr.ID(), call.FunctionName(), macroTarget, macroArgs...) + case ast.ListKind: + list := expr.AsList() + macroListArgs := make([]ast.Expr, list.Size()) + for i, elem := range list.Elements() { macroListArgs[i] = p.buildMacroCallArg(elem) } - return &exprpb.Expr{ - Id: expr.GetId(), - ExprKind: &exprpb.Expr_ListExpr{ - ListExpr: &exprpb.Expr_CreateList{ - Elements: macroListArgs, - OptionalIndices: listExpr.GetOptionalIndices(), - }, - }, - } + return p.exprFactory.NewList(expr.ID(), macroListArgs, list.OptionalIndices()) } - return expr } // addMacroCall adds the macro the the MacroCalls map in source info. If a macro has args/subargs/target // that are macros, their ID will be stored instead for later self-lookups. -func (p *parserHelper) addMacroCall(exprID int64, function string, target *exprpb.Expr, args ...*exprpb.Expr) { - macroTarget := target - if target != nil { - if _, found := p.macroCalls[target.GetId()]; found { - macroTarget = &exprpb.Expr{Id: target.GetId()} - } else { - macroTarget = p.buildMacroCallArg(target) - } - } - - macroArgs := make([]*exprpb.Expr, len(args)) +func (p *parserHelper) addMacroCall(exprID int64, function string, target ast.Expr, args ...ast.Expr) { + macroArgs := make([]ast.Expr, len(args)) for index, arg := range args { macroArgs[index] = p.buildMacroCallArg(arg) } - - p.macroCalls[exprID] = &exprpb.Expr{ - ExprKind: &exprpb.Expr_CallExpr{ - CallExpr: &exprpb.Expr_Call{ - Target: macroTarget, - Function: function, - Args: macroArgs, - }, - }, + if target == nil { + p.sourceInfo.SetMacroCall(exprID, p.exprFactory.NewCall(0, function, macroArgs...)) + return } + macroTarget := target + if _, found := p.sourceInfo.GetMacroCall(target.ID()); found { + macroTarget = p.exprFactory.NewUnspecifiedExpr(target.ID()) + } else { + macroTarget = p.buildMacroCallArg(target) + } + p.sourceInfo.SetMacroCall(exprID, p.exprFactory.NewMemberCall(0, function, macroTarget, macroArgs...)) } -// balancer performs tree balancing on operators whose arguments are of equal precedence. +// logicManager compacts logical trees into a more efficient structure which is semantically +// equivalent with how the logic graph is constructed by the ANTLR parser. // -// The purpose of the balancer is to ensure a compact serialization format for the logical &&, || +// The purpose of the logicManager is to ensure a compact serialization format for the logical &&, || // operators which have a tendency to create long DAGs which are skewed in one direction. Since the // operators are commutative re-ordering the terms *must not* affect the evaluation result. // -// Re-balancing the terms is a safe, if somewhat controversial choice. A better solution would be -// to make these functions variadic and update both the checker and interpreter to understand this; -// however, this is a more complex change. -// -// TODO: Consider replacing tree-balancing with variadic logical &&, || within the parser, checker, -// and interpreter. -type balancer struct { - helper *parserHelper - function string - terms []*exprpb.Expr - ops []int64 -} - -// newBalancer creates a balancer instance bound to a specific function and its first term. 
-func newBalancer(h *parserHelper, function string, term *exprpb.Expr) *balancer { - return &balancer{ - helper: h, - function: function, - terms: []*exprpb.Expr{term}, - ops: []int64{}, +// The logic manager will either render the terms to N-chained && / || operators as a single logical +// call with N-terms, or will rebalance the tree. Rebalancing the terms is a safe, if somewhat +// controversial choice as it alters the traditional order of execution assumptions present in most +// expressions. +type logicManager struct { + exprFactory ast.ExprFactory + function string + terms []ast.Expr + ops []int64 + variadicASTs bool +} + +// newVariadicLogicManager creates a logic manager instance bound to a specific function and its first term. +func newVariadicLogicManager(fac ast.ExprFactory, function string, term ast.Expr) *logicManager { + return &logicManager{ + exprFactory: fac, + function: function, + terms: []ast.Expr{term}, + ops: []int64{}, + variadicASTs: true, + } +} + +// newBalancingLogicManager creates a logic manager instance bound to a specific function and its first term. +func newBalancingLogicManager(fac ast.ExprFactory, function string, term ast.Expr) *logicManager { + return &logicManager{ + exprFactory: fac, + function: function, + terms: []ast.Expr{term}, + ops: []int64{}, + variadicASTs: false, } } // addTerm adds an operation identifier and term to the set of terms to be balanced. -func (b *balancer) addTerm(op int64, term *exprpb.Expr) { - b.terms = append(b.terms, term) - b.ops = append(b.ops, op) +func (l *logicManager) addTerm(op int64, term ast.Expr) { + l.terms = append(l.terms, term) + l.ops = append(l.ops, op) } -// balance creates a balanced tree from the sub-terms and returns the final Expr value. -func (b *balancer) balance() *exprpb.Expr { - if len(b.terms) == 1 { - return b.terms[0] +// toExpr renders the logic graph into an Expr value, either balancing a tree of logical +// operations or creating a variadic representation of the logical operator. +func (l *logicManager) toExpr() ast.Expr { + if len(l.terms) == 1 { + return l.terms[0] } - return b.balancedTree(0, len(b.ops)-1) + if l.variadicASTs { + return l.exprFactory.NewCall(l.ops[0], l.function, l.terms...) + } + return l.balancedTree(0, len(l.ops)-1) } // balancedTree recursively balances the terms provided to a commutative operator. -func (b *balancer) balancedTree(lo, hi int) *exprpb.Expr { +func (l *logicManager) balancedTree(lo, hi int) ast.Expr { mid := (lo + hi + 1) / 2 - var left *exprpb.Expr + var left ast.Expr if mid == lo { - left = b.terms[mid] + left = l.terms[mid] } else { - left = b.balancedTree(lo, mid-1) + left = l.balancedTree(lo, mid-1) } - var right *exprpb.Expr + var right ast.Expr if mid == hi { - right = b.terms[mid+1] + right = l.terms[mid+1] } else { - right = b.balancedTree(mid+1, hi) + right = l.balancedTree(mid+1, hi) } - return b.helper.newGlobalCall(b.ops[mid], b.function, left, right) + return l.exprFactory.NewCall(l.ops[mid], l.function, left, right) } type exprHelper struct { @@ -370,197 +311,157 @@ func (e *exprHelper) nextMacroID() int64 { } // Copy implements the ExprHelper interface method by producing a copy of the input Expr value -// with a fresh set of numeric identifiers the Expr and all its descendents. 
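The logicManager introduced above replaces the old balancer: depending on the variadicASTs flag it either emits one N-ary call for a chain of like operators (l.exprFactory.NewCall(l.ops[0], l.function, l.terms...)) or rebalances the chain into a binary tree of roughly logarithmic depth via balancedTree. The following self-contained sketch mirrors that recursion on plain strings purely to illustrate the splitting strategy; none of these identifiers come from cel-go.

    package main

    import "fmt"

    // balance joins terms with a commutative operator, splitting at the middle
    // operator index the way logicManager.balancedTree does: operators lo..mid-1
    // go to the left subtree, mid+1..hi to the right, and the operator at mid
    // joins the two halves.
    func balance(terms []string, lo, hi int) string {
        if hi < lo {
            return terms[lo] // a single term needs no balancing
        }
        mid := (lo + hi + 1) / 2
        var left, right string
        if mid == lo {
            left = terms[mid]
        } else {
            left = balance(terms, lo, mid-1)
        }
        if mid == hi {
            right = terms[mid+1]
        } else {
            right = balance(terms, mid+1, hi)
        }
        return "(" + left + " && " + right + ")"
    }

    func main() {
        terms := []string{"a", "b", "c", "d", "e"}
        // Five terms are joined by four operators; hi indexes the last operator.
        fmt.Println(balance(terms, 0, len(terms)-2))
        // Output: (((a && b) && c) && (d && e))
    }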
-func (e *exprHelper) Copy(expr *exprpb.Expr) *exprpb.Expr { - copy := e.parserHelper.newExpr(e.parserHelper.getLocation(expr.GetId())) - switch expr.GetExprKind().(type) { - case *exprpb.Expr_ConstExpr: - copy.ExprKind = &exprpb.Expr_ConstExpr{ConstExpr: expr.GetConstExpr()} - case *exprpb.Expr_IdentExpr: - copy.ExprKind = &exprpb.Expr_IdentExpr{IdentExpr: expr.GetIdentExpr()} - case *exprpb.Expr_SelectExpr: - op := expr.GetSelectExpr().GetOperand() - copy.ExprKind = &exprpb.Expr_SelectExpr{SelectExpr: &exprpb.Expr_Select{ - Operand: e.Copy(op), - Field: expr.GetSelectExpr().GetField(), - TestOnly: expr.GetSelectExpr().GetTestOnly(), - }} - case *exprpb.Expr_CallExpr: - call := expr.GetCallExpr() - target := call.GetTarget() - if target != nil { - target = e.Copy(target) +// with a fresh set of numeric identifiers the Expr and all its descendants. +func (e *exprHelper) Copy(expr ast.Expr) ast.Expr { + offsetRange, _ := e.parserHelper.sourceInfo.GetOffsetRange(expr.ID()) + copyID := e.parserHelper.newID(offsetRange) + switch expr.Kind() { + case ast.LiteralKind: + return e.exprFactory.NewLiteral(copyID, expr.AsLiteral()) + case ast.IdentKind: + return e.exprFactory.NewIdent(copyID, expr.AsIdent()) + case ast.SelectKind: + sel := expr.AsSelect() + op := e.Copy(sel.Operand()) + if sel.IsTestOnly() { + return e.exprFactory.NewPresenceTest(copyID, op, sel.FieldName()) } - args := call.GetArgs() - argsCopy := make([]*exprpb.Expr, len(args)) + return e.exprFactory.NewSelect(copyID, op, sel.FieldName()) + case ast.CallKind: + call := expr.AsCall() + args := call.Args() + argsCopy := make([]ast.Expr, len(args)) for i, arg := range args { argsCopy[i] = e.Copy(arg) } - copy.ExprKind = &exprpb.Expr_CallExpr{ - CallExpr: &exprpb.Expr_Call{ - Function: call.GetFunction(), - Target: target, - Args: argsCopy, - }, + if !call.IsMemberFunction() { + return e.exprFactory.NewCall(copyID, call.FunctionName(), argsCopy...) } - case *exprpb.Expr_ListExpr: - elems := expr.GetListExpr().GetElements() - elemsCopy := make([]*exprpb.Expr, len(elems)) + return e.exprFactory.NewMemberCall(copyID, call.FunctionName(), e.Copy(call.Target()), argsCopy...) 
+ case ast.ListKind: + list := expr.AsList() + elems := list.Elements() + elemsCopy := make([]ast.Expr, len(elems)) for i, elem := range elems { elemsCopy[i] = e.Copy(elem) } - copy.ExprKind = &exprpb.Expr_ListExpr{ - ListExpr: &exprpb.Expr_CreateList{Elements: elemsCopy}, - } - case *exprpb.Expr_StructExpr: - entries := expr.GetStructExpr().GetEntries() - entriesCopy := make([]*exprpb.Expr_CreateStruct_Entry, len(entries)) - for i, entry := range entries { - entryCopy := &exprpb.Expr_CreateStruct_Entry{} - entryCopy.Id = e.nextMacroID() - switch entry.GetKeyKind().(type) { - case *exprpb.Expr_CreateStruct_Entry_FieldKey: - entryCopy.KeyKind = &exprpb.Expr_CreateStruct_Entry_FieldKey{ - FieldKey: entry.GetFieldKey(), - } - case *exprpb.Expr_CreateStruct_Entry_MapKey: - entryCopy.KeyKind = &exprpb.Expr_CreateStruct_Entry_MapKey{ - MapKey: e.Copy(entry.GetMapKey()), - } - } - entryCopy.Value = e.Copy(entry.GetValue()) - entriesCopy[i] = entryCopy - } - copy.ExprKind = &exprpb.Expr_StructExpr{ - StructExpr: &exprpb.Expr_CreateStruct{ - MessageName: expr.GetStructExpr().GetMessageName(), - Entries: entriesCopy, - }, + return e.exprFactory.NewList(copyID, elemsCopy, list.OptionalIndices()) + case ast.MapKind: + m := expr.AsMap() + entries := m.Entries() + entriesCopy := make([]ast.EntryExpr, len(entries)) + for i, en := range entries { + entry := en.AsMapEntry() + entryID := e.nextMacroID() + entriesCopy[i] = e.exprFactory.NewMapEntry(entryID, + e.Copy(entry.Key()), e.Copy(entry.Value()), entry.IsOptional()) } - case *exprpb.Expr_ComprehensionExpr: - iterRange := e.Copy(expr.GetComprehensionExpr().GetIterRange()) - accuInit := e.Copy(expr.GetComprehensionExpr().GetAccuInit()) - cond := e.Copy(expr.GetComprehensionExpr().GetLoopCondition()) - step := e.Copy(expr.GetComprehensionExpr().GetLoopStep()) - result := e.Copy(expr.GetComprehensionExpr().GetResult()) - copy.ExprKind = &exprpb.Expr_ComprehensionExpr{ - ComprehensionExpr: &exprpb.Expr_Comprehension{ - IterRange: iterRange, - IterVar: expr.GetComprehensionExpr().GetIterVar(), - AccuInit: accuInit, - AccuVar: expr.GetComprehensionExpr().GetAccuVar(), - LoopCondition: cond, - LoopStep: step, - Result: result, - }, + return e.exprFactory.NewMap(copyID, entriesCopy) + case ast.StructKind: + s := expr.AsStruct() + fields := s.Fields() + fieldsCopy := make([]ast.EntryExpr, len(fields)) + for i, f := range fields { + field := f.AsStructField() + fieldID := e.nextMacroID() + fieldsCopy[i] = e.exprFactory.NewStructField(fieldID, + field.Name(), e.Copy(field.Value()), field.IsOptional()) } + return e.exprFactory.NewStruct(copyID, s.TypeName(), fieldsCopy) + case ast.ComprehensionKind: + compre := expr.AsComprehension() + iterRange := e.Copy(compre.IterRange()) + accuInit := e.Copy(compre.AccuInit()) + cond := e.Copy(compre.LoopCondition()) + step := e.Copy(compre.LoopStep()) + result := e.Copy(compre.Result()) + return e.exprFactory.NewComprehension(copyID, + iterRange, compre.IterVar(), compre.AccuVar(), accuInit, cond, step, result) } - return copy -} - -// LiteralBool implements the ExprHelper interface method. -func (e *exprHelper) LiteralBool(value bool) *exprpb.Expr { - return e.parserHelper.newLiteralBool(e.nextMacroID(), value) + return e.exprFactory.NewUnspecifiedExpr(copyID) } -// LiteralBytes implements the ExprHelper interface method. -func (e *exprHelper) LiteralBytes(value []byte) *exprpb.Expr { - return e.parserHelper.newLiteralBytes(e.nextMacroID(), value) -} - -// LiteralDouble implements the ExprHelper interface method. 
-func (e *exprHelper) LiteralDouble(value float64) *exprpb.Expr { - return e.parserHelper.newLiteralDouble(e.nextMacroID(), value) -} - -// LiteralInt implements the ExprHelper interface method. -func (e *exprHelper) LiteralInt(value int64) *exprpb.Expr { - return e.parserHelper.newLiteralInt(e.nextMacroID(), value) -} - -// LiteralString implements the ExprHelper interface method. -func (e *exprHelper) LiteralString(value string) *exprpb.Expr { - return e.parserHelper.newLiteralString(e.nextMacroID(), value) -} - -// LiteralUint implements the ExprHelper interface method. -func (e *exprHelper) LiteralUint(value uint64) *exprpb.Expr { - return e.parserHelper.newLiteralUint(e.nextMacroID(), value) +// NewLiteral implements the ExprHelper interface method. +func (e *exprHelper) NewLiteral(value ref.Val) ast.Expr { + return e.exprFactory.NewLiteral(e.nextMacroID(), value) } // NewList implements the ExprHelper interface method. -func (e *exprHelper) NewList(elems ...*exprpb.Expr) *exprpb.Expr { - return e.parserHelper.newList(e.nextMacroID(), elems) +func (e *exprHelper) NewList(elems ...ast.Expr) ast.Expr { + return e.exprFactory.NewList(e.nextMacroID(), elems, []int32{}) } // NewMap implements the ExprHelper interface method. -func (e *exprHelper) NewMap(entries ...*exprpb.Expr_CreateStruct_Entry) *exprpb.Expr { - return e.parserHelper.newMap(e.nextMacroID(), entries...) +func (e *exprHelper) NewMap(entries ...ast.EntryExpr) ast.Expr { + return e.exprFactory.NewMap(e.nextMacroID(), entries) } // NewMapEntry implements the ExprHelper interface method. -func (e *exprHelper) NewMapEntry(key *exprpb.Expr, val *exprpb.Expr, optional bool) *exprpb.Expr_CreateStruct_Entry { - return e.parserHelper.newMapEntry(e.nextMacroID(), key, val, optional) +func (e *exprHelper) NewMapEntry(key ast.Expr, val ast.Expr, optional bool) ast.EntryExpr { + return e.exprFactory.NewMapEntry(e.nextMacroID(), key, val, optional) } -// NewObject implements the ExprHelper interface method. -func (e *exprHelper) NewObject(typeName string, fieldInits ...*exprpb.Expr_CreateStruct_Entry) *exprpb.Expr { - return e.parserHelper.newObject(e.nextMacroID(), typeName, fieldInits...) +// NewStruct implements the ExprHelper interface method. +func (e *exprHelper) NewStruct(typeName string, fieldInits ...ast.EntryExpr) ast.Expr { + return e.exprFactory.NewStruct(e.nextMacroID(), typeName, fieldInits) } -// NewObjectFieldInit implements the ExprHelper interface method. -func (e *exprHelper) NewObjectFieldInit(field string, init *exprpb.Expr, optional bool) *exprpb.Expr_CreateStruct_Entry { - return e.parserHelper.newObjectField(e.nextMacroID(), field, init, optional) +// NewStructField implements the ExprHelper interface method. +func (e *exprHelper) NewStructField(field string, init ast.Expr, optional bool) ast.EntryExpr { + return e.exprFactory.NewStructField(e.nextMacroID(), field, init, optional) } -// Fold implements the ExprHelper interface method. -func (e *exprHelper) Fold(iterVar string, - iterRange *exprpb.Expr, +// NewComprehension implements the ExprHelper interface method. 
+func (e *exprHelper) NewComprehension( + iterRange ast.Expr, + iterVar string, accuVar string, - accuInit *exprpb.Expr, - condition *exprpb.Expr, - step *exprpb.Expr, - result *exprpb.Expr) *exprpb.Expr { - return e.parserHelper.newComprehension( - e.nextMacroID(), iterVar, iterRange, accuVar, accuInit, condition, step, result) + accuInit ast.Expr, + condition ast.Expr, + step ast.Expr, + result ast.Expr) ast.Expr { + return e.exprFactory.NewComprehension( + e.nextMacroID(), iterRange, iterVar, accuVar, accuInit, condition, step, result) } -// Ident implements the ExprHelper interface method. -func (e *exprHelper) Ident(name string) *exprpb.Expr { - return e.parserHelper.newIdent(e.nextMacroID(), name) +// NewIdent implements the ExprHelper interface method. +func (e *exprHelper) NewIdent(name string) ast.Expr { + return e.exprFactory.NewIdent(e.nextMacroID(), name) } -// AccuIdent implements the ExprHelper interface method. -func (e *exprHelper) AccuIdent() *exprpb.Expr { - return e.parserHelper.newIdent(e.nextMacroID(), AccumulatorName) +// NewAccuIdent implements the ExprHelper interface method. +func (e *exprHelper) NewAccuIdent() ast.Expr { + return e.exprFactory.NewAccuIdent(e.nextMacroID()) } -// GlobalCall implements the ExprHelper interface method. -func (e *exprHelper) GlobalCall(function string, args ...*exprpb.Expr) *exprpb.Expr { - return e.parserHelper.newGlobalCall(e.nextMacroID(), function, args...) +// NewGlobalCall implements the ExprHelper interface method. +func (e *exprHelper) NewCall(function string, args ...ast.Expr) ast.Expr { + return e.exprFactory.NewCall(e.nextMacroID(), function, args...) } -// ReceiverCall implements the ExprHelper interface method. -func (e *exprHelper) ReceiverCall(function string, - target *exprpb.Expr, args ...*exprpb.Expr) *exprpb.Expr { - return e.parserHelper.newReceiverCall(e.nextMacroID(), function, target, args...) +// NewMemberCall implements the ExprHelper interface method. +func (e *exprHelper) NewMemberCall(function string, target ast.Expr, args ...ast.Expr) ast.Expr { + return e.exprFactory.NewMemberCall(e.nextMacroID(), function, target, args...) } -// PresenceTest implements the ExprHelper interface method. -func (e *exprHelper) PresenceTest(operand *exprpb.Expr, field string) *exprpb.Expr { - return e.parserHelper.newPresenceTest(e.nextMacroID(), operand, field) +// NewPresenceTest implements the ExprHelper interface method. +func (e *exprHelper) NewPresenceTest(operand ast.Expr, field string) ast.Expr { + return e.exprFactory.NewPresenceTest(e.nextMacroID(), operand, field) } -// Select implements the ExprHelper interface method. -func (e *exprHelper) Select(operand *exprpb.Expr, field string) *exprpb.Expr { - return e.parserHelper.newSelect(e.nextMacroID(), operand, field) +// NewSelect implements the ExprHelper interface method. +func (e *exprHelper) NewSelect(operand ast.Expr, field string) ast.Expr { + return e.exprFactory.NewSelect(e.nextMacroID(), operand, field) } // OffsetLocation implements the ExprHelper interface method. func (e *exprHelper) OffsetLocation(exprID int64) common.Location { - offset := e.parserHelper.positions[exprID] - location, _ := e.parserHelper.source.OffsetLocation(offset) - return location + return e.parserHelper.sourceInfo.GetStartLocation(exprID) +} + +// NewError associates an error message with a given expression id, populating the source offset location of the error if possible. 
+func (e *exprHelper) NewError(exprID int64, message string) *common.Error { + return common.NewError(exprID, message, e.OffsetLocation(exprID)) } var ( diff --git a/vendor/github.com/google/cel-go/parser/input.go b/vendor/github.com/google/cel-go/parser/input.go index 810eaff21..44792455d 100644 --- a/vendor/github.com/google/cel-go/parser/input.go +++ b/vendor/github.com/google/cel-go/parser/input.go @@ -15,7 +15,7 @@ package parser import ( - antlr "github.com/antlr/antlr4/runtime/Go/antlr/v4" + antlr "github.com/antlr4-go/antlr/v4" "github.com/google/cel-go/common/runes" ) @@ -110,7 +110,7 @@ func (c *charStream) GetTextFromTokens(start, stop antlr.Token) string { } // GetTextFromInterval implements (antlr.CharStream).GetTextFromInterval. -func (c *charStream) GetTextFromInterval(i *antlr.Interval) string { +func (c *charStream) GetTextFromInterval(i antlr.Interval) string { return c.GetText(i.Start, i.Stop) } diff --git a/vendor/github.com/google/cel-go/parser/macro.go b/vendor/github.com/google/cel-go/parser/macro.go index 80e5c66c6..5b1775bed 100644 --- a/vendor/github.com/google/cel-go/parser/macro.go +++ b/vendor/github.com/google/cel-go/parser/macro.go @@ -18,9 +18,10 @@ import ( "fmt" "github.com/google/cel-go/common" + "github.com/google/cel-go/common/ast" "github.com/google/cel-go/common/operators" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" + "github.com/google/cel-go/common/types" + "github.com/google/cel-go/common/types/ref" ) // NewGlobalMacro creates a Macro for a global function with the specified arg count. @@ -142,58 +143,38 @@ func makeVarArgMacroKey(name string, receiverStyle bool) string { // and produces as output an Expr ast node. // // Note: when the Macro.IsReceiverStyle() method returns true, the target argument will be nil. -type MacroExpander func(eh ExprHelper, - target *exprpb.Expr, - args []*exprpb.Expr) (*exprpb.Expr, *common.Error) +type MacroExpander func(eh ExprHelper, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) -// ExprHelper assists with the manipulation of proto-based Expr values in a manner which is -// consistent with the source position and expression id generation code leveraged by both -// the parser and type-checker. +// ExprHelper assists with the creation of Expr values in a manner which is consistent +// the internal semantics and id generation behaviors of the parser and checker libraries. type ExprHelper interface { // Copy the input expression with a brand new set of identifiers. - Copy(*exprpb.Expr) *exprpb.Expr - - // LiteralBool creates an Expr value for a bool literal. - LiteralBool(value bool) *exprpb.Expr - - // LiteralBytes creates an Expr value for a byte literal. - LiteralBytes(value []byte) *exprpb.Expr - - // LiteralDouble creates an Expr value for double literal. - LiteralDouble(value float64) *exprpb.Expr - - // LiteralInt creates an Expr value for an int literal. - LiteralInt(value int64) *exprpb.Expr + Copy(ast.Expr) ast.Expr - // LiteralString creates am Expr value for a string literal. - LiteralString(value string) *exprpb.Expr + // Literal creates an Expr value for a scalar literal value. + NewLiteral(value ref.Val) ast.Expr - // LiteralUint creates an Expr value for a uint literal. - LiteralUint(value uint64) *exprpb.Expr - - // NewList creates a CreateList instruction where the list is comprised of the optional set - // of elements provided as arguments. 
- NewList(elems ...*exprpb.Expr) *exprpb.Expr + // NewList creates a list literal instruction with an optional set of elements. + NewList(elems ...ast.Expr) ast.Expr // NewMap creates a CreateStruct instruction for a map where the map is comprised of the // optional set of key, value entries. - NewMap(entries ...*exprpb.Expr_CreateStruct_Entry) *exprpb.Expr + NewMap(entries ...ast.EntryExpr) ast.Expr // NewMapEntry creates a Map Entry for the key, value pair. - NewMapEntry(key *exprpb.Expr, val *exprpb.Expr, optional bool) *exprpb.Expr_CreateStruct_Entry + NewMapEntry(key ast.Expr, val ast.Expr, optional bool) ast.EntryExpr - // NewObject creates a CreateStruct instruction for an object with a given type name and - // optional set of field initializers. - NewObject(typeName string, fieldInits ...*exprpb.Expr_CreateStruct_Entry) *exprpb.Expr + // NewStruct creates a struct literal expression with an optional set of field initializers. + NewStruct(typeName string, fieldInits ...ast.EntryExpr) ast.Expr - // NewObjectFieldInit creates a new Object field initializer from the field name and value. - NewObjectFieldInit(field string, init *exprpb.Expr, optional bool) *exprpb.Expr_CreateStruct_Entry + // NewStructField creates a new struct field initializer from the field name and value. + NewStructField(field string, init ast.Expr, optional bool) ast.EntryExpr - // Fold creates a fold comprehension instruction. + // NewComprehension creates a new comprehension instruction. // - // - iterVar is the iteration variable name. // - iterRange represents the expression that resolves to a list or map where the elements or // keys (respectively) will be iterated over. + // - iterVar is the iteration variable name. // - accuVar is the accumulation variable name, typically parser.AccumulatorName. // - accuInit is the initial expression whose value will be set for the accuVar prior to // folding. @@ -204,34 +185,37 @@ type ExprHelper interface { // The accuVar should not shadow variable names that you would like to reference within the // environment in the step and condition expressions. Presently, the name __result__ is commonly // used by built-in macros but this may change in the future. - Fold(iterVar string, - iterRange *exprpb.Expr, + NewComprehension(iterRange ast.Expr, + iterVar string, accuVar string, - accuInit *exprpb.Expr, - condition *exprpb.Expr, - step *exprpb.Expr, - result *exprpb.Expr) *exprpb.Expr + accuInit ast.Expr, + condition ast.Expr, + step ast.Expr, + result ast.Expr) ast.Expr - // Ident creates an identifier Expr value. - Ident(name string) *exprpb.Expr + // NewIdent creates an identifier Expr value. + NewIdent(name string) ast.Expr - // AccuIdent returns an accumulator identifier for use with comprehension results. - AccuIdent() *exprpb.Expr + // NewAccuIdent returns an accumulator identifier for use with comprehension results. + NewAccuIdent() ast.Expr - // GlobalCall creates a function call Expr value for a global (free) function. - GlobalCall(function string, args ...*exprpb.Expr) *exprpb.Expr + // NewCall creates a function call Expr value for a global (free) function. + NewCall(function string, args ...ast.Expr) ast.Expr - // ReceiverCall creates a function call Expr value for a receiver-style function. - ReceiverCall(function string, target *exprpb.Expr, args ...*exprpb.Expr) *exprpb.Expr + // NewMemberCall creates a function call Expr value for a receiver-style function. 
+ NewMemberCall(function string, target ast.Expr, args ...ast.Expr) ast.Expr - // PresenceTest creates a Select TestOnly Expr value for modelling has() semantics. - PresenceTest(operand *exprpb.Expr, field string) *exprpb.Expr + // NewPresenceTest creates a Select TestOnly Expr value for modelling has() semantics. + NewPresenceTest(operand ast.Expr, field string) ast.Expr - // Select create a field traversal Expr value. - Select(operand *exprpb.Expr, field string) *exprpb.Expr + // NewSelect create a field traversal Expr value. + NewSelect(operand ast.Expr, field string) ast.Expr // OffsetLocation returns the Location of the expression identifier. OffsetLocation(exprID int64) common.Location + + // NewError associates an error message with a given expression id. + NewError(exprID int64, message string) *common.Error } var ( @@ -293,21 +277,21 @@ const ( // MakeAll expands the input call arguments into a comprehension that returns true if all of the // elements in the range match the predicate expressions: // .all(, ) -func MakeAll(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) { +func MakeAll(eh ExprHelper, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) { return makeQuantifier(quantifierAll, eh, target, args) } // MakeExists expands the input call arguments into a comprehension that returns true if any of the // elements in the range match the predicate expressions: // .exists(, ) -func MakeExists(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) { +func MakeExists(eh ExprHelper, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) { return makeQuantifier(quantifierExists, eh, target, args) } // MakeExistsOne expands the input call arguments into a comprehension that returns true if exactly // one of the elements in the range match the predicate expressions: // .exists_one(, ) -func MakeExistsOne(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) { +func MakeExistsOne(eh ExprHelper, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) { return makeQuantifier(quantifierExistsOne, eh, target, args) } @@ -321,14 +305,14 @@ func MakeExistsOne(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*ex // // In the second form only iterVar values which return true when provided to the predicate expression // are transformed. 
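With MacroExpander now taking and returning ast.Expr values and reporting problems through eh.NewError, a custom macro is written directly against the ExprHelper methods listed above. A hedged sketch under stated assumptions: the macro name pair, the function expandPair, and its registration are illustrative and do not appear in this patch.

    package macros

    import (
        "github.com/google/cel-go/common"
        "github.com/google/cel-go/common/ast"
        "github.com/google/cel-go/parser"
    )

    // expandPair rewrites the hypothetical global macro pair(a, b) into a
    // two-element list literal using the new ExprHelper surface.
    func expandPair(eh parser.ExprHelper, _ ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) {
        // Registered with a fixed arg count of two (see pairMacro below), so the
        // parser only invokes this expander with exactly two arguments.
        if args[0].Kind() == ast.CallKind {
            // Arbitrary illustrative restriction, reported the same way the
            // built-in macros report theirs: NewError carries the expression id
            // so the error is tied to a source offset.
            return nil, eh.NewError(args[0].ID(), "pair() does not accept call expressions")
        }
        return eh.NewList(args[0], args[1]), nil
    }

    // Registration reuses the existing NewGlobalMacro helper with a fixed arity.
    var pairMacro = parser.NewGlobalMacro("pair", 2, expandPair)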
-func MakeMap(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) { +func MakeMap(eh ExprHelper, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) { v, found := extractIdent(args[0]) if !found { - return nil, &common.Error{Message: "argument is not an identifier"} + return nil, eh.NewError(args[0].ID(), "argument is not an identifier") } - var fn *exprpb.Expr - var filter *exprpb.Expr + var fn ast.Expr + var filter ast.Expr if len(args) == 3 { filter = args[1] @@ -338,88 +322,85 @@ func MakeMap(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.E fn = args[1] } - accuExpr := eh.Ident(AccumulatorName) + accuExpr := eh.NewAccuIdent() init := eh.NewList() - condition := eh.LiteralBool(true) - step := eh.GlobalCall(operators.Add, accuExpr, eh.NewList(fn)) + condition := eh.NewLiteral(types.True) + step := eh.NewCall(operators.Add, accuExpr, eh.NewList(fn)) if filter != nil { - step = eh.GlobalCall(operators.Conditional, filter, step, accuExpr) + step = eh.NewCall(operators.Conditional, filter, step, accuExpr) } - return eh.Fold(v, target, AccumulatorName, init, condition, step, accuExpr), nil + return eh.NewComprehension(target, v, AccumulatorName, init, condition, step, accuExpr), nil } // MakeFilter expands the input call arguments into a comprehension which produces a list which contains // only elements which match the provided predicate expression: // .filter(, ) -func MakeFilter(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) { +func MakeFilter(eh ExprHelper, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) { v, found := extractIdent(args[0]) if !found { - return nil, &common.Error{Message: "argument is not an identifier"} + return nil, eh.NewError(args[0].ID(), "argument is not an identifier") } filter := args[1] - accuExpr := eh.Ident(AccumulatorName) + accuExpr := eh.NewAccuIdent() init := eh.NewList() - condition := eh.LiteralBool(true) - step := eh.GlobalCall(operators.Add, accuExpr, eh.NewList(args[0])) - step = eh.GlobalCall(operators.Conditional, filter, step, accuExpr) - return eh.Fold(v, target, AccumulatorName, init, condition, step, accuExpr), nil + condition := eh.NewLiteral(types.True) + step := eh.NewCall(operators.Add, accuExpr, eh.NewList(args[0])) + step = eh.NewCall(operators.Conditional, filter, step, accuExpr) + return eh.NewComprehension(target, v, AccumulatorName, init, condition, step, accuExpr), nil } // MakeHas expands the input call arguments into a presence test, e.g. 
has(.field) -func MakeHas(eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) { - if s, ok := args[0].ExprKind.(*exprpb.Expr_SelectExpr); ok { - return eh.PresenceTest(s.SelectExpr.GetOperand(), s.SelectExpr.GetField()), nil +func MakeHas(eh ExprHelper, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) { + if args[0].Kind() == ast.SelectKind { + s := args[0].AsSelect() + return eh.NewPresenceTest(s.Operand(), s.FieldName()), nil } - return nil, &common.Error{Message: "invalid argument to has() macro"} + return nil, eh.NewError(args[0].ID(), "invalid argument to has() macro") } -func makeQuantifier(kind quantifierKind, eh ExprHelper, target *exprpb.Expr, args []*exprpb.Expr) (*exprpb.Expr, *common.Error) { +func makeQuantifier(kind quantifierKind, eh ExprHelper, target ast.Expr, args []ast.Expr) (ast.Expr, *common.Error) { v, found := extractIdent(args[0]) if !found { - location := eh.OffsetLocation(args[0].GetId()) - return nil, &common.Error{ - Message: "argument must be a simple name", - Location: location, - } + return nil, eh.NewError(args[0].ID(), "argument must be a simple name") } - var init *exprpb.Expr - var condition *exprpb.Expr - var step *exprpb.Expr - var result *exprpb.Expr + var init ast.Expr + var condition ast.Expr + var step ast.Expr + var result ast.Expr switch kind { case quantifierAll: - init = eh.LiteralBool(true) - condition = eh.GlobalCall(operators.NotStrictlyFalse, eh.AccuIdent()) - step = eh.GlobalCall(operators.LogicalAnd, eh.AccuIdent(), args[1]) - result = eh.AccuIdent() + init = eh.NewLiteral(types.True) + condition = eh.NewCall(operators.NotStrictlyFalse, eh.NewAccuIdent()) + step = eh.NewCall(operators.LogicalAnd, eh.NewAccuIdent(), args[1]) + result = eh.NewAccuIdent() case quantifierExists: - init = eh.LiteralBool(false) - condition = eh.GlobalCall( + init = eh.NewLiteral(types.False) + condition = eh.NewCall( operators.NotStrictlyFalse, - eh.GlobalCall(operators.LogicalNot, eh.AccuIdent())) - step = eh.GlobalCall(operators.LogicalOr, eh.AccuIdent(), args[1]) - result = eh.AccuIdent() + eh.NewCall(operators.LogicalNot, eh.NewAccuIdent())) + step = eh.NewCall(operators.LogicalOr, eh.NewAccuIdent(), args[1]) + result = eh.NewAccuIdent() case quantifierExistsOne: - zeroExpr := eh.LiteralInt(0) - oneExpr := eh.LiteralInt(1) + zeroExpr := eh.NewLiteral(types.Int(0)) + oneExpr := eh.NewLiteral(types.Int(1)) init = zeroExpr - condition = eh.LiteralBool(true) - step = eh.GlobalCall(operators.Conditional, args[1], - eh.GlobalCall(operators.Add, eh.AccuIdent(), oneExpr), eh.AccuIdent()) - result = eh.GlobalCall(operators.Equals, eh.AccuIdent(), oneExpr) + condition = eh.NewLiteral(types.True) + step = eh.NewCall(operators.Conditional, args[1], + eh.NewCall(operators.Add, eh.NewAccuIdent(), oneExpr), eh.NewAccuIdent()) + result = eh.NewCall(operators.Equals, eh.NewAccuIdent(), oneExpr) default: - return nil, &common.Error{Message: fmt.Sprintf("unrecognized quantifier '%v'", kind)} + return nil, eh.NewError(args[0].ID(), fmt.Sprintf("unrecognized quantifier '%v'", kind)) } - return eh.Fold(v, target, AccumulatorName, init, condition, step, result), nil + return eh.NewComprehension(target, v, AccumulatorName, init, condition, step, result), nil } -func extractIdent(e *exprpb.Expr) (string, bool) { - switch e.ExprKind.(type) { - case *exprpb.Expr_IdentExpr: - return e.GetIdentExpr().GetName(), true +func extractIdent(e ast.Expr) (string, bool) { + switch e.Kind() { + case ast.IdentKind: + return e.AsIdent(), true } return 
"", false } diff --git a/vendor/github.com/google/cel-go/parser/options.go b/vendor/github.com/google/cel-go/parser/options.go index 674c697c5..61fc3adec 100644 --- a/vendor/github.com/google/cel-go/parser/options.go +++ b/vendor/github.com/google/cel-go/parser/options.go @@ -25,6 +25,7 @@ type options struct { macros map[string]Macro populateMacroCalls bool enableOptionalSyntax bool + enableVariadicOperatorASTs bool } // Option configures the behavior of the parser. @@ -125,3 +126,15 @@ func EnableOptionalSyntax(optionalSyntax bool) Option { return nil } } + +// EnableVariadicOperatorASTs enables a compact representation of chained like-kind commutative +// operators. e.g. `a || b || c || d` -> `call(op='||', args=[a, b, c, d])` +// +// The benefit of enabling variadic operators ASTs is a more compact representation deeply nested +// logic graphs. +func EnableVariadicOperatorASTs(varArgASTs bool) Option { + return func(opts *options) error { + opts.enableVariadicOperatorASTs = varArgASTs + return nil + } +} diff --git a/vendor/github.com/google/cel-go/parser/parser.go b/vendor/github.com/google/cel-go/parser/parser.go index e6f70f906..cb753df73 100644 --- a/vendor/github.com/google/cel-go/parser/parser.go +++ b/vendor/github.com/google/cel-go/parser/parser.go @@ -21,17 +21,15 @@ import ( "regexp" "strconv" "strings" - "sync" - antlr "github.com/antlr/antlr4/runtime/Go/antlr/v4" + antlr "github.com/antlr4-go/antlr/v4" "github.com/google/cel-go/common" + "github.com/google/cel-go/common/ast" "github.com/google/cel-go/common/operators" "github.com/google/cel-go/common/runes" + "github.com/google/cel-go/common/types" "github.com/google/cel-go/parser/gen" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" - structpb "google.golang.org/protobuf/types/known/structpb" ) // Parser encapsulates the context necessary to perform parsing for different expressions. @@ -88,10 +86,13 @@ func mustNewParser(opts ...Option) *Parser { } // Parse parses the expression represented by source and returns the result. 
-func (p *Parser) Parse(source common.Source) (*exprpb.ParsedExpr, *common.Errors) { +func (p *Parser) Parse(source common.Source) (*ast.AST, *common.Errors) { + errs := common.NewErrors(source) + fac := ast.NewExprFactory() impl := parser{ - errors: &parseErrors{common.NewErrors(source)}, - helper: newParserHelper(source), + errors: &parseErrors{errs}, + exprFactory: fac, + helper: newParserHelper(source, fac), macros: p.macros, maxRecursionDepth: p.maxRecursionDepth, errorReportingLimit: p.errorReportingLimit, @@ -99,23 +100,21 @@ func (p *Parser) Parse(source common.Source) (*exprpb.ParsedExpr, *common.Errors errorRecoveryLookaheadTokenLimit: p.errorRecoveryTokenLookaheadLimit, populateMacroCalls: p.populateMacroCalls, enableOptionalSyntax: p.enableOptionalSyntax, + enableVariadicOperatorASTs: p.enableVariadicOperatorASTs, } buf, ok := source.(runes.Buffer) if !ok { buf = runes.NewBuffer(source.Content()) } - var e *exprpb.Expr + var out ast.Expr if buf.Len() > p.expressionSizeCodePointLimit { - e = impl.reportError(common.NoLocation, + out = impl.reportError(common.NoLocation, "expression code point size exceeds limit: size: %d, limit %d", buf.Len(), p.expressionSizeCodePointLimit) } else { - e = impl.parse(buf, source.Description()) + out = impl.parse(buf, source.Description()) } - return &exprpb.ParsedExpr{ - Expr: e, - SourceInfo: impl.helper.getSourceInfo(), - }, impl.errors.Errors + return ast.NewAST(out, impl.helper.getSourceInfo()), errs } // reservedIds are not legal to use as variables. We exclude them post-parse, as they *are* valid @@ -148,7 +147,7 @@ var reservedIds = map[string]struct{}{ // This function calls ParseWithMacros with AllMacros. // // Deprecated: Use NewParser().Parse() instead. -func Parse(source common.Source) (*exprpb.ParsedExpr, *common.Errors) { +func Parse(source common.Source) (*ast.AST, *common.Errors) { return mustNewParser(Macros(AllMacros...)).Parse(source) } @@ -285,6 +284,7 @@ var _ antlr.ErrorStrategy = &recoveryLimitErrorStrategy{} type parser struct { gen.BaseCELVisitor errors *parseErrors + exprFactory ast.ExprFactory helper *parserHelper macros map[string]Macro recursionDepth int @@ -295,55 +295,24 @@ type parser struct { errorRecoveryLookaheadTokenLimit int populateMacroCalls bool enableOptionalSyntax bool + enableVariadicOperatorASTs bool } -var ( - _ gen.CELVisitor = (*parser)(nil) +var _ gen.CELVisitor = (*parser)(nil) - lexerPool *sync.Pool = &sync.Pool{ - New: func() any { - l := gen.NewCELLexer(nil) - l.RemoveErrorListeners() - return l - }, - } - - parserPool *sync.Pool = &sync.Pool{ - New: func() any { - p := gen.NewCELParser(nil) - p.RemoveErrorListeners() - return p - }, - } -) +func (p *parser) parse(expr runes.Buffer, desc string) ast.Expr { + lexer := gen.NewCELLexer(newCharStream(expr, desc)) + lexer.RemoveErrorListeners() + lexer.AddErrorListener(p) -func (p *parser) parse(expr runes.Buffer, desc string) *exprpb.Expr { - // TODO: get rid of these pools once https://github.com/antlr/antlr4/pull/3571 is in a release - lexer := lexerPool.Get().(*gen.CELLexer) - prsr := parserPool.Get().(*gen.CELParser) + prsr := gen.NewCELParser(antlr.NewCommonTokenStream(lexer, 0)) + prsr.RemoveErrorListeners() prsrListener := &recursionListener{ maxDepth: p.maxRecursionDepth, ruleTypeDepth: map[int]*int{}, } - defer func() { - // Unfortunately ANTLR Go runtime is missing (*antlr.BaseParser).RemoveParseListeners, - // so this is good enough until that is exported. - // Reset the lexer and parser before putting them back in the pool. 
- lexer.RemoveErrorListeners() - prsr.RemoveParseListener(prsrListener) - prsr.RemoveErrorListeners() - lexer.SetInputStream(nil) - prsr.SetInputStream(nil) - lexerPool.Put(lexer) - parserPool.Put(prsr) - }() - - lexer.SetInputStream(newCharStream(expr, desc)) - prsr.SetInputStream(antlr.NewCommonTokenStream(lexer, 0)) - - lexer.AddErrorListener(p) prsr.AddErrorListener(p) prsr.AddParseListener(prsrListener) @@ -357,9 +326,9 @@ func (p *parser) parse(expr runes.Buffer, desc string) *exprpb.Expr { if val := recover(); val != nil { switch err := val.(type) { case *lookaheadLimitError: - p.errors.ReportError(common.NoLocation, err.Error()) + p.errors.internalError(err.Error()) case *recursionError: - p.errors.ReportError(common.NoLocation, err.Error()) + p.errors.internalError(err.Error()) case *tooManyErrors: // do nothing case *recoveryLimitError: @@ -370,7 +339,7 @@ func (p *parser) parse(expr runes.Buffer, desc string) *exprpb.Expr { } }() - return p.Visit(prsr.Start()).(*exprpb.Expr) + return p.Visit(prsr.Start_()).(ast.Expr) } // Visitor implementations. @@ -449,7 +418,7 @@ func (p *parser) Visit(tree antlr.ParseTree) any { // Report at least one error if the parser reaches an unknown parse element. // Typically, this happens if the parser has already encountered a syntax error elsewhere. - if len(p.errors.GetErrors()) == 0 { + if p.errors.errorCount() == 0 { txt := "<>" if t != nil { txt = fmt.Sprintf("<<%T>>", t) @@ -467,46 +436,46 @@ func (p *parser) VisitStart(ctx *gen.StartContext) any { // Visit a parse tree produced by CELParser#expr. func (p *parser) VisitExpr(ctx *gen.ExprContext) any { - result := p.Visit(ctx.GetE()).(*exprpb.Expr) + result := p.Visit(ctx.GetE()).(ast.Expr) if ctx.GetOp() == nil { return result } opID := p.helper.id(ctx.GetOp()) - ifTrue := p.Visit(ctx.GetE1()).(*exprpb.Expr) - ifFalse := p.Visit(ctx.GetE2()).(*exprpb.Expr) + ifTrue := p.Visit(ctx.GetE1()).(ast.Expr) + ifFalse := p.Visit(ctx.GetE2()).(ast.Expr) return p.globalCallOrMacro(opID, operators.Conditional, result, ifTrue, ifFalse) } // Visit a parse tree produced by CELParser#conditionalOr. func (p *parser) VisitConditionalOr(ctx *gen.ConditionalOrContext) any { - result := p.Visit(ctx.GetE()).(*exprpb.Expr) - b := newBalancer(p.helper, operators.LogicalOr, result) + result := p.Visit(ctx.GetE()).(ast.Expr) + l := p.newLogicManager(operators.LogicalOr, result) rest := ctx.GetE1() for i, op := range ctx.GetOps() { if i >= len(rest) { return p.reportError(ctx, "unexpected character, wanted '||'") } - next := p.Visit(rest[i]).(*exprpb.Expr) + next := p.Visit(rest[i]).(ast.Expr) opID := p.helper.id(op) - b.addTerm(opID, next) + l.addTerm(opID, next) } - return b.balance() + return l.toExpr() } // Visit a parse tree produced by CELParser#conditionalAnd. func (p *parser) VisitConditionalAnd(ctx *gen.ConditionalAndContext) any { - result := p.Visit(ctx.GetE()).(*exprpb.Expr) - b := newBalancer(p.helper, operators.LogicalAnd, result) + result := p.Visit(ctx.GetE()).(ast.Expr) + l := p.newLogicManager(operators.LogicalAnd, result) rest := ctx.GetE1() for i, op := range ctx.GetOps() { if i >= len(rest) { return p.reportError(ctx, "unexpected character, wanted '&&'") } - next := p.Visit(rest[i]).(*exprpb.Expr) + next := p.Visit(rest[i]).(ast.Expr) opID := p.helper.id(op) - b.addTerm(opID, next) + l.addTerm(opID, next) } - return b.balance() + return l.toExpr() } // Visit a parse tree produced by CELParser#relation. 
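The logic-manager calls above replace the old balancer and, together with the EnableVariadicOperatorASTs option added in options.go, allow chained '&&'/'||' terms to be kept as a single variadic call. A rough usage sketch follows; NewParser, Macros, AllMacros, common.NewTextSource, and the Errors/AST accessors are assumed from the existing cel-go API rather than from this hunk.

package main

import (
	"fmt"

	"github.com/google/cel-go/common"
	"github.com/google/cel-go/parser"
)

func main() {
	// Assumed constructors: parser.NewParser, parser.Macros, parser.AllMacros,
	// and common.NewTextSource; EnableVariadicOperatorASTs is the new option.
	p, err := parser.NewParser(
		parser.Macros(parser.AllMacros...),
		parser.EnableVariadicOperatorASTs(true),
	)
	if err != nil {
		panic(err)
	}
	parsed, iss := p.Parse(common.NewTextSource("a || b || c || d"))
	if len(iss.GetErrors()) != 0 {
		panic(iss.ToDisplayString())
	}
	// Parse now returns *ast.AST. With variadic ASTs enabled the expression is
	// a single '||' call carrying all four terms as arguments instead of a
	// balanced tree of nested binary calls.
	fmt.Println(len(parsed.Expr().AsCall().Args())) // expected: 4
}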
@@ -516,9 +485,9 @@ func (p *parser) VisitRelation(ctx *gen.RelationContext) any { opText = ctx.GetOp().GetText() } if op, found := operators.Find(opText); found { - lhs := p.Visit(ctx.Relation(0)).(*exprpb.Expr) + lhs := p.Visit(ctx.Relation(0)).(ast.Expr) opID := p.helper.id(ctx.GetOp()) - rhs := p.Visit(ctx.Relation(1)).(*exprpb.Expr) + rhs := p.Visit(ctx.Relation(1)).(ast.Expr) return p.globalCallOrMacro(opID, op, lhs, rhs) } return p.reportError(ctx, "operator not found") @@ -531,9 +500,9 @@ func (p *parser) VisitCalc(ctx *gen.CalcContext) any { opText = ctx.GetOp().GetText() } if op, found := operators.Find(opText); found { - lhs := p.Visit(ctx.Calc(0)).(*exprpb.Expr) + lhs := p.Visit(ctx.Calc(0)).(ast.Expr) opID := p.helper.id(ctx.GetOp()) - rhs := p.Visit(ctx.Calc(1)).(*exprpb.Expr) + rhs := p.Visit(ctx.Calc(1)).(ast.Expr) return p.globalCallOrMacro(opID, op, lhs, rhs) } return p.reportError(ctx, "operator not found") @@ -549,7 +518,7 @@ func (p *parser) VisitLogicalNot(ctx *gen.LogicalNotContext) any { return p.Visit(ctx.Member()) } opID := p.helper.id(ctx.GetOps()[0]) - target := p.Visit(ctx.Member()).(*exprpb.Expr) + target := p.Visit(ctx.Member()).(ast.Expr) return p.globalCallOrMacro(opID, operators.LogicalNot, target) } @@ -558,13 +527,13 @@ func (p *parser) VisitNegate(ctx *gen.NegateContext) any { return p.Visit(ctx.Member()) } opID := p.helper.id(ctx.GetOps()[0]) - target := p.Visit(ctx.Member()).(*exprpb.Expr) + target := p.Visit(ctx.Member()).(ast.Expr) return p.globalCallOrMacro(opID, operators.Negate, target) } // VisitSelect visits a parse tree produced by CELParser#Select. func (p *parser) VisitSelect(ctx *gen.SelectContext) any { - operand := p.Visit(ctx.Member()).(*exprpb.Expr) + operand := p.Visit(ctx.Member()).(ast.Expr) // Handle the error case where no valid identifier is specified. if ctx.GetId() == nil || ctx.GetOp() == nil { return p.helper.newExpr(ctx) @@ -585,7 +554,7 @@ func (p *parser) VisitSelect(ctx *gen.SelectContext) any { // VisitMemberCall visits a parse tree produced by CELParser#MemberCall. func (p *parser) VisitMemberCall(ctx *gen.MemberCallContext) any { - operand := p.Visit(ctx.Member()).(*exprpb.Expr) + operand := p.Visit(ctx.Member()).(ast.Expr) // Handle the error case where no valid identifier is specified. if ctx.GetId() == nil { return p.helper.newExpr(ctx) @@ -597,13 +566,13 @@ func (p *parser) VisitMemberCall(ctx *gen.MemberCallContext) any { // Visit a parse tree produced by CELParser#Index. func (p *parser) VisitIndex(ctx *gen.IndexContext) any { - target := p.Visit(ctx.Member()).(*exprpb.Expr) + target := p.Visit(ctx.Member()).(ast.Expr) // Handle the error case where no valid identifier is specified. if ctx.GetOp() == nil { return p.helper.newExpr(ctx) } opID := p.helper.id(ctx.GetOp()) - index := p.Visit(ctx.GetIndex()).(*exprpb.Expr) + index := p.Visit(ctx.GetIndex()).(ast.Expr) operator := operators.Index if ctx.GetOpt() != nil { if !p.enableOptionalSyntax { @@ -627,7 +596,7 @@ func (p *parser) VisitCreateMessage(ctx *gen.CreateMessageContext) any { messageName = "." + messageName } objID := p.helper.id(ctx.GetOp()) - entries := p.VisitIFieldInitializerList(ctx.GetEntries()).([]*exprpb.Expr_CreateStruct_Entry) + entries := p.VisitIFieldInitializerList(ctx.GetEntries()).([]ast.EntryExpr) return p.helper.newObject(objID, messageName, entries...) 
} @@ -635,16 +604,16 @@ func (p *parser) VisitCreateMessage(ctx *gen.CreateMessageContext) any { func (p *parser) VisitIFieldInitializerList(ctx gen.IFieldInitializerListContext) any { if ctx == nil || ctx.GetFields() == nil { // This is the result of a syntax error handled elswhere, return empty. - return []*exprpb.Expr_CreateStruct_Entry{} + return []ast.EntryExpr{} } - result := make([]*exprpb.Expr_CreateStruct_Entry, len(ctx.GetFields())) + result := make([]ast.EntryExpr, len(ctx.GetFields())) cols := ctx.GetCols() vals := ctx.GetValues() for i, f := range ctx.GetFields() { if i >= len(cols) || i >= len(vals) { // This is the result of a syntax error detected elsewhere. - return []*exprpb.Expr_CreateStruct_Entry{} + return []ast.EntryExpr{} } initID := p.helper.id(cols[i]) optField := f.(*gen.OptFieldContext) @@ -656,10 +625,10 @@ func (p *parser) VisitIFieldInitializerList(ctx gen.IFieldInitializerListContext // The field may be empty due to a prior error. id := optField.IDENTIFIER() if id == nil { - return []*exprpb.Expr_CreateStruct_Entry{} + return []ast.EntryExpr{} } fieldName := id.GetText() - value := p.Visit(vals[i]).(*exprpb.Expr) + value := p.Visit(vals[i]).(ast.Expr) field := p.helper.newObjectField(initID, fieldName, value, optional) result[i] = field } @@ -699,9 +668,9 @@ func (p *parser) VisitCreateList(ctx *gen.CreateListContext) any { // Visit a parse tree produced by CELParser#CreateStruct. func (p *parser) VisitCreateStruct(ctx *gen.CreateStructContext) any { structID := p.helper.id(ctx.GetOp()) - entries := []*exprpb.Expr_CreateStruct_Entry{} + entries := []ast.EntryExpr{} if ctx.GetEntries() != nil { - entries = p.Visit(ctx.GetEntries()).([]*exprpb.Expr_CreateStruct_Entry) + entries = p.Visit(ctx.GetEntries()).([]ast.EntryExpr) } return p.helper.newMap(structID, entries...) } @@ -710,17 +679,17 @@ func (p *parser) VisitCreateStruct(ctx *gen.CreateStructContext) any { func (p *parser) VisitMapInitializerList(ctx *gen.MapInitializerListContext) any { if ctx == nil || ctx.GetKeys() == nil { // This is the result of a syntax error handled elswhere, return empty. - return []*exprpb.Expr_CreateStruct_Entry{} + return []ast.EntryExpr{} } - result := make([]*exprpb.Expr_CreateStruct_Entry, len(ctx.GetCols())) + result := make([]ast.EntryExpr, len(ctx.GetCols())) keys := ctx.GetKeys() vals := ctx.GetValues() for i, col := range ctx.GetCols() { colID := p.helper.id(col) if i >= len(keys) || i >= len(vals) { // This is the result of a syntax error detected elsewhere. - return []*exprpb.Expr_CreateStruct_Entry{} + return []ast.EntryExpr{} } optKey := keys[i] optional := optKey.GetOpt() != nil @@ -728,8 +697,8 @@ func (p *parser) VisitMapInitializerList(ctx *gen.MapInitializerListContext) any p.reportError(optKey, "unsupported syntax '?'") continue } - key := p.Visit(optKey.GetE()).(*exprpb.Expr) - value := p.Visit(vals[i]).(*exprpb.Expr) + key := p.Visit(optKey.GetE()).(ast.Expr) + value := p.Visit(vals[i]).(ast.Expr) entry := p.helper.newMapEntry(colID, key, value, optional) result[i] = entry } @@ -809,30 +778,27 @@ func (p *parser) VisitBoolFalse(ctx *gen.BoolFalseContext) any { // Visit a parse tree produced by CELParser#Null. 
func (p *parser) VisitNull(ctx *gen.NullContext) any { - return p.helper.newLiteral(ctx, - &exprpb.Constant{ - ConstantKind: &exprpb.Constant_NullValue{ - NullValue: structpb.NullValue_NULL_VALUE}}) + return p.helper.exprFactory.NewLiteral(p.helper.newID(ctx), types.NullValue) } -func (p *parser) visitExprList(ctx gen.IExprListContext) []*exprpb.Expr { +func (p *parser) visitExprList(ctx gen.IExprListContext) []ast.Expr { if ctx == nil { - return []*exprpb.Expr{} + return []ast.Expr{} } return p.visitSlice(ctx.GetE()) } -func (p *parser) visitListInit(ctx gen.IListInitContext) ([]*exprpb.Expr, []int32) { +func (p *parser) visitListInit(ctx gen.IListInitContext) ([]ast.Expr, []int32) { if ctx == nil { - return []*exprpb.Expr{}, []int32{} + return []ast.Expr{}, []int32{} } elements := ctx.GetElems() - result := make([]*exprpb.Expr, len(elements)) + result := make([]ast.Expr, len(elements)) optionals := []int32{} for i, e := range elements { - ex := p.Visit(e.GetE()).(*exprpb.Expr) + ex := p.Visit(e.GetE()).(ast.Expr) if ex == nil { - return []*exprpb.Expr{}, []int32{} + return []ast.Expr{}, []int32{} } result[i] = ex if e.GetOpt() != nil { @@ -846,13 +812,13 @@ func (p *parser) visitListInit(ctx gen.IListInitContext) ([]*exprpb.Expr, []int3 return result, optionals } -func (p *parser) visitSlice(expressions []gen.IExprContext) []*exprpb.Expr { +func (p *parser) visitSlice(expressions []gen.IExprContext) []ast.Expr { if expressions == nil { - return []*exprpb.Expr{} + return []ast.Expr{} } - result := make([]*exprpb.Expr, len(expressions)) + result := make([]ast.Expr, len(expressions)) for i, e := range expressions { - ex := p.Visit(e).(*exprpb.Expr) + ex := p.Visit(e).(ast.Expr) result[i] = ex } return result @@ -867,18 +833,24 @@ func (p *parser) unquote(ctx any, value string, isBytes bool) string { return text } -func (p *parser) reportError(ctx any, format string, args ...any) *exprpb.Expr { +func (p *parser) newLogicManager(function string, term ast.Expr) *logicManager { + if p.enableVariadicOperatorASTs { + return newVariadicLogicManager(p.exprFactory, function, term) + } + return newBalancingLogicManager(p.exprFactory, function, term) +} + +func (p *parser) reportError(ctx any, format string, args ...any) ast.Expr { var location common.Location - switch ctx.(type) { + err := p.helper.newExpr(ctx) + switch c := ctx.(type) { case common.Location: - location = ctx.(common.Location) + location = c case antlr.Token, antlr.ParserRuleContext: - err := p.helper.newExpr(ctx) - location = p.helper.getLocation(err.GetId()) + location = p.helper.getLocation(err.ID()) } - err := p.helper.newExpr(ctx) // Provide arguments to the report error. - p.errors.ReportError(location, format, args...) + p.errors.reportErrorAtID(err.ID(), location, format, args...) 
return err } @@ -903,33 +875,33 @@ func (p *parser) SyntaxError(recognizer antlr.Recognizer, offendingSymbol any, l } } -func (p *parser) ReportAmbiguity(recognizer antlr.Parser, dfa *antlr.DFA, startIndex, stopIndex int, exact bool, ambigAlts *antlr.BitSet, configs antlr.ATNConfigSet) { +func (p *parser) ReportAmbiguity(recognizer antlr.Parser, dfa *antlr.DFA, startIndex, stopIndex int, exact bool, ambigAlts *antlr.BitSet, configs *antlr.ATNConfigSet) { // Intentional } -func (p *parser) ReportAttemptingFullContext(recognizer antlr.Parser, dfa *antlr.DFA, startIndex, stopIndex int, conflictingAlts *antlr.BitSet, configs antlr.ATNConfigSet) { +func (p *parser) ReportAttemptingFullContext(recognizer antlr.Parser, dfa *antlr.DFA, startIndex, stopIndex int, conflictingAlts *antlr.BitSet, configs *antlr.ATNConfigSet) { // Intentional } -func (p *parser) ReportContextSensitivity(recognizer antlr.Parser, dfa *antlr.DFA, startIndex, stopIndex, prediction int, configs antlr.ATNConfigSet) { +func (p *parser) ReportContextSensitivity(recognizer antlr.Parser, dfa *antlr.DFA, startIndex, stopIndex, prediction int, configs *antlr.ATNConfigSet) { // Intentional } -func (p *parser) globalCallOrMacro(exprID int64, function string, args ...*exprpb.Expr) *exprpb.Expr { +func (p *parser) globalCallOrMacro(exprID int64, function string, args ...ast.Expr) ast.Expr { if expr, found := p.expandMacro(exprID, function, nil, args...); found { return expr } return p.helper.newGlobalCall(exprID, function, args...) } -func (p *parser) receiverCallOrMacro(exprID int64, function string, target *exprpb.Expr, args ...*exprpb.Expr) *exprpb.Expr { +func (p *parser) receiverCallOrMacro(exprID int64, function string, target ast.Expr, args ...ast.Expr) ast.Expr { if expr, found := p.expandMacro(exprID, function, target, args...); found { return expr } return p.helper.newReceiverCall(exprID, function, target, args...) } -func (p *parser) expandMacro(exprID int64, function string, target *exprpb.Expr, args ...*exprpb.Expr) (*exprpb.Expr, bool) { +func (p *parser) expandMacro(exprID int64, function string, target ast.Expr, args ...ast.Expr) (ast.Expr, bool) { macro, found := p.macros[makeMacroKey(function, len(args), target != nil)] if !found { macro, found = p.macros[makeVarArgMacroKey(function, target != nil)] @@ -955,7 +927,7 @@ func (p *parser) expandMacro(exprID int64, function string, target *exprpb.Expr, return nil, false } if p.populateMacroCalls { - p.helper.addMacroCall(expr.GetId(), function, target, args...) + p.helper.addMacroCall(expr.ID(), function, target, args...) } return expr, true } diff --git a/vendor/github.com/google/cel-go/parser/unparser.go b/vendor/github.com/google/cel-go/parser/unparser.go index c3c40a0dd..91cf72944 100644 --- a/vendor/github.com/google/cel-go/parser/unparser.go +++ b/vendor/github.com/google/cel-go/parser/unparser.go @@ -20,9 +20,9 @@ import ( "strconv" "strings" + "github.com/google/cel-go/common/ast" "github.com/google/cel-go/common/operators" - - exprpb "google.golang.org/genproto/googleapis/api/expr/v1alpha1" + "github.com/google/cel-go/common/types" ) // Unparse takes an input expression and source position information and generates a human-readable @@ -39,7 +39,7 @@ import ( // // This function optionally takes in one or more UnparserOption to alter the unparsing behavior, such as // performing word wrapping on expressions. 
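Unparse now accepts the ast-native expression and source info directly, so a parse/unparse round trip no longer goes through the protobuf types. A small sketch, assuming parser.Parse, common.NewTextSource, and the AST.Expr()/SourceInfo() accessors from the surrounding packages:

package main

import (
	"fmt"

	"github.com/google/cel-go/common"
	"github.com/google/cel-go/parser"
)

func main() {
	parsed, iss := parser.Parse(common.NewTextSource(`has(msg.field) && size(items) > 2u && ratio < 0.5`))
	if len(iss.GetErrors()) != 0 {
		panic(iss.ToDisplayString())
	}
	// ast.Expr and *ast.SourceInfo are passed straight through; the literal
	// rendering rules in visitConst below keep the 'u' suffix on uints and a
	// decimal point on doubles.
	out, err := parser.Unparse(parsed.Expr(), parsed.SourceInfo())
	if err != nil {
		panic(err)
	}
	fmt.Println(out)
}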
-func Unparse(expr *exprpb.Expr, info *exprpb.SourceInfo, opts ...UnparserOption) (string, error) { +func Unparse(expr ast.Expr, info *ast.SourceInfo, opts ...UnparserOption) (string, error) { unparserOpts := &unparserOption{ wrapOnColumn: defaultWrapOnColumn, wrapAfterColumnLimit: defaultWrapAfterColumnLimit, @@ -68,12 +68,12 @@ func Unparse(expr *exprpb.Expr, info *exprpb.SourceInfo, opts ...UnparserOption) // unparser visits an expression to reconstruct a human-readable string from an AST. type unparser struct { str strings.Builder - info *exprpb.SourceInfo + info *ast.SourceInfo options *unparserOption lastWrappedIndex int } -func (un *unparser) visit(expr *exprpb.Expr) error { +func (un *unparser) visit(expr ast.Expr) error { if expr == nil { return errors.New("unsupported expression") } @@ -81,27 +81,29 @@ func (un *unparser) visit(expr *exprpb.Expr) error { if visited || err != nil { return err } - switch expr.GetExprKind().(type) { - case *exprpb.Expr_CallExpr: + switch expr.Kind() { + case ast.CallKind: return un.visitCall(expr) - case *exprpb.Expr_ConstExpr: + case ast.LiteralKind: return un.visitConst(expr) - case *exprpb.Expr_IdentExpr: + case ast.IdentKind: return un.visitIdent(expr) - case *exprpb.Expr_ListExpr: + case ast.ListKind: return un.visitList(expr) - case *exprpb.Expr_SelectExpr: + case ast.MapKind: + return un.visitStructMap(expr) + case ast.SelectKind: return un.visitSelect(expr) - case *exprpb.Expr_StructExpr: - return un.visitStruct(expr) + case ast.StructKind: + return un.visitStructMsg(expr) default: return fmt.Errorf("unsupported expression: %v", expr) } } -func (un *unparser) visitCall(expr *exprpb.Expr) error { - c := expr.GetCallExpr() - fun := c.GetFunction() +func (un *unparser) visitCall(expr ast.Expr) error { + c := expr.AsCall() + fun := c.FunctionName() switch fun { // ternary operator case operators.Conditional: @@ -141,10 +143,10 @@ func (un *unparser) visitCall(expr *exprpb.Expr) error { } } -func (un *unparser) visitCallBinary(expr *exprpb.Expr) error { - c := expr.GetCallExpr() - fun := c.GetFunction() - args := c.GetArgs() +func (un *unparser) visitCallBinary(expr ast.Expr) error { + c := expr.AsCall() + fun := c.FunctionName() + args := c.Args() lhs := args[0] // add parens if the current operator is lower precedence than the lhs expr operator. lhsParen := isComplexOperatorWithRespectTo(fun, lhs) @@ -168,9 +170,9 @@ func (un *unparser) visitCallBinary(expr *exprpb.Expr) error { return un.visitMaybeNested(rhs, rhsParen) } -func (un *unparser) visitCallConditional(expr *exprpb.Expr) error { - c := expr.GetCallExpr() - args := c.GetArgs() +func (un *unparser) visitCallConditional(expr ast.Expr) error { + c := expr.AsCall() + args := c.Args() // add parens if operand is a conditional itself. 
nested := isSamePrecedence(operators.Conditional, args[0]) || isComplexOperator(args[0]) @@ -196,13 +198,13 @@ func (un *unparser) visitCallConditional(expr *exprpb.Expr) error { return un.visitMaybeNested(args[2], nested) } -func (un *unparser) visitCallFunc(expr *exprpb.Expr) error { - c := expr.GetCallExpr() - fun := c.GetFunction() - args := c.GetArgs() - if c.GetTarget() != nil { - nested := isBinaryOrTernaryOperator(c.GetTarget()) - err := un.visitMaybeNested(c.GetTarget(), nested) +func (un *unparser) visitCallFunc(expr ast.Expr) error { + c := expr.AsCall() + fun := c.FunctionName() + args := c.Args() + if c.IsMemberFunction() { + nested := isBinaryOrTernaryOperator(c.Target()) + err := un.visitMaybeNested(c.Target(), nested) if err != nil { return err } @@ -223,17 +225,17 @@ func (un *unparser) visitCallFunc(expr *exprpb.Expr) error { return nil } -func (un *unparser) visitCallIndex(expr *exprpb.Expr) error { +func (un *unparser) visitCallIndex(expr ast.Expr) error { return un.visitCallIndexInternal(expr, "[") } -func (un *unparser) visitCallOptIndex(expr *exprpb.Expr) error { +func (un *unparser) visitCallOptIndex(expr ast.Expr) error { return un.visitCallIndexInternal(expr, "[?") } -func (un *unparser) visitCallIndexInternal(expr *exprpb.Expr, op string) error { - c := expr.GetCallExpr() - args := c.GetArgs() +func (un *unparser) visitCallIndexInternal(expr ast.Expr, op string) error { + c := expr.AsCall() + args := c.Args() nested := isBinaryOrTernaryOperator(args[0]) err := un.visitMaybeNested(args[0], nested) if err != nil { @@ -248,10 +250,10 @@ func (un *unparser) visitCallIndexInternal(expr *exprpb.Expr, op string) error { return nil } -func (un *unparser) visitCallUnary(expr *exprpb.Expr) error { - c := expr.GetCallExpr() - fun := c.GetFunction() - args := c.GetArgs() +func (un *unparser) visitCallUnary(expr ast.Expr) error { + c := expr.AsCall() + fun := c.FunctionName() + args := c.Args() unmangled, found := operators.FindReverse(fun) if !found { return fmt.Errorf("cannot unmangle operator: %s", fun) @@ -261,35 +263,34 @@ func (un *unparser) visitCallUnary(expr *exprpb.Expr) error { return un.visitMaybeNested(args[0], nested) } -func (un *unparser) visitConst(expr *exprpb.Expr) error { - c := expr.GetConstExpr() - switch c.GetConstantKind().(type) { - case *exprpb.Constant_BoolValue: - un.str.WriteString(strconv.FormatBool(c.GetBoolValue())) - case *exprpb.Constant_BytesValue: +func (un *unparser) visitConst(expr ast.Expr) error { + val := expr.AsLiteral() + switch val := val.(type) { + case types.Bool: + un.str.WriteString(strconv.FormatBool(bool(val))) + case types.Bytes: // bytes constants are surrounded with b"" - b := c.GetBytesValue() un.str.WriteString(`b"`) - un.str.WriteString(bytesToOctets(b)) + un.str.WriteString(bytesToOctets([]byte(val))) un.str.WriteString(`"`) - case *exprpb.Constant_DoubleValue: + case types.Double: // represent the float using the minimum required digits - d := strconv.FormatFloat(c.GetDoubleValue(), 'g', -1, 64) + d := strconv.FormatFloat(float64(val), 'g', -1, 64) un.str.WriteString(d) if !strings.Contains(d, ".") { un.str.WriteString(".0") } - case *exprpb.Constant_Int64Value: - i := strconv.FormatInt(c.GetInt64Value(), 10) + case types.Int: + i := strconv.FormatInt(int64(val), 10) un.str.WriteString(i) - case *exprpb.Constant_NullValue: + case types.Null: un.str.WriteString("null") - case *exprpb.Constant_StringValue: + case types.String: // strings will be double quoted with quotes escaped. 
- un.str.WriteString(strconv.Quote(c.GetStringValue())) - case *exprpb.Constant_Uint64Value: + un.str.WriteString(strconv.Quote(string(val))) + case types.Uint: // uint literals have a 'u' suffix. - ui := strconv.FormatUint(c.GetUint64Value(), 10) + ui := strconv.FormatUint(uint64(val), 10) un.str.WriteString(ui) un.str.WriteString("u") default: @@ -298,16 +299,16 @@ func (un *unparser) visitConst(expr *exprpb.Expr) error { return nil } -func (un *unparser) visitIdent(expr *exprpb.Expr) error { - un.str.WriteString(expr.GetIdentExpr().GetName()) +func (un *unparser) visitIdent(expr ast.Expr) error { + un.str.WriteString(expr.AsIdent()) return nil } -func (un *unparser) visitList(expr *exprpb.Expr) error { - l := expr.GetListExpr() - elems := l.GetElements() +func (un *unparser) visitList(expr ast.Expr) error { + l := expr.AsList() + elems := l.Elements() optIndices := make(map[int]bool, len(elems)) - for _, idx := range l.GetOptionalIndices() { + for _, idx := range l.OptionalIndices() { optIndices[int(idx)] = true } un.str.WriteString("[") @@ -327,20 +328,20 @@ func (un *unparser) visitList(expr *exprpb.Expr) error { return nil } -func (un *unparser) visitOptSelect(expr *exprpb.Expr) error { - c := expr.GetCallExpr() - args := c.GetArgs() +func (un *unparser) visitOptSelect(expr ast.Expr) error { + c := expr.AsCall() + args := c.Args() operand := args[0] - field := args[1].GetConstExpr().GetStringValue() - return un.visitSelectInternal(operand, false, ".?", field) + field := args[1].AsLiteral().(types.String) + return un.visitSelectInternal(operand, false, ".?", string(field)) } -func (un *unparser) visitSelect(expr *exprpb.Expr) error { - sel := expr.GetSelectExpr() - return un.visitSelectInternal(sel.GetOperand(), sel.GetTestOnly(), ".", sel.GetField()) +func (un *unparser) visitSelect(expr ast.Expr) error { + sel := expr.AsSelect() + return un.visitSelectInternal(sel.Operand(), sel.IsTestOnly(), ".", sel.FieldName()) } -func (un *unparser) visitSelectInternal(operand *exprpb.Expr, testOnly bool, op string, field string) error { +func (un *unparser) visitSelectInternal(operand ast.Expr, testOnly bool, op string, field string) error { // handle the case when the select expression was generated by the has() macro. if testOnly { un.str.WriteString("has(") @@ -358,34 +359,25 @@ func (un *unparser) visitSelectInternal(operand *exprpb.Expr, testOnly bool, op return nil } -func (un *unparser) visitStruct(expr *exprpb.Expr) error { - s := expr.GetStructExpr() - // If the message name is non-empty, then this should be treated as message construction. - if s.GetMessageName() != "" { - return un.visitStructMsg(expr) - } - // Otherwise, build a map. 
- return un.visitStructMap(expr) -} - -func (un *unparser) visitStructMsg(expr *exprpb.Expr) error { - m := expr.GetStructExpr() - entries := m.GetEntries() - un.str.WriteString(m.GetMessageName()) +func (un *unparser) visitStructMsg(expr ast.Expr) error { + m := expr.AsStruct() + fields := m.Fields() + un.str.WriteString(m.TypeName()) un.str.WriteString("{") - for i, entry := range entries { - f := entry.GetFieldKey() - if entry.GetOptionalEntry() { + for i, f := range fields { + field := f.AsStructField() + f := field.Name() + if field.IsOptional() { un.str.WriteString("?") } un.str.WriteString(f) un.str.WriteString(": ") - v := entry.GetValue() + v := field.Value() err := un.visit(v) if err != nil { return err } - if i < len(entries)-1 { + if i < len(fields)-1 { un.str.WriteString(", ") } } @@ -393,13 +385,14 @@ func (un *unparser) visitStructMsg(expr *exprpb.Expr) error { return nil } -func (un *unparser) visitStructMap(expr *exprpb.Expr) error { - m := expr.GetStructExpr() - entries := m.GetEntries() +func (un *unparser) visitStructMap(expr ast.Expr) error { + m := expr.AsMap() + entries := m.Entries() un.str.WriteString("{") - for i, entry := range entries { - k := entry.GetMapKey() - if entry.GetOptionalEntry() { + for i, e := range entries { + entry := e.AsMapEntry() + k := entry.Key() + if entry.IsOptional() { un.str.WriteString("?") } err := un.visit(k) @@ -407,7 +400,7 @@ func (un *unparser) visitStructMap(expr *exprpb.Expr) error { return err } un.str.WriteString(": ") - v := entry.GetValue() + v := entry.Value() err = un.visit(v) if err != nil { return err @@ -420,16 +413,15 @@ func (un *unparser) visitStructMap(expr *exprpb.Expr) error { return nil } -func (un *unparser) visitMaybeMacroCall(expr *exprpb.Expr) (bool, error) { - macroCalls := un.info.GetMacroCalls() - call, found := macroCalls[expr.GetId()] +func (un *unparser) visitMaybeMacroCall(expr ast.Expr) (bool, error) { + call, found := un.info.GetMacroCall(expr.ID()) if !found { return false, nil } return true, un.visit(call) } -func (un *unparser) visitMaybeNested(expr *exprpb.Expr, nested bool) error { +func (un *unparser) visitMaybeNested(expr ast.Expr, nested bool) error { if nested { un.str.WriteString("(") } @@ -453,12 +445,12 @@ func isLeftRecursive(op string) bool { // precedence of the (possible) operation represented in the input Expr. // // If the expr is not a Call, the result is false. -func isSamePrecedence(op string, expr *exprpb.Expr) bool { - if expr.GetCallExpr() == nil { +func isSamePrecedence(op string, expr ast.Expr) bool { + if expr.Kind() != ast.CallKind { return false } - c := expr.GetCallExpr() - other := c.GetFunction() + c := expr.AsCall() + other := c.FunctionName() return operators.Precedence(op) == operators.Precedence(other) } @@ -466,16 +458,16 @@ func isSamePrecedence(op string, expr *exprpb.Expr) bool { // than the (possible) operation represented in the input Expr. // // If the expr is not a Call, the result is false. -func isLowerPrecedence(op string, expr *exprpb.Expr) bool { - c := expr.GetCallExpr() - other := c.GetFunction() +func isLowerPrecedence(op string, expr ast.Expr) bool { + c := expr.AsCall() + other := c.FunctionName() return operators.Precedence(op) < operators.Precedence(other) } // Indicates whether the expr is a complex operator, i.e., a call expression // with 2 or more arguments. 
-func isComplexOperator(expr *exprpb.Expr) bool { - if expr.GetCallExpr() != nil && len(expr.GetCallExpr().GetArgs()) >= 2 { +func isComplexOperator(expr ast.Expr) bool { + if expr.Kind() == ast.CallKind && len(expr.AsCall().Args()) >= 2 { return true } return false @@ -484,19 +476,19 @@ func isComplexOperator(expr *exprpb.Expr) bool { // Indicates whether it is a complex operation compared to another. // expr is *not* considered complex if it is not a call expression or has // less than two arguments, or if it has a higher precedence than op. -func isComplexOperatorWithRespectTo(op string, expr *exprpb.Expr) bool { - if expr.GetCallExpr() == nil || len(expr.GetCallExpr().GetArgs()) < 2 { +func isComplexOperatorWithRespectTo(op string, expr ast.Expr) bool { + if expr.Kind() != ast.CallKind || len(expr.AsCall().Args()) < 2 { return false } return isLowerPrecedence(op, expr) } // Indicate whether this is a binary or ternary operator. -func isBinaryOrTernaryOperator(expr *exprpb.Expr) bool { - if expr.GetCallExpr() == nil || len(expr.GetCallExpr().GetArgs()) < 2 { +func isBinaryOrTernaryOperator(expr ast.Expr) bool { + if expr.Kind() != ast.CallKind || len(expr.AsCall().Args()) < 2 { return false } - _, isBinaryOp := operators.FindReverseBinaryOperator(expr.GetCallExpr().GetFunction()) + _, isBinaryOp := operators.FindReverseBinaryOperator(expr.AsCall().FunctionName()) return isBinaryOp || isSamePrecedence(operators.Conditional, expr) } diff --git a/vendor/github.com/onsi/ginkgo/v2/CHANGELOG.md b/vendor/github.com/onsi/ginkgo/v2/CHANGELOG.md index 102bb529f..ec91408f9 100644 --- a/vendor/github.com/onsi/ginkgo/v2/CHANGELOG.md +++ b/vendor/github.com/onsi/ginkgo/v2/CHANGELOG.md @@ -1,3 +1,9 @@ +## 2.13.2 + +### Fixes +- Fix file handler leak (#1309) [e2e81c8] +- Avoid allocations with `(*regexp.Regexp).MatchString` (#1302) [3b2a2a7] + ## 2.13.1 ### Fixes diff --git a/vendor/github.com/onsi/ginkgo/v2/ginkgo/internal/test_suite.go b/vendor/github.com/onsi/ginkgo/v2/ginkgo/internal/test_suite.go index f3ae13bb1..df99875be 100644 --- a/vendor/github.com/onsi/ginkgo/v2/ginkgo/internal/test_suite.go +++ b/vendor/github.com/onsi/ginkgo/v2/ginkgo/internal/test_suite.go @@ -226,7 +226,7 @@ func suitesInDir(dir string, recurse bool) TestSuites { files, _ := os.ReadDir(dir) re := regexp.MustCompile(`^[^._].*_test\.go$`) for _, file := range files { - if !file.IsDir() && re.Match([]byte(file.Name())) { + if !file.IsDir() && re.MatchString(file.Name()) { suite := TestSuite{ Path: relPath(dir), PackageName: packageNameForSuite(dir), @@ -241,7 +241,7 @@ func suitesInDir(dir string, recurse bool) TestSuites { if recurse { re = regexp.MustCompile(`^[._]`) for _, file := range files { - if file.IsDir() && !re.Match([]byte(file.Name())) { + if file.IsDir() && !re.MatchString(file.Name()) { suites = append(suites, suitesInDir(dir+"/"+file.Name(), recurse)...) 
} } @@ -272,7 +272,7 @@ func filesHaveGinkgoSuite(dir string, files []os.DirEntry) bool { reGinkgo := regexp.MustCompile(`package ginkgo|\/ginkgo"|\/ginkgo\/v2"|\/ginkgo\/v2/dsl/`) for _, file := range files { - if !file.IsDir() && reTestFile.Match([]byte(file.Name())) { + if !file.IsDir() && reTestFile.MatchString(file.Name()) { contents, _ := os.ReadFile(dir + "/" + file.Name()) if reGinkgo.Match(contents) { return true diff --git a/vendor/github.com/onsi/ginkgo/v2/ginkgo/watch/dependencies.go b/vendor/github.com/onsi/ginkgo/v2/ginkgo/watch/dependencies.go index f5ddff30f..a34d94354 100644 --- a/vendor/github.com/onsi/ginkgo/v2/ginkgo/watch/dependencies.go +++ b/vendor/github.com/onsi/ginkgo/v2/ginkgo/watch/dependencies.go @@ -78,7 +78,7 @@ func (d Dependencies) resolveAndAdd(deps []string, depth int) { if err != nil { continue } - if !pkg.Goroot && (!ginkgoAndGomegaFilter.Match([]byte(pkg.Dir)) || ginkgoIntegrationTestFilter.Match([]byte(pkg.Dir))) { + if !pkg.Goroot && (!ginkgoAndGomegaFilter.MatchString(pkg.Dir) || ginkgoIntegrationTestFilter.MatchString(pkg.Dir)) { d.addDepIfNotPresent(pkg.Dir, depth) } } diff --git a/vendor/github.com/onsi/ginkgo/v2/ginkgo/watch/package_hash.go b/vendor/github.com/onsi/ginkgo/v2/ginkgo/watch/package_hash.go index e9f7ec0cb..17d052bdc 100644 --- a/vendor/github.com/onsi/ginkgo/v2/ginkgo/watch/package_hash.go +++ b/vendor/github.com/onsi/ginkgo/v2/ginkgo/watch/package_hash.go @@ -79,7 +79,7 @@ func (p *PackageHash) computeHashes() (codeHash string, codeModifiedTime time.Ti continue } - if goTestRegExp.Match([]byte(info.Name())) { + if goTestRegExp.MatchString(info.Name()) { testHash += p.hashForFileInfo(info) if info.ModTime().After(testModifiedTime) { testModifiedTime = info.ModTime() @@ -87,7 +87,7 @@ func (p *PackageHash) computeHashes() (codeHash string, codeModifiedTime time.Ti continue } - if p.watchRegExp.Match([]byte(info.Name())) { + if p.watchRegExp.MatchString(info.Name()) { codeHash += p.hashForFileInfo(info) if info.ModTime().After(codeModifiedTime) { codeModifiedTime = info.ModTime() diff --git a/vendor/github.com/onsi/ginkgo/v2/reporters/json_report.go b/vendor/github.com/onsi/ginkgo/v2/reporters/json_report.go index be506f9b4..5d3e8db99 100644 --- a/vendor/github.com/onsi/ginkgo/v2/reporters/json_report.go +++ b/vendor/github.com/onsi/ginkgo/v2/reporters/json_report.go @@ -18,6 +18,7 @@ func GenerateJSONReport(report types.Report, destination string) error { if err != nil { return err } + defer f.Close() enc := json.NewEncoder(f) enc.SetIndent("", " ") err = enc.Encode([]types.Report{ @@ -26,7 +27,7 @@ func GenerateJSONReport(report types.Report, destination string) error { if err != nil { return err } - return f.Close() + return nil } // MergeJSONReports produces a single JSON-formatted report at the passed in destination by merging the JSON-formatted reports provided in sources @@ -57,11 +58,12 @@ func MergeAndCleanupJSONReports(sources []string, destination string) ([]string, if err != nil { return messages, err } + defer f.Close() enc := json.NewEncoder(f) enc.SetIndent("", " ") err = enc.Encode(allReports) if err != nil { return messages, err } - return messages, f.Close() + return messages, nil } diff --git a/vendor/github.com/onsi/ginkgo/v2/types/code_location.go b/vendor/github.com/onsi/ginkgo/v2/types/code_location.go index 9cd576817..57e87517e 100644 --- a/vendor/github.com/onsi/ginkgo/v2/types/code_location.go +++ b/vendor/github.com/onsi/ginkgo/v2/types/code_location.go @@ -149,7 +149,7 @@ func PruneStack(fullStackTrace 
string, skip int) string { re := regexp.MustCompile(`\/ginkgo\/|\/pkg\/testing\/|\/pkg\/runtime\/`) for i := 0; i < len(stack)/2; i++ { // We filter out based on the source code file name. - if !re.Match([]byte(stack[i*2+1])) { + if !re.MatchString(stack[i*2+1]) { prunedStack = append(prunedStack, stack[i*2]) prunedStack = append(prunedStack, stack[i*2+1]) } diff --git a/vendor/github.com/onsi/ginkgo/v2/types/version.go b/vendor/github.com/onsi/ginkgo/v2/types/version.go index 7a794d87a..a4a1524b4 100644 --- a/vendor/github.com/onsi/ginkgo/v2/types/version.go +++ b/vendor/github.com/onsi/ginkgo/v2/types/version.go @@ -1,3 +1,3 @@ package types -const VERSION = "2.13.1" +const VERSION = "2.13.2" diff --git a/vendor/golang.org/x/crypto/cryptobyte/asn1.go b/vendor/golang.org/x/crypto/cryptobyte/asn1.go index 6fc2838a3..2492f796a 100644 --- a/vendor/golang.org/x/crypto/cryptobyte/asn1.go +++ b/vendor/golang.org/x/crypto/cryptobyte/asn1.go @@ -733,13 +733,14 @@ func (s *String) ReadOptionalASN1OctetString(out *[]byte, outPresent *bool, tag return true } -// ReadOptionalASN1Boolean sets *out to the value of the next ASN.1 BOOLEAN or, -// if the next bytes are not an ASN.1 BOOLEAN, to the value of defaultValue. -// It reports whether the operation was successful. -func (s *String) ReadOptionalASN1Boolean(out *bool, defaultValue bool) bool { +// ReadOptionalASN1Boolean attempts to read an optional ASN.1 BOOLEAN +// explicitly tagged with tag into out and advances. If no element with a +// matching tag is present, it sets "out" to defaultValue instead. It reports +// whether the read was successful. +func (s *String) ReadOptionalASN1Boolean(out *bool, tag asn1.Tag, defaultValue bool) bool { var present bool var child String - if !s.ReadOptionalASN1(&child, &present, asn1.BOOLEAN) { + if !s.ReadOptionalASN1(&child, &present, tag) { return false } @@ -748,7 +749,7 @@ func (s *String) ReadOptionalASN1Boolean(out *bool, defaultValue bool) bool { return true } - return s.ReadASN1Boolean(out) + return child.ReadASN1Boolean(out) } func (s *String) readASN1(out *String, outTag *asn1.Tag, skipHeader bool) bool { diff --git a/vendor/golang.org/x/crypto/ssh/client_auth.go b/vendor/golang.org/x/crypto/ssh/client_auth.go index 5c3bc2572..34bf089d0 100644 --- a/vendor/golang.org/x/crypto/ssh/client_auth.go +++ b/vendor/golang.org/x/crypto/ssh/client_auth.go @@ -307,7 +307,10 @@ func (cb publicKeyCallback) auth(session []byte, user string, c packetConn, rand } var methods []string var errSigAlgo error - for _, signer := range signers { + + origSignersLen := len(signers) + for idx := 0; idx < len(signers); idx++ { + signer := signers[idx] pub := signer.PublicKey() as, algo, err := pickSignatureAlgorithm(signer, extensions) if err != nil && errSigAlgo == nil { @@ -321,6 +324,21 @@ func (cb publicKeyCallback) auth(session []byte, user string, c packetConn, rand if err != nil { return authFailure, nil, err } + // OpenSSH 7.2-7.7 advertises support for rsa-sha2-256 and rsa-sha2-512 + // in the "server-sig-algs" extension but doesn't support these + // algorithms for certificate authentication, so if the server rejects + // the key try to use the obtained algorithm as if "server-sig-algs" had + // not been implemented if supported from the algorithm signer. + if !ok && idx < origSignersLen && isRSACert(algo) && algo != CertAlgoRSAv01 { + if contains(as.Algorithms(), KeyAlgoRSA) { + // We retry using the compat algorithm after all signers have + // been tried normally. 
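Returning to the cryptobyte hunk above: ReadOptionalASN1Boolean now takes the expected tag explicitly and reads the BOOLEAN from inside that element, so existing two-argument callers need updating. A self-contained illustration, with DER bytes invented for the example:

package main

import (
	"fmt"

	"golang.org/x/crypto/cryptobyte"
	"golang.org/x/crypto/cryptobyte/asn1"
)

func main() {
	// [0] EXPLICIT BOOLEAN TRUE, followed by INTEGER 5.
	der := []byte{0xa0, 0x03, 0x01, 0x01, 0xff, 0x02, 0x01, 0x05}
	s := cryptobyte.String(der)

	tag := asn1.Tag(0).Constructed().ContextSpecific()
	var flag bool
	if !s.ReadOptionalASN1Boolean(&flag, tag, false) {
		panic("malformed input")
	}
	fmt.Println(flag) // true, read from the tagged element

	// When no element with the tag is present, defaultValue is used instead.
	s2 := cryptobyte.String([]byte{0x02, 0x01, 0x05}) // just INTEGER 5
	var flag2 bool
	if !s2.ReadOptionalASN1Boolean(&flag2, tag, true) {
		panic("malformed input")
	}
	fmt.Println(flag2) // true, the defaultValue
}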
+ signers = append(signers, &multiAlgorithmSigner{ + AlgorithmSigner: as, + supportedAlgorithms: []string{KeyAlgoRSA}, + }) + } + } if !ok { continue } diff --git a/vendor/golang.org/x/crypto/ssh/common.go b/vendor/golang.org/x/crypto/ssh/common.go index dd2ab0d69..7e9c2cbc6 100644 --- a/vendor/golang.org/x/crypto/ssh/common.go +++ b/vendor/golang.org/x/crypto/ssh/common.go @@ -127,6 +127,14 @@ func isRSA(algo string) bool { return contains(algos, underlyingAlgo(algo)) } +func isRSACert(algo string) bool { + _, ok := certKeyAlgoNames[algo] + if !ok { + return false + } + return isRSA(algo) +} + // supportedPubKeyAuthAlgos specifies the supported client public key // authentication algorithms. Note that this doesn't include certificate types // since those use the underlying algorithm. This list is sent to the client if diff --git a/vendor/golang.org/x/crypto/ssh/server.go b/vendor/golang.org/x/crypto/ssh/server.go index 8f1505af9..7f0c236a9 100644 --- a/vendor/golang.org/x/crypto/ssh/server.go +++ b/vendor/golang.org/x/crypto/ssh/server.go @@ -337,7 +337,7 @@ func checkSourceAddress(addr net.Addr, sourceAddrs string) error { return fmt.Errorf("ssh: remote address %v is not allowed because of source-address restriction", addr) } -func gssExchangeToken(gssapiConfig *GSSAPIWithMICConfig, firstToken []byte, s *connection, +func gssExchangeToken(gssapiConfig *GSSAPIWithMICConfig, token []byte, s *connection, sessionID []byte, userAuthReq userAuthRequestMsg) (authErr error, perms *Permissions, err error) { gssAPIServer := gssapiConfig.Server defer gssAPIServer.DeleteSecContext() @@ -347,7 +347,7 @@ func gssExchangeToken(gssapiConfig *GSSAPIWithMICConfig, firstToken []byte, s *c outToken []byte needContinue bool ) - outToken, srcName, needContinue, err = gssAPIServer.AcceptSecContext(firstToken) + outToken, srcName, needContinue, err = gssAPIServer.AcceptSecContext(token) if err != nil { return err, nil, nil } @@ -369,6 +369,7 @@ func gssExchangeToken(gssapiConfig *GSSAPIWithMICConfig, firstToken []byte, s *c if err := Unmarshal(packet, userAuthGSSAPITokenReq); err != nil { return nil, nil, err } + token = userAuthGSSAPITokenReq.Token } packet, err := s.transport.readPacket() if err != nil { diff --git a/vendor/golang.org/x/crypto/ssh/tcpip.go b/vendor/golang.org/x/crypto/ssh/tcpip.go index 80d35f5ec..ef5059a11 100644 --- a/vendor/golang.org/x/crypto/ssh/tcpip.go +++ b/vendor/golang.org/x/crypto/ssh/tcpip.go @@ -5,6 +5,7 @@ package ssh import ( + "context" "errors" "fmt" "io" @@ -332,6 +333,40 @@ func (l *tcpListener) Addr() net.Addr { return l.laddr } +// DialContext initiates a connection to the addr from the remote host. +// +// The provided Context must be non-nil. If the context expires before the +// connection is complete, an error is returned. Once successfully connected, +// any expiration of the context will not affect the connection. +// +// See func Dial for additional information. +func (c *Client) DialContext(ctx context.Context, n, addr string) (net.Conn, error) { + if err := ctx.Err(); err != nil { + return nil, err + } + type connErr struct { + conn net.Conn + err error + } + ch := make(chan connErr) + go func() { + conn, err := c.Dial(n, addr) + select { + case ch <- connErr{conn, err}: + case <-ctx.Done(): + if conn != nil { + conn.Close() + } + } + }() + select { + case res := <-ch: + return res.conn, res.err + case <-ctx.Done(): + return nil, ctx.Err() + } +} + // Dial initiates a connection to the addr from the remote host. 
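The Client.DialContext method added above lets the dial through the remote host be bounded by a context; once the connection is established, cancelling the context no longer affects it. A minimal sketch (host names, credentials, and the target address are placeholders):

package main

import (
	"context"
	"log"
	"time"

	"golang.org/x/crypto/ssh"
)

func main() {
	config := &ssh.ClientConfig{
		User:            "demo",
		Auth:            []ssh.AuthMethod{ssh.Password("secret")},
		HostKeyCallback: ssh.InsecureIgnoreHostKey(), // placeholder; not for production use
	}
	client, err := ssh.Dial("tcp", "bastion.example.com:22", config)
	if err != nil {
		log.Fatal(err)
	}
	defer client.Close()

	// Bound the remote dial to five seconds.
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	conn, err := client.DialContext(ctx, "tcp", "10.0.0.5:80")
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()
}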
// The resulting connection has a zero LocalAddr() and RemoteAddr(). func (c *Client) Dial(n, addr string) (net.Conn, error) { diff --git a/vendor/golang.org/x/sys/unix/fcntl.go b/vendor/golang.org/x/sys/unix/fcntl.go index 58c6bfc70..6200876fb 100644 --- a/vendor/golang.org/x/sys/unix/fcntl.go +++ b/vendor/golang.org/x/sys/unix/fcntl.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build dragonfly || freebsd || linux || netbsd || openbsd +//go:build dragonfly || freebsd || linux || netbsd package unix diff --git a/vendor/golang.org/x/sys/unix/ioctl_linux.go b/vendor/golang.org/x/sys/unix/ioctl_linux.go index 0d12c0851..dbe680eab 100644 --- a/vendor/golang.org/x/sys/unix/ioctl_linux.go +++ b/vendor/golang.org/x/sys/unix/ioctl_linux.go @@ -231,3 +231,8 @@ func IoctlLoopGetStatus64(fd int) (*LoopInfo64, error) { func IoctlLoopSetStatus64(fd int, value *LoopInfo64) error { return ioctlPtr(fd, LOOP_SET_STATUS64, unsafe.Pointer(value)) } + +// IoctlLoopConfigure configures all loop device parameters in a single step +func IoctlLoopConfigure(fd int, value *LoopConfig) error { + return ioctlPtr(fd, LOOP_CONFIGURE, unsafe.Pointer(value)) +} diff --git a/vendor/golang.org/x/sys/unix/mkerrors.sh b/vendor/golang.org/x/sys/unix/mkerrors.sh index cbe24150a..6202638ba 100644 --- a/vendor/golang.org/x/sys/unix/mkerrors.sh +++ b/vendor/golang.org/x/sys/unix/mkerrors.sh @@ -519,6 +519,7 @@ ccflags="$@" $2 ~ /^LOCK_(SH|EX|NB|UN)$/ || $2 ~ /^LO_(KEY|NAME)_SIZE$/ || $2 ~ /^LOOP_(CLR|CTL|GET|SET)_/ || + $2 == "LOOP_CONFIGURE" || $2 ~ /^(AF|SOCK|SO|SOL|IPPROTO|IP|IPV6|TCP|MCAST|EVFILT|NOTE|SHUT|PROT|MAP|MREMAP|MFD|T?PACKET|MSG|SCM|MCL|DT|MADV|PR|LOCAL|TCPOPT|UDP)_/ || $2 ~ /^NFC_(GENL|PROTO|COMM|RF|SE|DIRECTION|LLCP|SOCKPROTO)_/ || $2 ~ /^NFC_.*_(MAX)?SIZE$/ || @@ -560,7 +561,7 @@ ccflags="$@" $2 ~ /^RLIMIT_(AS|CORE|CPU|DATA|FSIZE|LOCKS|MEMLOCK|MSGQUEUE|NICE|NOFILE|NPROC|RSS|RTPRIO|RTTIME|SIGPENDING|STACK)|RLIM_INFINITY/ || $2 ~ /^PRIO_(PROCESS|PGRP|USER)/ || $2 ~ /^CLONE_[A-Z_]+/ || - $2 !~ /^(BPF_TIMEVAL|BPF_FIB_LOOKUP_[A-Z]+)$/ && + $2 !~ /^(BPF_TIMEVAL|BPF_FIB_LOOKUP_[A-Z]+|BPF_F_LINK)$/ && $2 ~ /^(BPF|DLT)_/ || $2 ~ /^AUDIT_/ || $2 ~ /^(CLOCK|TIMER)_/ || diff --git a/vendor/golang.org/x/sys/unix/syscall_bsd.go b/vendor/golang.org/x/sys/unix/syscall_bsd.go index 6f328e3a5..a00c3e545 100644 --- a/vendor/golang.org/x/sys/unix/syscall_bsd.go +++ b/vendor/golang.org/x/sys/unix/syscall_bsd.go @@ -316,7 +316,7 @@ func GetsockoptString(fd, level, opt int) (string, error) { if err != nil { return "", err } - return string(buf[:vallen-1]), nil + return ByteSliceToString(buf[:vallen]), nil } //sys recvfrom(fd int, p []byte, flags int, from *RawSockaddrAny, fromlen *_Socklen) (n int, err error) diff --git a/vendor/golang.org/x/sys/unix/syscall_linux.go b/vendor/golang.org/x/sys/unix/syscall_linux.go index a5e1c10e3..0f85e29e6 100644 --- a/vendor/golang.org/x/sys/unix/syscall_linux.go +++ b/vendor/golang.org/x/sys/unix/syscall_linux.go @@ -61,15 +61,23 @@ func FanotifyMark(fd int, flags uint, mask uint64, dirFd int, pathname string) ( } //sys fchmodat(dirfd int, path string, mode uint32) (err error) - -func Fchmodat(dirfd int, path string, mode uint32, flags int) (err error) { - // Linux fchmodat doesn't support the flags parameter. Mimick glibc's behavior - // and check the flags. Otherwise the mode would be applied to the symlink - // destination which is not what the user expects. 
- if flags&^AT_SYMLINK_NOFOLLOW != 0 { - return EINVAL - } else if flags&AT_SYMLINK_NOFOLLOW != 0 { - return EOPNOTSUPP +//sys fchmodat2(dirfd int, path string, mode uint32, flags int) (err error) + +func Fchmodat(dirfd int, path string, mode uint32, flags int) error { + // Linux fchmodat doesn't support the flags parameter, but fchmodat2 does. + // Try fchmodat2 if flags are specified. + if flags != 0 { + err := fchmodat2(dirfd, path, mode, flags) + if err == ENOSYS { + // fchmodat2 isn't available. If the flags are known to be valid, + // return EOPNOTSUPP to indicate that fchmodat doesn't support them. + if flags&^(AT_SYMLINK_NOFOLLOW|AT_EMPTY_PATH) != 0 { + return EINVAL + } else if flags&(AT_SYMLINK_NOFOLLOW|AT_EMPTY_PATH) != 0 { + return EOPNOTSUPP + } + } + return err } return fchmodat(dirfd, path, mode) } @@ -1302,7 +1310,7 @@ func GetsockoptString(fd, level, opt int) (string, error) { return "", err } } - return string(buf[:vallen-1]), nil + return ByteSliceToString(buf[:vallen]), nil } func GetsockoptTpacketStats(fd, level, opt int) (*TpacketStats, error) { diff --git a/vendor/golang.org/x/sys/unix/syscall_openbsd.go b/vendor/golang.org/x/sys/unix/syscall_openbsd.go index d2882ee04..b25343c71 100644 --- a/vendor/golang.org/x/sys/unix/syscall_openbsd.go +++ b/vendor/golang.org/x/sys/unix/syscall_openbsd.go @@ -166,6 +166,20 @@ func Getresgid() (rgid, egid, sgid int) { //sys sysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) (err error) = SYS___SYSCTL +//sys fcntl(fd int, cmd int, arg int) (n int, err error) +//sys fcntlPtr(fd int, cmd int, arg unsafe.Pointer) (n int, err error) = SYS_FCNTL + +// FcntlInt performs a fcntl syscall on fd with the provided command and argument. +func FcntlInt(fd uintptr, cmd, arg int) (int, error) { + return fcntl(int(fd), cmd, arg) +} + +// FcntlFlock performs a fcntl syscall for the F_GETLK, F_SETLK or F_SETLKW command. 
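Looking back at the Fchmodat hunk above: flags are no longer rejected outright; when any flag is set the wrapper now tries the fchmodat2(2) syscall first (added in Linux 6.6) and only falls back to the old EINVAL/EOPNOTSUPP behavior when the kernel returns ENOSYS. A small usage sketch; the path is a placeholder:

package main

import (
	"log"

	"golang.org/x/sys/unix"
)

func main() {
	// AT_SYMLINK_NOFOLLOW is now forwarded to fchmodat2(2) when available,
	// rather than being rejected by the wrapper itself; on kernels without
	// fchmodat2 the old EOPNOTSUPP result is preserved.
	err := unix.Fchmodat(unix.AT_FDCWD, "/tmp/example-link", 0o600, unix.AT_SYMLINK_NOFOLLOW)
	if err != nil {
		log.Fatal(err)
	}
}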
+func FcntlFlock(fd uintptr, cmd int, lk *Flock_t) error { + _, err := fcntlPtr(int(fd), cmd, unsafe.Pointer(lk)) + return err +} + //sys ppoll(fds *PollFd, nfds int, timeout *Timespec, sigmask *Sigset_t) (n int, err error) func Ppoll(fds []PollFd, timeout *Timespec, sigmask *Sigset_t) (n int, err error) { diff --git a/vendor/golang.org/x/sys/unix/syscall_solaris.go b/vendor/golang.org/x/sys/unix/syscall_solaris.go index 60c8142d4..21974af06 100644 --- a/vendor/golang.org/x/sys/unix/syscall_solaris.go +++ b/vendor/golang.org/x/sys/unix/syscall_solaris.go @@ -158,7 +158,7 @@ func GetsockoptString(fd, level, opt int) (string, error) { if err != nil { return "", err } - return string(buf[:vallen-1]), nil + return ByteSliceToString(buf[:vallen]), nil } const ImplementsGetwd = true diff --git a/vendor/golang.org/x/sys/unix/syscall_zos_s390x.go b/vendor/golang.org/x/sys/unix/syscall_zos_s390x.go index d99d05f1b..b473038c6 100644 --- a/vendor/golang.org/x/sys/unix/syscall_zos_s390x.go +++ b/vendor/golang.org/x/sys/unix/syscall_zos_s390x.go @@ -1104,7 +1104,7 @@ func GetsockoptString(fd, level, opt int) (string, error) { return "", err } - return string(buf[:vallen-1]), nil + return ByteSliceToString(buf[:vallen]), nil } func Recvmsg(fd int, p, oob []byte, flags int) (n, oobn int, recvflags int, from Sockaddr, err error) { diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux.go b/vendor/golang.org/x/sys/unix/zerrors_linux.go index 9c00cbf51..c73cfe2f1 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux.go @@ -486,7 +486,6 @@ const ( BPF_F_ANY_ALIGNMENT = 0x2 BPF_F_BEFORE = 0x8 BPF_F_ID = 0x20 - BPF_F_LINK = 0x2000 BPF_F_NETFILTER_IP_DEFRAG = 0x1 BPF_F_QUERY_EFFECTIVE = 0x1 BPF_F_REPLACE = 0x4 @@ -1802,6 +1801,7 @@ const ( LOCK_SH = 0x1 LOCK_UN = 0x8 LOOP_CLR_FD = 0x4c01 + LOOP_CONFIGURE = 0x4c0a LOOP_CTL_ADD = 0x4c80 LOOP_CTL_GET_FREE = 0x4c82 LOOP_CTL_REMOVE = 0x4c81 diff --git a/vendor/golang.org/x/sys/unix/zsyscall_linux.go b/vendor/golang.org/x/sys/unix/zsyscall_linux.go index faca7a557..1488d2712 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_linux.go +++ b/vendor/golang.org/x/sys/unix/zsyscall_linux.go @@ -37,6 +37,21 @@ func fchmodat(dirfd int, path string, mode uint32) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func fchmodat2(dirfd int, path string, mode uint32, flags int) (err error) { + var _p0 *byte + _p0, err = BytePtrFromString(path) + if err != nil { + return + } + _, _, e1 := Syscall6(SYS_FCHMODAT2, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(flags), 0, 0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func ioctl(fd int, req uint, arg uintptr) (err error) { _, _, e1 := Syscall(SYS_IOCTL, uintptr(fd), uintptr(req), uintptr(arg)) if e1 != 0 { diff --git a/vendor/golang.org/x/sys/unix/zsyscall_openbsd_386.go b/vendor/golang.org/x/sys/unix/zsyscall_openbsd_386.go index 88bfc2885..a1d061597 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_openbsd_386.go +++ b/vendor/golang.org/x/sys/unix/zsyscall_openbsd_386.go @@ -584,6 +584,32 @@ var libc_sysctl_trampoline_addr uintptr // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func fcntl(fd int, cmd int, arg int) (n int, err error) { + r0, _, e1 := syscall_syscall(libc_fcntl_trampoline_addr, uintptr(fd), uintptr(cmd), uintptr(arg)) + n = int(r0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +var libc_fcntl_trampoline_addr 
uintptr + +//go:cgo_import_dynamic libc_fcntl fcntl "libc.so" + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func fcntlPtr(fd int, cmd int, arg unsafe.Pointer) (n int, err error) { + r0, _, e1 := syscall_syscall(libc_fcntl_trampoline_addr, uintptr(fd), uintptr(cmd), uintptr(arg)) + n = int(r0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func ppoll(fds *PollFd, nfds int, timeout *Timespec, sigmask *Sigset_t) (n int, err error) { r0, _, e1 := syscall_syscall6(libc_ppoll_trampoline_addr, uintptr(unsafe.Pointer(fds)), uintptr(nfds), uintptr(unsafe.Pointer(timeout)), uintptr(unsafe.Pointer(sigmask)), 0, 0) n = int(r0) diff --git a/vendor/golang.org/x/sys/unix/zsyscall_openbsd_386.s b/vendor/golang.org/x/sys/unix/zsyscall_openbsd_386.s index 4cbeff171..41b561731 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_openbsd_386.s +++ b/vendor/golang.org/x/sys/unix/zsyscall_openbsd_386.s @@ -178,6 +178,11 @@ TEXT libc_sysctl_trampoline<>(SB),NOSPLIT,$0-0 GLOBL ·libc_sysctl_trampoline_addr(SB), RODATA, $4 DATA ·libc_sysctl_trampoline_addr(SB)/4, $libc_sysctl_trampoline<>(SB) +TEXT libc_fcntl_trampoline<>(SB),NOSPLIT,$0-0 + JMP libc_fcntl(SB) +GLOBL ·libc_fcntl_trampoline_addr(SB), RODATA, $4 +DATA ·libc_fcntl_trampoline_addr(SB)/4, $libc_fcntl_trampoline<>(SB) + TEXT libc_ppoll_trampoline<>(SB),NOSPLIT,$0-0 JMP libc_ppoll(SB) GLOBL ·libc_ppoll_trampoline_addr(SB), RODATA, $4 diff --git a/vendor/golang.org/x/sys/unix/zsyscall_openbsd_amd64.go b/vendor/golang.org/x/sys/unix/zsyscall_openbsd_amd64.go index b8a67b99a..5b2a74097 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_openbsd_amd64.go +++ b/vendor/golang.org/x/sys/unix/zsyscall_openbsd_amd64.go @@ -584,6 +584,32 @@ var libc_sysctl_trampoline_addr uintptr // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func fcntl(fd int, cmd int, arg int) (n int, err error) { + r0, _, e1 := syscall_syscall(libc_fcntl_trampoline_addr, uintptr(fd), uintptr(cmd), uintptr(arg)) + n = int(r0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +var libc_fcntl_trampoline_addr uintptr + +//go:cgo_import_dynamic libc_fcntl fcntl "libc.so" + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func fcntlPtr(fd int, cmd int, arg unsafe.Pointer) (n int, err error) { + r0, _, e1 := syscall_syscall(libc_fcntl_trampoline_addr, uintptr(fd), uintptr(cmd), uintptr(arg)) + n = int(r0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func ppoll(fds *PollFd, nfds int, timeout *Timespec, sigmask *Sigset_t) (n int, err error) { r0, _, e1 := syscall_syscall6(libc_ppoll_trampoline_addr, uintptr(unsafe.Pointer(fds)), uintptr(nfds), uintptr(unsafe.Pointer(timeout)), uintptr(unsafe.Pointer(sigmask)), 0, 0) n = int(r0) diff --git a/vendor/golang.org/x/sys/unix/zsyscall_openbsd_amd64.s b/vendor/golang.org/x/sys/unix/zsyscall_openbsd_amd64.s index 1123f2757..4019a656f 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_openbsd_amd64.s +++ b/vendor/golang.org/x/sys/unix/zsyscall_openbsd_amd64.s @@ -178,6 +178,11 @@ TEXT libc_sysctl_trampoline<>(SB),NOSPLIT,$0-0 GLOBL ·libc_sysctl_trampoline_addr(SB), RODATA, $8 DATA ·libc_sysctl_trampoline_addr(SB)/8, $libc_sysctl_trampoline<>(SB) +TEXT libc_fcntl_trampoline<>(SB),NOSPLIT,$0-0 + JMP libc_fcntl(SB) +GLOBL ·libc_fcntl_trampoline_addr(SB), RODATA, $8 +DATA ·libc_fcntl_trampoline_addr(SB)/8, $libc_fcntl_trampoline<>(SB) + TEXT 
libc_ppoll_trampoline<>(SB),NOSPLIT,$0-0 JMP libc_ppoll(SB) GLOBL ·libc_ppoll_trampoline_addr(SB), RODATA, $8 diff --git a/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm.go b/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm.go index af50a65c0..f6eda1344 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm.go +++ b/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm.go @@ -584,6 +584,32 @@ var libc_sysctl_trampoline_addr uintptr // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func fcntl(fd int, cmd int, arg int) (n int, err error) { + r0, _, e1 := syscall_syscall(libc_fcntl_trampoline_addr, uintptr(fd), uintptr(cmd), uintptr(arg)) + n = int(r0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +var libc_fcntl_trampoline_addr uintptr + +//go:cgo_import_dynamic libc_fcntl fcntl "libc.so" + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func fcntlPtr(fd int, cmd int, arg unsafe.Pointer) (n int, err error) { + r0, _, e1 := syscall_syscall(libc_fcntl_trampoline_addr, uintptr(fd), uintptr(cmd), uintptr(arg)) + n = int(r0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func ppoll(fds *PollFd, nfds int, timeout *Timespec, sigmask *Sigset_t) (n int, err error) { r0, _, e1 := syscall_syscall6(libc_ppoll_trampoline_addr, uintptr(unsafe.Pointer(fds)), uintptr(nfds), uintptr(unsafe.Pointer(timeout)), uintptr(unsafe.Pointer(sigmask)), 0, 0) n = int(r0) diff --git a/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm.s b/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm.s index 82badae39..ac4af24f9 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm.s +++ b/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm.s @@ -178,6 +178,11 @@ TEXT libc_sysctl_trampoline<>(SB),NOSPLIT,$0-0 GLOBL ·libc_sysctl_trampoline_addr(SB), RODATA, $4 DATA ·libc_sysctl_trampoline_addr(SB)/4, $libc_sysctl_trampoline<>(SB) +TEXT libc_fcntl_trampoline<>(SB),NOSPLIT,$0-0 + JMP libc_fcntl(SB) +GLOBL ·libc_fcntl_trampoline_addr(SB), RODATA, $4 +DATA ·libc_fcntl_trampoline_addr(SB)/4, $libc_fcntl_trampoline<>(SB) + TEXT libc_ppoll_trampoline<>(SB),NOSPLIT,$0-0 JMP libc_ppoll(SB) GLOBL ·libc_ppoll_trampoline_addr(SB), RODATA, $4 diff --git a/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm64.go b/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm64.go index 8fb4ff36a..55df20ae9 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm64.go +++ b/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm64.go @@ -584,6 +584,32 @@ var libc_sysctl_trampoline_addr uintptr // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func fcntl(fd int, cmd int, arg int) (n int, err error) { + r0, _, e1 := syscall_syscall(libc_fcntl_trampoline_addr, uintptr(fd), uintptr(cmd), uintptr(arg)) + n = int(r0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +var libc_fcntl_trampoline_addr uintptr + +//go:cgo_import_dynamic libc_fcntl fcntl "libc.so" + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func fcntlPtr(fd int, cmd int, arg unsafe.Pointer) (n int, err error) { + r0, _, e1 := syscall_syscall(libc_fcntl_trampoline_addr, uintptr(fd), uintptr(cmd), uintptr(arg)) + n = int(r0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func ppoll(fds *PollFd, nfds int, timeout *Timespec, sigmask *Sigset_t) (n int, err error) { r0, _, e1 := syscall_syscall6(libc_ppoll_trampoline_addr, uintptr(unsafe.Pointer(fds)), 
uintptr(nfds), uintptr(unsafe.Pointer(timeout)), uintptr(unsafe.Pointer(sigmask)), 0, 0) n = int(r0) diff --git a/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm64.s b/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm64.s index 24d7eecb9..f77d53212 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm64.s +++ b/vendor/golang.org/x/sys/unix/zsyscall_openbsd_arm64.s @@ -178,6 +178,11 @@ TEXT libc_sysctl_trampoline<>(SB),NOSPLIT,$0-0 GLOBL ·libc_sysctl_trampoline_addr(SB), RODATA, $8 DATA ·libc_sysctl_trampoline_addr(SB)/8, $libc_sysctl_trampoline<>(SB) +TEXT libc_fcntl_trampoline<>(SB),NOSPLIT,$0-0 + JMP libc_fcntl(SB) +GLOBL ·libc_fcntl_trampoline_addr(SB), RODATA, $8 +DATA ·libc_fcntl_trampoline_addr(SB)/8, $libc_fcntl_trampoline<>(SB) + TEXT libc_ppoll_trampoline<>(SB),NOSPLIT,$0-0 JMP libc_ppoll(SB) GLOBL ·libc_ppoll_trampoline_addr(SB), RODATA, $8 diff --git a/vendor/golang.org/x/sys/unix/zsyscall_openbsd_mips64.go b/vendor/golang.org/x/sys/unix/zsyscall_openbsd_mips64.go index f469a83ee..8c1155cbc 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_openbsd_mips64.go +++ b/vendor/golang.org/x/sys/unix/zsyscall_openbsd_mips64.go @@ -584,6 +584,32 @@ var libc_sysctl_trampoline_addr uintptr // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func fcntl(fd int, cmd int, arg int) (n int, err error) { + r0, _, e1 := syscall_syscall(libc_fcntl_trampoline_addr, uintptr(fd), uintptr(cmd), uintptr(arg)) + n = int(r0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +var libc_fcntl_trampoline_addr uintptr + +//go:cgo_import_dynamic libc_fcntl fcntl "libc.so" + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func fcntlPtr(fd int, cmd int, arg unsafe.Pointer) (n int, err error) { + r0, _, e1 := syscall_syscall(libc_fcntl_trampoline_addr, uintptr(fd), uintptr(cmd), uintptr(arg)) + n = int(r0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func ppoll(fds *PollFd, nfds int, timeout *Timespec, sigmask *Sigset_t) (n int, err error) { r0, _, e1 := syscall_syscall6(libc_ppoll_trampoline_addr, uintptr(unsafe.Pointer(fds)), uintptr(nfds), uintptr(unsafe.Pointer(timeout)), uintptr(unsafe.Pointer(sigmask)), 0, 0) n = int(r0) diff --git a/vendor/golang.org/x/sys/unix/zsyscall_openbsd_mips64.s b/vendor/golang.org/x/sys/unix/zsyscall_openbsd_mips64.s index 9a498a067..fae140b62 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_openbsd_mips64.s +++ b/vendor/golang.org/x/sys/unix/zsyscall_openbsd_mips64.s @@ -178,6 +178,11 @@ TEXT libc_sysctl_trampoline<>(SB),NOSPLIT,$0-0 GLOBL ·libc_sysctl_trampoline_addr(SB), RODATA, $8 DATA ·libc_sysctl_trampoline_addr(SB)/8, $libc_sysctl_trampoline<>(SB) +TEXT libc_fcntl_trampoline<>(SB),NOSPLIT,$0-0 + JMP libc_fcntl(SB) +GLOBL ·libc_fcntl_trampoline_addr(SB), RODATA, $8 +DATA ·libc_fcntl_trampoline_addr(SB)/8, $libc_fcntl_trampoline<>(SB) + TEXT libc_ppoll_trampoline<>(SB),NOSPLIT,$0-0 JMP libc_ppoll(SB) GLOBL ·libc_ppoll_trampoline_addr(SB), RODATA, $8 diff --git a/vendor/golang.org/x/sys/unix/zsyscall_openbsd_ppc64.go b/vendor/golang.org/x/sys/unix/zsyscall_openbsd_ppc64.go index c26ca2e1a..7cc80c58d 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_openbsd_ppc64.go +++ b/vendor/golang.org/x/sys/unix/zsyscall_openbsd_ppc64.go @@ -584,6 +584,32 @@ var libc_sysctl_trampoline_addr uintptr // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func fcntl(fd int, cmd int, arg int) (n int, err error) { + r0, _, e1 := 
syscall_syscall(libc_fcntl_trampoline_addr, uintptr(fd), uintptr(cmd), uintptr(arg)) + n = int(r0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +var libc_fcntl_trampoline_addr uintptr + +//go:cgo_import_dynamic libc_fcntl fcntl "libc.so" + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func fcntlPtr(fd int, cmd int, arg unsafe.Pointer) (n int, err error) { + r0, _, e1 := syscall_syscall(libc_fcntl_trampoline_addr, uintptr(fd), uintptr(cmd), uintptr(arg)) + n = int(r0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func ppoll(fds *PollFd, nfds int, timeout *Timespec, sigmask *Sigset_t) (n int, err error) { r0, _, e1 := syscall_syscall6(libc_ppoll_trampoline_addr, uintptr(unsafe.Pointer(fds)), uintptr(nfds), uintptr(unsafe.Pointer(timeout)), uintptr(unsafe.Pointer(sigmask)), 0, 0) n = int(r0) diff --git a/vendor/golang.org/x/sys/unix/zsyscall_openbsd_ppc64.s b/vendor/golang.org/x/sys/unix/zsyscall_openbsd_ppc64.s index 1f224aa41..9d1e0ff06 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_openbsd_ppc64.s +++ b/vendor/golang.org/x/sys/unix/zsyscall_openbsd_ppc64.s @@ -213,6 +213,12 @@ TEXT libc_sysctl_trampoline<>(SB),NOSPLIT,$0-0 GLOBL ·libc_sysctl_trampoline_addr(SB), RODATA, $8 DATA ·libc_sysctl_trampoline_addr(SB)/8, $libc_sysctl_trampoline<>(SB) +TEXT libc_fcntl_trampoline<>(SB),NOSPLIT,$0-0 + CALL libc_fcntl(SB) + RET +GLOBL ·libc_fcntl_trampoline_addr(SB), RODATA, $8 +DATA ·libc_fcntl_trampoline_addr(SB)/8, $libc_fcntl_trampoline<>(SB) + TEXT libc_ppoll_trampoline<>(SB),NOSPLIT,$0-0 CALL libc_ppoll(SB) RET diff --git a/vendor/golang.org/x/sys/unix/zsyscall_openbsd_riscv64.go b/vendor/golang.org/x/sys/unix/zsyscall_openbsd_riscv64.go index bcc920dd2..0688737f4 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_openbsd_riscv64.go +++ b/vendor/golang.org/x/sys/unix/zsyscall_openbsd_riscv64.go @@ -584,6 +584,32 @@ var libc_sysctl_trampoline_addr uintptr // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func fcntl(fd int, cmd int, arg int) (n int, err error) { + r0, _, e1 := syscall_syscall(libc_fcntl_trampoline_addr, uintptr(fd), uintptr(cmd), uintptr(arg)) + n = int(r0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +var libc_fcntl_trampoline_addr uintptr + +//go:cgo_import_dynamic libc_fcntl fcntl "libc.so" + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + +func fcntlPtr(fd int, cmd int, arg unsafe.Pointer) (n int, err error) { + r0, _, e1 := syscall_syscall(libc_fcntl_trampoline_addr, uintptr(fd), uintptr(cmd), uintptr(arg)) + n = int(r0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func ppoll(fds *PollFd, nfds int, timeout *Timespec, sigmask *Sigset_t) (n int, err error) { r0, _, e1 := syscall_syscall6(libc_ppoll_trampoline_addr, uintptr(unsafe.Pointer(fds)), uintptr(nfds), uintptr(unsafe.Pointer(timeout)), uintptr(unsafe.Pointer(sigmask)), 0, 0) n = int(r0) diff --git a/vendor/golang.org/x/sys/unix/zsyscall_openbsd_riscv64.s b/vendor/golang.org/x/sys/unix/zsyscall_openbsd_riscv64.s index 87a79c709..da115f9a4 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_openbsd_riscv64.s +++ b/vendor/golang.org/x/sys/unix/zsyscall_openbsd_riscv64.s @@ -178,6 +178,11 @@ TEXT libc_sysctl_trampoline<>(SB),NOSPLIT,$0-0 GLOBL ·libc_sysctl_trampoline_addr(SB), RODATA, $8 DATA ·libc_sysctl_trampoline_addr(SB)/8, $libc_sysctl_trampoline<>(SB) +TEXT 
libc_fcntl_trampoline<>(SB),NOSPLIT,$0-0 + JMP libc_fcntl(SB) +GLOBL ·libc_fcntl_trampoline_addr(SB), RODATA, $8 +DATA ·libc_fcntl_trampoline_addr(SB)/8, $libc_fcntl_trampoline<>(SB) + TEXT libc_ppoll_trampoline<>(SB),NOSPLIT,$0-0 JMP libc_ppoll(SB) GLOBL ·libc_ppoll_trampoline_addr(SB), RODATA, $8 diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux.go b/vendor/golang.org/x/sys/unix/ztypes_linux.go index 997bcd55a..bbf8399ff 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux.go @@ -2671,6 +2671,7 @@ const ( BPF_PROG_TYPE_LSM = 0x1d BPF_PROG_TYPE_SK_LOOKUP = 0x1e BPF_PROG_TYPE_SYSCALL = 0x1f + BPF_PROG_TYPE_NETFILTER = 0x20 BPF_CGROUP_INET_INGRESS = 0x0 BPF_CGROUP_INET_EGRESS = 0x1 BPF_CGROUP_INET_SOCK_CREATE = 0x2 @@ -2715,6 +2716,11 @@ const ( BPF_PERF_EVENT = 0x29 BPF_TRACE_KPROBE_MULTI = 0x2a BPF_LSM_CGROUP = 0x2b + BPF_STRUCT_OPS = 0x2c + BPF_NETFILTER = 0x2d + BPF_TCX_INGRESS = 0x2e + BPF_TCX_EGRESS = 0x2f + BPF_TRACE_UPROBE_MULTI = 0x30 BPF_LINK_TYPE_UNSPEC = 0x0 BPF_LINK_TYPE_RAW_TRACEPOINT = 0x1 BPF_LINK_TYPE_TRACING = 0x2 @@ -2725,6 +2731,18 @@ const ( BPF_LINK_TYPE_PERF_EVENT = 0x7 BPF_LINK_TYPE_KPROBE_MULTI = 0x8 BPF_LINK_TYPE_STRUCT_OPS = 0x9 + BPF_LINK_TYPE_NETFILTER = 0xa + BPF_LINK_TYPE_TCX = 0xb + BPF_LINK_TYPE_UPROBE_MULTI = 0xc + BPF_PERF_EVENT_UNSPEC = 0x0 + BPF_PERF_EVENT_UPROBE = 0x1 + BPF_PERF_EVENT_URETPROBE = 0x2 + BPF_PERF_EVENT_KPROBE = 0x3 + BPF_PERF_EVENT_KRETPROBE = 0x4 + BPF_PERF_EVENT_TRACEPOINT = 0x5 + BPF_PERF_EVENT_EVENT = 0x6 + BPF_F_KPROBE_MULTI_RETURN = 0x1 + BPF_F_UPROBE_MULTI_RETURN = 0x1 BPF_ANY = 0x0 BPF_NOEXIST = 0x1 BPF_EXIST = 0x2 @@ -2742,6 +2760,8 @@ const ( BPF_F_MMAPABLE = 0x400 BPF_F_PRESERVE_ELEMS = 0x800 BPF_F_INNER_MAP = 0x1000 + BPF_F_LINK = 0x2000 + BPF_F_PATH_FD = 0x4000 BPF_STATS_RUN_TIME = 0x0 BPF_STACK_BUILD_ID_EMPTY = 0x0 BPF_STACK_BUILD_ID_VALID = 0x1 @@ -2762,6 +2782,7 @@ const ( BPF_F_ZERO_CSUM_TX = 0x2 BPF_F_DONT_FRAGMENT = 0x4 BPF_F_SEQ_NUMBER = 0x8 + BPF_F_NO_TUNNEL_KEY = 0x10 BPF_F_TUNINFO_FLAGS = 0x10 BPF_F_INDEX_MASK = 0xffffffff BPF_F_CURRENT_CPU = 0xffffffff @@ -2778,6 +2799,8 @@ const ( BPF_F_ADJ_ROOM_ENCAP_L4_UDP = 0x10 BPF_F_ADJ_ROOM_NO_CSUM_RESET = 0x20 BPF_F_ADJ_ROOM_ENCAP_L2_ETH = 0x40 + BPF_F_ADJ_ROOM_DECAP_L3_IPV4 = 0x80 + BPF_F_ADJ_ROOM_DECAP_L3_IPV6 = 0x100 BPF_ADJ_ROOM_ENCAP_L2_MASK = 0xff BPF_ADJ_ROOM_ENCAP_L2_SHIFT = 0x38 BPF_F_SYSCTL_BASE_NAME = 0x1 @@ -2866,6 +2889,8 @@ const ( BPF_DEVCG_DEV_CHAR = 0x2 BPF_FIB_LOOKUP_DIRECT = 0x1 BPF_FIB_LOOKUP_OUTPUT = 0x2 + BPF_FIB_LOOKUP_SKIP_NEIGH = 0x4 + BPF_FIB_LOOKUP_TBID = 0x8 BPF_FIB_LKUP_RET_SUCCESS = 0x0 BPF_FIB_LKUP_RET_BLACKHOLE = 0x1 BPF_FIB_LKUP_RET_UNREACHABLE = 0x2 @@ -2901,6 +2926,7 @@ const ( BPF_CORE_ENUMVAL_EXISTS = 0xa BPF_CORE_ENUMVAL_VALUE = 0xb BPF_CORE_TYPE_MATCHES = 0xc + BPF_F_TIMER_ABS = 0x1 ) const ( @@ -2979,6 +3005,12 @@ type LoopInfo64 struct { Encrypt_key [32]uint8 Init [2]uint64 } +type LoopConfig struct { + Fd uint32 + Size uint32 + Info LoopInfo64 + _ [8]uint64 +} type TIPCSocketAddr struct { Ref uint32 diff --git a/vendor/golang.org/x/sys/windows/syscall_windows.go b/vendor/golang.org/x/sys/windows/syscall_windows.go index fb6cfd046..47dc57967 100644 --- a/vendor/golang.org/x/sys/windows/syscall_windows.go +++ b/vendor/golang.org/x/sys/windows/syscall_windows.go @@ -155,6 +155,8 @@ func NewCallbackCDecl(fn interface{}) uintptr { //sys GetModuleFileName(module Handle, filename *uint16, size uint32) (n uint32, err error) = kernel32.GetModuleFileNameW //sys GetModuleHandleEx(flags 
uint32, moduleName *uint16, module *Handle) (err error) = kernel32.GetModuleHandleExW //sys SetDefaultDllDirectories(directoryFlags uint32) (err error) +//sys AddDllDirectory(path *uint16) (cookie uintptr, err error) = kernel32.AddDllDirectory +//sys RemoveDllDirectory(cookie uintptr) (err error) = kernel32.RemoveDllDirectory //sys SetDllDirectory(path string) (err error) = kernel32.SetDllDirectoryW //sys GetVersion() (ver uint32, err error) //sys FormatMessage(flags uint32, msgsrc uintptr, msgid uint32, langid uint32, buf []uint16, args *byte) (n uint32, err error) = FormatMessageW diff --git a/vendor/golang.org/x/sys/windows/zsyscall_windows.go b/vendor/golang.org/x/sys/windows/zsyscall_windows.go index db6282e00..146a1f019 100644 --- a/vendor/golang.org/x/sys/windows/zsyscall_windows.go +++ b/vendor/golang.org/x/sys/windows/zsyscall_windows.go @@ -184,6 +184,7 @@ var ( procGetAdaptersInfo = modiphlpapi.NewProc("GetAdaptersInfo") procGetBestInterfaceEx = modiphlpapi.NewProc("GetBestInterfaceEx") procGetIfEntry = modiphlpapi.NewProc("GetIfEntry") + procAddDllDirectory = modkernel32.NewProc("AddDllDirectory") procAssignProcessToJobObject = modkernel32.NewProc("AssignProcessToJobObject") procCancelIo = modkernel32.NewProc("CancelIo") procCancelIoEx = modkernel32.NewProc("CancelIoEx") @@ -330,6 +331,7 @@ var ( procReadProcessMemory = modkernel32.NewProc("ReadProcessMemory") procReleaseMutex = modkernel32.NewProc("ReleaseMutex") procRemoveDirectoryW = modkernel32.NewProc("RemoveDirectoryW") + procRemoveDllDirectory = modkernel32.NewProc("RemoveDllDirectory") procResetEvent = modkernel32.NewProc("ResetEvent") procResizePseudoConsole = modkernel32.NewProc("ResizePseudoConsole") procResumeThread = modkernel32.NewProc("ResumeThread") @@ -1605,6 +1607,15 @@ func GetIfEntry(pIfRow *MibIfRow) (errcode error) { return } +func AddDllDirectory(path *uint16) (cookie uintptr, err error) { + r0, _, e1 := syscall.Syscall(procAddDllDirectory.Addr(), 1, uintptr(unsafe.Pointer(path)), 0, 0) + cookie = uintptr(r0) + if cookie == 0 { + err = errnoErr(e1) + } + return +} + func AssignProcessToJobObject(job Handle, process Handle) (err error) { r1, _, e1 := syscall.Syscall(procAssignProcessToJobObject.Addr(), 2, uintptr(job), uintptr(process), 0) if r1 == 0 { @@ -2879,6 +2890,14 @@ func RemoveDirectory(path *uint16) (err error) { return } +func RemoveDllDirectory(cookie uintptr) (err error) { + r1, _, e1 := syscall.Syscall(procRemoveDllDirectory.Addr(), 1, uintptr(cookie), 0, 0) + if r1 == 0 { + err = errnoErr(e1) + } + return +} + func ResetEvent(event Handle) (err error) { r1, _, e1 := syscall.Syscall(procResetEvent.Addr(), 1, uintptr(event), 0, 0) if r1 == 0 { diff --git a/vendor/golang.org/x/time/rate/rate.go b/vendor/golang.org/x/time/rate/rate.go index f0e0cf3cb..8f6c7f493 100644 --- a/vendor/golang.org/x/time/rate/rate.go +++ b/vendor/golang.org/x/time/rate/rate.go @@ -52,6 +52,8 @@ func Every(interval time.Duration) Limit { // or its associated context.Context is canceled. // // The methods AllowN, ReserveN, and WaitN consume n tokens. +// +// Limiter is safe for simultaneous use by multiple goroutines. type Limiter struct { mu sync.Mutex limit Limit diff --git a/vendor/golang.org/x/xerrors/LICENSE b/vendor/golang.org/x/xerrors/LICENSE deleted file mode 100644 index e4a47e17f..000000000 --- a/vendor/golang.org/x/xerrors/LICENSE +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2019 The Go Authors. All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/golang.org/x/xerrors/PATENTS b/vendor/golang.org/x/xerrors/PATENTS deleted file mode 100644 index 733099041..000000000 --- a/vendor/golang.org/x/xerrors/PATENTS +++ /dev/null @@ -1,22 +0,0 @@ -Additional IP Rights Grant (Patents) - -"This implementation" means the copyrightable works distributed by -Google as part of the Go project. - -Google hereby grants to You a perpetual, worldwide, non-exclusive, -no-charge, royalty-free, irrevocable (except as stated in this section) -patent license to make, have made, use, offer to sell, sell, import, -transfer and otherwise run, modify and propagate the contents of this -implementation of Go, where such license applies only to those patent -claims, both currently owned or controlled by Google and acquired in -the future, licensable by Google that are necessarily infringed by this -implementation of Go. This grant does not include claims that would be -infringed only as a consequence of further modification of this -implementation. If you or your agent or exclusive licensee institute or -order or agree to the institution of patent litigation against any -entity (including a cross-claim or counterclaim in a lawsuit) alleging -that this implementation of Go or any code incorporated within this -implementation of Go constitutes direct or contributory patent -infringement, or inducement of patent infringement, then any patent -rights granted to you under this License for this implementation of Go -shall terminate as of the date such litigation is filed. diff --git a/vendor/golang.org/x/xerrors/README b/vendor/golang.org/x/xerrors/README deleted file mode 100644 index aac7867a5..000000000 --- a/vendor/golang.org/x/xerrors/README +++ /dev/null @@ -1,2 +0,0 @@ -This repository holds the transition packages for the new Go 1.13 error values. -See golang.org/design/29934-error-values. 
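Editorial aside, not part of the patch: the hunks around this point drop the vendored copy of golang.org/x/xerrors, whose own README (deleted just above) describes it as the transition package for the Go 1.13 error values. For readers unfamiliar with that transition, here is a minimal, illustrative sketch of the standard-library equivalents (errors.Is, errors.Unwrap, and fmt.Errorf's %w verb); the names ErrNotFound and loadUser are hypothetical and appear nowhere in this patch.

package main

import (
	"errors"
	"fmt"
)

// ErrNotFound is a hypothetical sentinel error used only for this sketch.
var ErrNotFound = errors.New("not found")

// loadUser is a hypothetical helper that wraps ErrNotFound with context
// using fmt.Errorf's %w verb, the stdlib counterpart of xerrors.Errorf.
func loadUser(id string) error {
	return fmt.Errorf("load user %q: %w", id, ErrNotFound)
}

func main() {
	err := loadUser("alice")

	// errors.Is walks the wrap chain, replacing xerrors.Is.
	if errors.Is(err, ErrNotFound) {
		fmt.Println("not found:", err)
	}

	// errors.Unwrap returns the wrapped error, replacing xerrors.Unwrap.
	fmt.Println("unwrapped:", errors.Unwrap(err))
}

This sketch only restates behavior the standard library has provided since Go 1.13; it makes no claim about why the dependency graph no longer needs the vendored package.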
diff --git a/vendor/golang.org/x/xerrors/adaptor.go b/vendor/golang.org/x/xerrors/adaptor.go deleted file mode 100644 index 4317f2483..000000000 --- a/vendor/golang.org/x/xerrors/adaptor.go +++ /dev/null @@ -1,193 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package xerrors - -import ( - "bytes" - "fmt" - "io" - "reflect" - "strconv" -) - -// FormatError calls the FormatError method of f with an errors.Printer -// configured according to s and verb, and writes the result to s. -func FormatError(f Formatter, s fmt.State, verb rune) { - // Assuming this function is only called from the Format method, and given - // that FormatError takes precedence over Format, it cannot be called from - // any package that supports errors.Formatter. It is therefore safe to - // disregard that State may be a specific printer implementation and use one - // of our choice instead. - - // limitations: does not support printing error as Go struct. - - var ( - sep = " " // separator before next error - p = &state{State: s} - direct = true - ) - - var err error = f - - switch verb { - // Note that this switch must match the preference order - // for ordinary string printing (%#v before %+v, and so on). - - case 'v': - if s.Flag('#') { - if stringer, ok := err.(fmt.GoStringer); ok { - io.WriteString(&p.buf, stringer.GoString()) - goto exit - } - // proceed as if it were %v - } else if s.Flag('+') { - p.printDetail = true - sep = "\n - " - } - case 's': - case 'q', 'x', 'X': - // Use an intermediate buffer in the rare cases that precision, - // truncation, or one of the alternative verbs (q, x, and X) are - // specified. - direct = false - - default: - p.buf.WriteString("%!") - p.buf.WriteRune(verb) - p.buf.WriteByte('(') - switch { - case err != nil: - p.buf.WriteString(reflect.TypeOf(f).String()) - default: - p.buf.WriteString("") - } - p.buf.WriteByte(')') - io.Copy(s, &p.buf) - return - } - -loop: - for { - switch v := err.(type) { - case Formatter: - err = v.FormatError((*printer)(p)) - case fmt.Formatter: - v.Format(p, 'v') - break loop - default: - io.WriteString(&p.buf, v.Error()) - break loop - } - if err == nil { - break - } - if p.needColon || !p.printDetail { - p.buf.WriteByte(':') - p.needColon = false - } - p.buf.WriteString(sep) - p.inDetail = false - p.needNewline = false - } - -exit: - width, okW := s.Width() - prec, okP := s.Precision() - - if !direct || (okW && width > 0) || okP { - // Construct format string from State s. - format := []byte{'%'} - if s.Flag('-') { - format = append(format, '-') - } - if s.Flag('+') { - format = append(format, '+') - } - if s.Flag(' ') { - format = append(format, ' ') - } - if okW { - format = strconv.AppendInt(format, int64(width), 10) - } - if okP { - format = append(format, '.') - format = strconv.AppendInt(format, int64(prec), 10) - } - format = append(format, string(verb)...) - fmt.Fprintf(s, string(format), p.buf.String()) - } else { - io.Copy(s, &p.buf) - } -} - -var detailSep = []byte("\n ") - -// state tracks error printing state. It implements fmt.State. 
-type state struct { - fmt.State - buf bytes.Buffer - - printDetail bool - inDetail bool - needColon bool - needNewline bool -} - -func (s *state) Write(b []byte) (n int, err error) { - if s.printDetail { - if len(b) == 0 { - return 0, nil - } - if s.inDetail && s.needColon { - s.needNewline = true - if b[0] == '\n' { - b = b[1:] - } - } - k := 0 - for i, c := range b { - if s.needNewline { - if s.inDetail && s.needColon { - s.buf.WriteByte(':') - s.needColon = false - } - s.buf.Write(detailSep) - s.needNewline = false - } - if c == '\n' { - s.buf.Write(b[k:i]) - k = i + 1 - s.needNewline = true - } - } - s.buf.Write(b[k:]) - if !s.inDetail { - s.needColon = true - } - } else if !s.inDetail { - s.buf.Write(b) - } - return len(b), nil -} - -// printer wraps a state to implement an xerrors.Printer. -type printer state - -func (s *printer) Print(args ...interface{}) { - if !s.inDetail || s.printDetail { - fmt.Fprint((*state)(s), args...) - } -} - -func (s *printer) Printf(format string, args ...interface{}) { - if !s.inDetail || s.printDetail { - fmt.Fprintf((*state)(s), format, args...) - } -} - -func (s *printer) Detail() bool { - s.inDetail = true - return s.printDetail -} diff --git a/vendor/golang.org/x/xerrors/codereview.cfg b/vendor/golang.org/x/xerrors/codereview.cfg deleted file mode 100644 index 3f8b14b64..000000000 --- a/vendor/golang.org/x/xerrors/codereview.cfg +++ /dev/null @@ -1 +0,0 @@ -issuerepo: golang/go diff --git a/vendor/golang.org/x/xerrors/doc.go b/vendor/golang.org/x/xerrors/doc.go deleted file mode 100644 index 2ef99f5a8..000000000 --- a/vendor/golang.org/x/xerrors/doc.go +++ /dev/null @@ -1,23 +0,0 @@ -// Copyright 2019 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package xerrors implements functions to manipulate errors. -// -// This package is based on the Go 2 proposal for error values: -// -// https://golang.org/design/29934-error-values -// -// These functions were incorporated into the standard library's errors package -// in Go 1.13: -// - Is -// - As -// - Unwrap -// -// Also, Errorf's %w verb was incorporated into fmt.Errorf. -// -// Use this package to get equivalent behavior in all supported Go versions. -// -// No other features of this package were included in Go 1.13, and at present -// there are no plans to include any of them. -package xerrors // import "golang.org/x/xerrors" diff --git a/vendor/golang.org/x/xerrors/errors.go b/vendor/golang.org/x/xerrors/errors.go deleted file mode 100644 index e88d3772d..000000000 --- a/vendor/golang.org/x/xerrors/errors.go +++ /dev/null @@ -1,33 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package xerrors - -import "fmt" - -// errorString is a trivial implementation of error. -type errorString struct { - s string - frame Frame -} - -// New returns an error that formats as the given text. -// -// The returned error contains a Frame set to the caller's location and -// implements Formatter to show this information when printed with details. 
-func New(text string) error { - return &errorString{text, Caller(1)} -} - -func (e *errorString) Error() string { - return e.s -} - -func (e *errorString) Format(s fmt.State, v rune) { FormatError(e, s, v) } - -func (e *errorString) FormatError(p Printer) (next error) { - p.Print(e.s) - e.frame.Format(p) - return nil -} diff --git a/vendor/golang.org/x/xerrors/fmt.go b/vendor/golang.org/x/xerrors/fmt.go deleted file mode 100644 index 27a5d70bd..000000000 --- a/vendor/golang.org/x/xerrors/fmt.go +++ /dev/null @@ -1,190 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package xerrors - -import ( - "fmt" - "strings" - "unicode" - "unicode/utf8" - - "golang.org/x/xerrors/internal" -) - -const percentBangString = "%!" - -// Errorf formats according to a format specifier and returns the string as a -// value that satisfies error. -// -// The returned error includes the file and line number of the caller when -// formatted with additional detail enabled. If the last argument is an error -// the returned error's Format method will return it if the format string ends -// with ": %s", ": %v", or ": %w". If the last argument is an error and the -// format string ends with ": %w", the returned error implements an Unwrap -// method returning it. -// -// If the format specifier includes a %w verb with an error operand in a -// position other than at the end, the returned error will still implement an -// Unwrap method returning the operand, but the error's Format method will not -// return the wrapped error. -// -// It is invalid to include more than one %w verb or to supply it with an -// operand that does not implement the error interface. The %w verb is otherwise -// a synonym for %v. -// -// Note that as of Go 1.13, the fmt.Errorf function will do error formatting, -// but it will not capture a stack backtrace. -func Errorf(format string, a ...interface{}) error { - format = formatPlusW(format) - // Support a ": %[wsv]" suffix, which works well with xerrors.Formatter. - wrap := strings.HasSuffix(format, ": %w") - idx, format2, ok := parsePercentW(format) - percentWElsewhere := !wrap && idx >= 0 - if !percentWElsewhere && (wrap || strings.HasSuffix(format, ": %s") || strings.HasSuffix(format, ": %v")) { - err := errorAt(a, len(a)-1) - if err == nil { - return &noWrapError{fmt.Sprintf(format, a...), nil, Caller(1)} - } - // TODO: this is not entirely correct. The error value could be - // printed elsewhere in format if it mixes numbered with unnumbered - // substitutions. With relatively small changes to doPrintf we can - // have it optionally ignore extra arguments and pass the argument - // list in its entirety. - msg := fmt.Sprintf(format[:len(format)-len(": %s")], a[:len(a)-1]...) - frame := Frame{} - if internal.EnableTrace { - frame = Caller(1) - } - if wrap { - return &wrapError{msg, err, frame} - } - return &noWrapError{msg, err, frame} - } - // Support %w anywhere. - // TODO: don't repeat the wrapped error's message when %w occurs in the middle. - msg := fmt.Sprintf(format2, a...) - if idx < 0 { - return &noWrapError{msg, nil, Caller(1)} - } - err := errorAt(a, idx) - if !ok || err == nil { - // Too many %ws or argument of %w is not an error. Approximate the Go - // 1.13 fmt.Errorf message. 
- return &noWrapError{fmt.Sprintf("%sw(%s)", percentBangString, msg), nil, Caller(1)} - } - frame := Frame{} - if internal.EnableTrace { - frame = Caller(1) - } - return &wrapError{msg, err, frame} -} - -func errorAt(args []interface{}, i int) error { - if i < 0 || i >= len(args) { - return nil - } - err, ok := args[i].(error) - if !ok { - return nil - } - return err -} - -// formatPlusW is used to avoid the vet check that will barf at %w. -func formatPlusW(s string) string { - return s -} - -// Return the index of the only %w in format, or -1 if none. -// Also return a rewritten format string with %w replaced by %v, and -// false if there is more than one %w. -// TODO: handle "%[N]w". -func parsePercentW(format string) (idx int, newFormat string, ok bool) { - // Loosely copied from golang.org/x/tools/go/analysis/passes/printf/printf.go. - idx = -1 - ok = true - n := 0 - sz := 0 - var isW bool - for i := 0; i < len(format); i += sz { - if format[i] != '%' { - sz = 1 - continue - } - // "%%" is not a format directive. - if i+1 < len(format) && format[i+1] == '%' { - sz = 2 - continue - } - sz, isW = parsePrintfVerb(format[i:]) - if isW { - if idx >= 0 { - ok = false - } else { - idx = n - } - // "Replace" the last character, the 'w', with a 'v'. - p := i + sz - 1 - format = format[:p] + "v" + format[p+1:] - } - n++ - } - return idx, format, ok -} - -// Parse the printf verb starting with a % at s[0]. -// Return how many bytes it occupies and whether the verb is 'w'. -func parsePrintfVerb(s string) (int, bool) { - // Assume only that the directive is a sequence of non-letters followed by a single letter. - sz := 0 - var r rune - for i := 1; i < len(s); i += sz { - r, sz = utf8.DecodeRuneInString(s[i:]) - if unicode.IsLetter(r) { - return i + sz, r == 'w' - } - } - return len(s), false -} - -type noWrapError struct { - msg string - err error - frame Frame -} - -func (e *noWrapError) Error() string { - return fmt.Sprint(e) -} - -func (e *noWrapError) Format(s fmt.State, v rune) { FormatError(e, s, v) } - -func (e *noWrapError) FormatError(p Printer) (next error) { - p.Print(e.msg) - e.frame.Format(p) - return e.err -} - -type wrapError struct { - msg string - err error - frame Frame -} - -func (e *wrapError) Error() string { - return fmt.Sprint(e) -} - -func (e *wrapError) Format(s fmt.State, v rune) { FormatError(e, s, v) } - -func (e *wrapError) FormatError(p Printer) (next error) { - p.Print(e.msg) - e.frame.Format(p) - return e.err -} - -func (e *wrapError) Unwrap() error { - return e.err -} diff --git a/vendor/golang.org/x/xerrors/format.go b/vendor/golang.org/x/xerrors/format.go deleted file mode 100644 index 1bc9c26b9..000000000 --- a/vendor/golang.org/x/xerrors/format.go +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package xerrors - -// A Formatter formats error messages. -type Formatter interface { - error - - // FormatError prints the receiver's first error and returns the next error in - // the error chain, if any. - FormatError(p Printer) (next error) -} - -// A Printer formats error messages. -// -// The most common implementation of Printer is the one provided by package fmt -// during Printf (as of Go 1.13). Localization packages such as golang.org/x/text/message -// typically provide their own implementations. -type Printer interface { - // Print appends args to the message output. 
- Print(args ...interface{}) - - // Printf writes a formatted string. - Printf(format string, args ...interface{}) - - // Detail reports whether error detail is requested. - // After the first call to Detail, all text written to the Printer - // is formatted as additional detail, or ignored when - // detail has not been requested. - // If Detail returns false, the caller can avoid printing the detail at all. - Detail() bool -} diff --git a/vendor/golang.org/x/xerrors/frame.go b/vendor/golang.org/x/xerrors/frame.go deleted file mode 100644 index 0de628ec5..000000000 --- a/vendor/golang.org/x/xerrors/frame.go +++ /dev/null @@ -1,56 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package xerrors - -import ( - "runtime" -) - -// A Frame contains part of a call stack. -type Frame struct { - // Make room for three PCs: the one we were asked for, what it called, - // and possibly a PC for skipPleaseUseCallersFrames. See: - // https://go.googlesource.com/go/+/032678e0fb/src/runtime/extern.go#169 - frames [3]uintptr -} - -// Caller returns a Frame that describes a frame on the caller's stack. -// The argument skip is the number of frames to skip over. -// Caller(0) returns the frame for the caller of Caller. -func Caller(skip int) Frame { - var s Frame - runtime.Callers(skip+1, s.frames[:]) - return s -} - -// location reports the file, line, and function of a frame. -// -// The returned function may be "" even if file and line are not. -func (f Frame) location() (function, file string, line int) { - frames := runtime.CallersFrames(f.frames[:]) - if _, ok := frames.Next(); !ok { - return "", "", 0 - } - fr, ok := frames.Next() - if !ok { - return "", "", 0 - } - return fr.Function, fr.File, fr.Line -} - -// Format prints the stack as error detail. -// It should be called from an error's Format implementation -// after printing any other error detail. -func (f Frame) Format(p Printer) { - if p.Detail() { - function, file, line := f.location() - if function != "" { - p.Printf("%s\n ", function) - } - if file != "" { - p.Printf("%s:%d\n", file, line) - } - } -} diff --git a/vendor/golang.org/x/xerrors/internal/internal.go b/vendor/golang.org/x/xerrors/internal/internal.go deleted file mode 100644 index 89f4eca5d..000000000 --- a/vendor/golang.org/x/xerrors/internal/internal.go +++ /dev/null @@ -1,8 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package internal - -// EnableTrace indicates whether stack information should be recorded in errors. -var EnableTrace = true diff --git a/vendor/golang.org/x/xerrors/wrap.go b/vendor/golang.org/x/xerrors/wrap.go deleted file mode 100644 index 9842758ca..000000000 --- a/vendor/golang.org/x/xerrors/wrap.go +++ /dev/null @@ -1,112 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package xerrors - -import ( - "reflect" -) - -// A Wrapper provides context around another error. -type Wrapper interface { - // Unwrap returns the next error in the error chain. - // If there is no next error, Unwrap returns nil. - Unwrap() error -} - -// Opaque returns an error with the same error formatting as err -// but that does not match err and cannot be unwrapped. 
-func Opaque(err error) error { - return noWrapper{err} -} - -type noWrapper struct { - error -} - -func (e noWrapper) FormatError(p Printer) (next error) { - if f, ok := e.error.(Formatter); ok { - return f.FormatError(p) - } - p.Print(e.error) - return nil -} - -// Unwrap returns the result of calling the Unwrap method on err, if err implements -// Unwrap. Otherwise, Unwrap returns nil. -// -// Deprecated: As of Go 1.13, use errors.Unwrap instead. -func Unwrap(err error) error { - u, ok := err.(Wrapper) - if !ok { - return nil - } - return u.Unwrap() -} - -// Is reports whether any error in err's chain matches target. -// -// An error is considered to match a target if it is equal to that target or if -// it implements a method Is(error) bool such that Is(target) returns true. -// -// Deprecated: As of Go 1.13, use errors.Is instead. -func Is(err, target error) bool { - if target == nil { - return err == target - } - - isComparable := reflect.TypeOf(target).Comparable() - for { - if isComparable && err == target { - return true - } - if x, ok := err.(interface{ Is(error) bool }); ok && x.Is(target) { - return true - } - // TODO: consider supporing target.Is(err). This would allow - // user-definable predicates, but also may allow for coping with sloppy - // APIs, thereby making it easier to get away with them. - if err = Unwrap(err); err == nil { - return false - } - } -} - -// As finds the first error in err's chain that matches the type to which target -// points, and if so, sets the target to its value and returns true. An error -// matches a type if it is assignable to the target type, or if it has a method -// As(interface{}) bool such that As(target) returns true. As will panic if target -// is not a non-nil pointer to a type which implements error or is of interface type. -// -// The As method should set the target to its value and return true if err -// matches the type to which target points. -// -// Deprecated: As of Go 1.13, use errors.As instead. -func As(err error, target interface{}) bool { - if target == nil { - panic("errors: target cannot be nil") - } - val := reflect.ValueOf(target) - typ := val.Type() - if typ.Kind() != reflect.Ptr || val.IsNil() { - panic("errors: target must be a non-nil pointer") - } - if e := typ.Elem(); e.Kind() != reflect.Interface && !e.Implements(errorType) { - panic("errors: *target must be interface or implement error") - } - targetType := typ.Elem() - for err != nil { - if reflect.TypeOf(err).AssignableTo(targetType) { - val.Elem().Set(reflect.ValueOf(err)) - return true - } - if x, ok := err.(interface{ As(interface{}) bool }); ok && x.As(target) { - return true - } - err = Unwrap(err) - } - return false -} - -var errorType = reflect.TypeOf((*error)(nil)).Elem() diff --git a/vendor/google.golang.org/api/internal/version.go b/vendor/google.golang.org/api/internal/version.go index 028e96ede..c95bd7144 100644 --- a/vendor/google.golang.org/api/internal/version.go +++ b/vendor/google.golang.org/api/internal/version.go @@ -5,4 +5,4 @@ package internal // Version is the current tagged release of the library. 
-const Version = "0.151.0" +const Version = "0.152.0" diff --git a/vendor/modules.txt b/vendor/modules.txt index 5435e21ae..92dbb2a13 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -1,4 +1,4 @@ -# cloud.google.com/go v0.110.10 +# cloud.google.com/go v0.111.0 ## explicit; go 1.19 cloud.google.com/go/internal cloud.google.com/go/internal/optional @@ -32,9 +32,9 @@ github.com/Masterminds/semver # github.com/NYTimes/gziphandler v1.1.1 ## explicit; go 1.11 github.com/NYTimes/gziphandler -# github.com/antlr/antlr4/runtime/Go/antlr/v4 v4.0.0-20230305170008-8188dc5388df -## explicit; go 1.18 -github.com/antlr/antlr4/runtime/Go/antlr/v4 +# github.com/antlr4-go/antlr/v4 v4.13.0 +## explicit; go 1.20 +github.com/antlr4-go/antlr/v4 # github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 ## explicit; go 1.13 github.com/asaskevich/govalidator @@ -126,17 +126,21 @@ github.com/golang/protobuf/ptypes/any github.com/golang/protobuf/ptypes/duration github.com/golang/protobuf/ptypes/timestamp github.com/golang/protobuf/ptypes/wrappers -# github.com/google/cel-go v0.16.1 +# github.com/google/cel-go v0.18.2 ## explicit; go 1.18 github.com/google/cel-go/cel github.com/google/cel-go/checker github.com/google/cel-go/checker/decls github.com/google/cel-go/common +github.com/google/cel-go/common/ast github.com/google/cel-go/common/containers github.com/google/cel-go/common/debug +github.com/google/cel-go/common/decls +github.com/google/cel-go/common/functions github.com/google/cel-go/common/operators github.com/google/cel-go/common/overloads github.com/google/cel-go/common/runes +github.com/google/cel-go/common/stdlib github.com/google/cel-go/common/types github.com/google/cel-go/common/types/pb github.com/google/cel-go/common/types/ref @@ -265,7 +269,7 @@ github.com/modern-go/reflect2 # github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 ## explicit github.com/munnerz/goautoneg -# github.com/onsi/ginkgo/v2 v2.13.1 +# github.com/onsi/ginkgo/v2 v2.13.2 ## explicit; go 1.18 github.com/onsi/ginkgo/v2 github.com/onsi/ginkgo/v2/config @@ -461,7 +465,7 @@ go.uber.org/zap/zapgrpc # go4.org v0.0.0-20230225012048-214862532bf5 ## explicit; go 1.13 go4.org/bytereplacer -# golang.org/x/crypto v0.15.0 +# golang.org/x/crypto v0.16.0 ## explicit; go 1.18 golang.org/x/crypto/blowfish golang.org/x/crypto/chacha20 @@ -484,7 +488,7 @@ golang.org/x/crypto/ssh/internal/bcrypt_pbkdf golang.org/x/exp/constraints golang.org/x/exp/maps golang.org/x/exp/slices -# golang.org/x/net v0.18.0 +# golang.org/x/net v0.19.0 ## explicit; go 1.18 golang.org/x/net/context golang.org/x/net/html @@ -499,7 +503,7 @@ golang.org/x/net/internal/timeseries golang.org/x/net/proxy golang.org/x/net/trace golang.org/x/net/websocket -# golang.org/x/oauth2 v0.14.0 +# golang.org/x/oauth2 v0.15.0 ## explicit; go 1.18 golang.org/x/oauth2 golang.org/x/oauth2/authhandler @@ -514,14 +518,14 @@ golang.org/x/oauth2/jwt ## explicit; go 1.18 golang.org/x/sync/semaphore golang.org/x/sync/singleflight -# golang.org/x/sys v0.14.0 +# golang.org/x/sys v0.15.0 ## explicit; go 1.18 golang.org/x/sys/cpu golang.org/x/sys/plan9 golang.org/x/sys/unix golang.org/x/sys/windows golang.org/x/sys/windows/registry -# golang.org/x/term v0.14.0 +# golang.org/x/term v0.15.0 ## explicit; go 1.18 golang.org/x/term # golang.org/x/text v0.14.0 @@ -555,21 +559,19 @@ golang.org/x/text/transform golang.org/x/text/unicode/bidi golang.org/x/text/unicode/norm golang.org/x/text/width -# golang.org/x/time v0.4.0 +# golang.org/x/time v0.5.0 ## explicit; go 1.18 
golang.org/x/time/rate -# golang.org/x/tools v0.15.0 +# golang.org/x/tools v0.16.0 ## explicit; go 1.18 golang.org/x/tools/go/ast/inspector golang.org/x/tools/internal/typeparams # golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 ## explicit; go 1.18 -golang.org/x/xerrors -golang.org/x/xerrors/internal # gomodules.xyz/jsonpatch/v2 v2.4.0 ## explicit; go 1.20 gomodules.xyz/jsonpatch/v2 -# google.golang.org/api v0.151.0 +# google.golang.org/api v0.152.0 ## explicit; go 1.19 google.golang.org/api/cloudresourcemanager/v1 google.golang.org/api/googleapi