diff --git a/go.mod b/go.mod index d636b6f618..31a4749b13 100644 --- a/go.mod +++ b/go.mod @@ -6,7 +6,7 @@ require ( cloud.google.com/go/compute/metadata v0.5.2 cloud.google.com/go/storage v1.46.0 github.com/fsnotify/fsnotify v1.8.0 - github.com/golangci/golangci-lint v1.61.0 + github.com/golangci/golangci-lint v1.62.0 github.com/google/addlicense v1.1.1 github.com/google/go-cmp v0.6.0 github.com/google/go-containerregistry v0.20.2 @@ -28,17 +28,17 @@ require ( github.com/sigstore/sigstore/pkg/signature/kms/hashivault v1.8.10 github.com/spiffe/go-spiffe/v2 v2.4.0 github.com/stretchr/testify v1.9.0 - github.com/tektoncd/pipeline v0.65.0 + github.com/tektoncd/pipeline v0.65.1 github.com/tektoncd/plumbing v0.0.0-20230907180608-5625252a2de1 go.opencensus.io v0.24.0 go.uber.org/zap v1.27.0 gocloud.dev v0.40.0 gocloud.dev/docstore/mongodocstore v0.40.0 gocloud.dev/pubsub/kafkapubsub v0.40.0 - golang.org/x/crypto v0.28.0 - golang.org/x/exp v0.0.0-20240904232852-e7e105dedf7e - google.golang.org/grpc v1.67.1 - google.golang.org/protobuf v1.35.1 + golang.org/x/crypto v0.29.0 + golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 + google.golang.org/grpc v1.68.0 + google.golang.org/protobuf v1.35.2 k8s.io/api v0.31.2 k8s.io/apimachinery v0.31.2 k8s.io/client-go v0.31.2 @@ -63,11 +63,11 @@ require ( contrib.go.opencensus.io/exporter/prometheus v0.4.2 // indirect filippo.io/edwards25519 v1.1.0 // indirect github.com/4meepo/tagalign v1.3.4 // indirect - github.com/Abirdcfly/dupword v0.1.1 // indirect + github.com/Abirdcfly/dupword v0.1.3 // indirect github.com/AliyunContainerService/ack-ram-tool/pkg/credentials/provider v0.14.0 // indirect - github.com/Antonboom/errname v0.1.13 // indirect - github.com/Antonboom/nilnil v0.1.9 // indirect - github.com/Antonboom/testifylint v1.4.3 // indirect + github.com/Antonboom/errname v1.0.0 // indirect + github.com/Antonboom/nilnil v1.0.0 // indirect + github.com/Antonboom/testifylint v1.5.0 // indirect github.com/Azure/azure-sdk-for-go v68.0.0+incompatible // indirect github.com/Azure/azure-sdk-for-go/sdk/azcore v1.15.0 // indirect github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0 // indirect @@ -96,8 +96,8 @@ require ( github.com/OpenPeeDeeP/depguard/v2 v2.2.0 // indirect github.com/ProtonMail/go-crypto v1.0.0 // indirect github.com/ThalesIgnite/crypto11 v1.2.5 // indirect - github.com/alecthomas/go-check-sumtype v0.1.4 // indirect - github.com/alexkohler/nakedret/v2 v2.0.4 // indirect + github.com/alecthomas/go-check-sumtype v0.2.0 // indirect + github.com/alexkohler/nakedret/v2 v2.0.5 // indirect github.com/alexkohler/prealloc v1.0.0 // indirect github.com/alibabacloud-go/alibabacloud-gateway-spi v0.0.4 // indirect github.com/alibabacloud-go/cr-20160607 v1.0.1 // indirect @@ -134,15 +134,15 @@ require ( github.com/aws/smithy-go v1.22.0 // indirect github.com/awslabs/amazon-ecr-credential-helper/ecr-login v0.0.0-20231024185945-8841054dbdb8 // indirect github.com/beorn7/perks v1.0.1 // indirect - github.com/bkielbasa/cyclop v1.2.1 // indirect + github.com/bkielbasa/cyclop v1.2.3 // indirect github.com/blang/semver v3.5.1+incompatible // indirect github.com/blang/semver/v4 v4.0.0 // indirect github.com/blendle/zapdriver v1.3.1 // indirect github.com/blizzy78/varnamelen v0.8.0 // indirect github.com/bmatcuk/doublestar/v4 v4.0.2 // indirect github.com/bombsimon/wsl/v4 v4.4.1 // indirect - github.com/breml/bidichk v0.2.7 // indirect - github.com/breml/errchkjson v0.3.6 // indirect + github.com/breml/bidichk v0.3.2 // indirect + github.com/breml/errchkjson v0.4.0 
// indirect github.com/buildkite/agent/v3 v3.81.0 // indirect github.com/buildkite/go-pipeline v0.13.1 // indirect github.com/buildkite/interpolate v0.1.3 // indirect @@ -157,7 +157,7 @@ require ( github.com/charithe/durationcheck v0.0.10 // indirect github.com/chavacava/garif v0.1.0 // indirect github.com/chrismellard/docker-credential-acr-env v0.0.0-20230304212654-82a0ddb27589 // indirect - github.com/ckaznocha/intrange v0.2.0 // indirect + github.com/ckaznocha/intrange v0.2.1 // indirect github.com/clbanning/mxj/v2 v2.7.0 // indirect github.com/cloudevents/sdk-go/v2 v2.15.2 // indirect github.com/cloudflare/circl v1.3.7 // indirect @@ -186,15 +186,15 @@ require ( github.com/envoyproxy/protoc-gen-validate v1.1.0 // indirect github.com/ettle/strcase v0.2.0 // indirect github.com/evanphx/json-patch/v5 v5.9.0 // indirect - github.com/fatih/color v1.17.0 // indirect + github.com/fatih/color v1.18.0 // indirect github.com/fatih/structtag v1.2.0 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect github.com/firefart/nonamedreturns v1.0.5 // indirect github.com/fxamacker/cbor/v2 v2.7.0 // indirect github.com/fzipp/gocyclo v0.6.0 // indirect - github.com/ghostiam/protogetter v0.3.6 // indirect + github.com/ghostiam/protogetter v0.3.8 // indirect github.com/go-chi/chi v4.1.2+incompatible // indirect - github.com/go-critic/go-critic v0.11.4 // indirect + github.com/go-critic/go-critic v0.11.5 // indirect github.com/go-jose/go-jose/v3 v3.0.3 // indirect github.com/go-jose/go-jose/v4 v4.0.4 // indirect github.com/go-kit/log v0.2.1 // indirect @@ -218,7 +218,7 @@ require ( github.com/go-toolsmith/astp v1.1.0 // indirect github.com/go-toolsmith/strparse v1.1.0 // indirect github.com/go-toolsmith/typep v1.1.0 // indirect - github.com/go-viper/mapstructure/v2 v2.1.0 // indirect + github.com/go-viper/mapstructure/v2 v2.2.1 // indirect github.com/go-xmlfmt/xmlfmt v1.1.2 // indirect github.com/gobwas/glob v0.2.3 // indirect github.com/gofrs/flock v0.12.1 // indirect @@ -229,6 +229,7 @@ require ( github.com/golang/protobuf v1.5.4 // indirect github.com/golang/snappy v0.0.4 // indirect github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a // indirect + github.com/golangci/go-printf-func-name v0.1.0 // indirect github.com/golangci/gofmt v0.0.0-20240816233607-d8596aa466a9 // indirect github.com/golangci/misspell v0.6.0 // indirect github.com/golangci/modinfo v0.3.4 // indirect @@ -279,7 +280,6 @@ require ( github.com/jellydator/ttlcache/v3 v3.3.0 // indirect github.com/jgautheron/goconst v1.7.1 // indirect github.com/jingyugao/rowserrcheck v1.1.1 // indirect - github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af // indirect github.com/jjti/go-spancheck v0.6.2 // indirect github.com/jmespath/go-jmespath v0.4.0 // indirect github.com/josharian/intern v1.0.0 // indirect @@ -288,19 +288,18 @@ require ( github.com/karamaru-alpha/copyloopvar v1.1.0 // indirect github.com/kelseyhightower/envconfig v1.4.0 // indirect github.com/kevinburke/ssh_config v1.2.0 // indirect - github.com/kisielk/errcheck v1.7.0 // indirect + github.com/kisielk/errcheck v1.8.0 // indirect github.com/kkHAIKE/contextcheck v1.1.5 // indirect github.com/klauspost/compress v1.17.9 // indirect github.com/kulti/thelper v0.6.3 // indirect github.com/kunwardeep/paralleltest v1.0.10 // indirect github.com/kylelemons/godebug v1.1.0 // indirect github.com/kyoh86/exportloopref v0.1.11 // indirect - github.com/lasiar/canonicalheader v1.1.1 // indirect + github.com/lasiar/canonicalheader v1.1.2 // indirect 
github.com/ldez/gomoddirectives v0.2.4 // indirect github.com/ldez/tagliatelle v0.5.0 // indirect github.com/leonklingele/grouper v1.1.2 // indirect github.com/letsencrypt/boulder v0.0.0-20240620165639-de9c06129bec // indirect - github.com/lufeee/execinquery v1.2.1 // indirect github.com/macabu/inamedparam v0.1.3 // indirect github.com/magiconair/properties v1.8.7 // indirect github.com/mailru/easyjson v0.7.7 // indirect @@ -309,8 +308,8 @@ require ( github.com/matoous/godox v0.0.0-20230222163458-006bad1f9d26 // indirect github.com/mattn/go-colorable v0.1.13 // indirect github.com/mattn/go-isatty v0.0.20 // indirect - github.com/mattn/go-runewidth v0.0.15 // indirect - github.com/mgechev/revive v1.3.9 // indirect + github.com/mattn/go-runewidth v0.0.16 // indirect + github.com/mgechev/revive v1.5.0 // indirect github.com/miekg/pkcs11 v1.1.1 // indirect github.com/mitchellh/go-homedir v1.1.0 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect @@ -324,7 +323,7 @@ require ( github.com/nishanths/exhaustive v0.12.0 // indirect github.com/nishanths/predeclared v0.2.2 // indirect github.com/nozzle/throttler v0.0.0-20180817012639-2ea982251481 // indirect - github.com/nunnatsa/ginkgolinter v0.16.2 // indirect + github.com/nunnatsa/ginkgolinter v0.18.0 // indirect github.com/oklog/ulid v1.3.1 // indirect github.com/oleiade/reflections v1.1.0 // indirect github.com/olekukonko/tablewriter v0.0.5 // indirect @@ -348,8 +347,10 @@ require ( github.com/quasilyte/gogrep v0.5.0 // indirect github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 // indirect github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 // indirect + github.com/raeperd/recvcheck v0.1.2 // indirect github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 // indirect - github.com/rivo/uniseg v0.4.4 // indirect + github.com/rivo/uniseg v0.4.7 // indirect + github.com/rogpeppe/go-internal v1.13.1 // indirect github.com/ryancurrah/gomodguard v1.3.5 // indirect github.com/ryanrolds/sqlclosecheck v0.5.1 // indirect github.com/ryanuber/go-glob v1.0.0 // indirect @@ -360,7 +361,7 @@ require ( github.com/sashamelentyev/interfacebloat v1.1.0 // indirect github.com/sashamelentyev/usestdlibvars v1.27.0 // indirect github.com/sassoftware/relic v7.2.1+incompatible // indirect - github.com/securego/gosec/v2 v2.21.2 // indirect + github.com/securego/gosec/v2 v2.21.4 // indirect github.com/segmentio/ksuid v1.0.4 // indirect github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect github.com/shazow/go-diff v0.0.0-20160112020656-b6b7b6733b8c // indirect @@ -370,9 +371,9 @@ require ( github.com/sigstore/timestamp-authority v1.2.2 // indirect github.com/sirupsen/logrus v1.9.3 // indirect github.com/sivchari/containedctx v1.0.3 // indirect - github.com/sivchari/tenv v1.10.0 // indirect + github.com/sivchari/tenv v1.12.1 // indirect github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 // indirect - github.com/sonatard/noctx v0.0.2 // indirect + github.com/sonatard/noctx v0.1.0 // indirect github.com/sourcegraph/conc v0.3.0 // indirect github.com/sourcegraph/go-diff v0.7.0 // indirect github.com/spf13/afero v1.11.0 // indirect @@ -388,11 +389,11 @@ require ( github.com/subosito/gotenv v1.6.0 // indirect github.com/syndtr/goleveldb v1.0.1-0.20220721030215-126854af5e6d // indirect github.com/tdakkota/asciicheck v0.2.0 // indirect - github.com/tetafro/godot v1.4.17 // indirect + github.com/tetafro/godot v1.4.18 // indirect github.com/thales-e-security/pool v0.0.2 // indirect 
github.com/theupdateframework/go-tuf v0.7.0 // indirect github.com/timakin/bodyclose v0.0.0-20230421092635-574207250966 // indirect - github.com/timonwong/loggercheck v0.9.4 // indirect + github.com/timonwong/loggercheck v0.10.1 // indirect github.com/titanous/rocacheck v0.0.0-20171023193734-afe73141d399 // indirect github.com/tjfoc/gmsm v1.4.1 // indirect github.com/tomarrell/wrapcheck/v2 v2.9.0 // indirect @@ -401,6 +402,7 @@ require ( github.com/ultraware/funlen v0.1.0 // indirect github.com/ultraware/whitespace v0.1.1 // indirect github.com/uudashr/gocognit v1.1.3 // indirect + github.com/uudashr/iface v1.2.0 // indirect github.com/vbatts/tar-split v0.11.5 // indirect github.com/x448/float16 v0.8.4 // indirect github.com/xanzy/go-gitlab v0.109.0 // indirect @@ -415,7 +417,7 @@ require ( github.com/youmark/pkcs8 v0.0.0-20240726163527-a2c0da244d78 // indirect github.com/zeebo/errs v1.3.0 // indirect gitlab.com/bosi/decorder v0.4.2 // indirect - go-simpler.org/musttag v0.12.2 // indirect + go-simpler.org/musttag v0.13.0 // indirect go-simpler.org/sloglint v0.7.2 // indirect go.mongodb.org/mongo-driver v1.16.1 // indirect go.opentelemetry.io/contrib/detectors/gcp v1.29.0 // indirect @@ -427,18 +429,18 @@ require ( go.opentelemetry.io/otel/sdk/metric v1.29.0 // indirect go.opentelemetry.io/otel/trace v1.29.0 // indirect go.step.sm/crypto v0.51.2 // indirect - go.uber.org/automaxprocs v1.5.3 // indirect + go.uber.org/automaxprocs v1.6.0 // indirect go.uber.org/multierr v1.11.0 // indirect - golang.org/x/exp/typeparams v0.0.0-20240314144324-c7f7c6466f7f // indirect - golang.org/x/mod v0.21.0 // indirect - golang.org/x/net v0.30.0 // indirect + golang.org/x/exp/typeparams v0.0.0-20240909161429-701f63a606c0 // indirect + golang.org/x/mod v0.22.0 // indirect + golang.org/x/net v0.31.0 // indirect golang.org/x/oauth2 v0.23.0 // indirect - golang.org/x/sync v0.8.0 // indirect - golang.org/x/sys v0.26.0 // indirect - golang.org/x/term v0.25.0 // indirect - golang.org/x/text v0.19.0 // indirect + golang.org/x/sync v0.9.0 // indirect + golang.org/x/sys v0.27.0 // indirect + golang.org/x/term v0.26.0 // indirect + golang.org/x/text v0.20.0 // indirect golang.org/x/time v0.7.0 // indirect - golang.org/x/tools v0.24.0 // indirect + golang.org/x/tools v0.27.0 // indirect golang.org/x/xerrors v0.0.0-20240716161551-93cc26a95ae9 // indirect gomodules.xyz/jsonpatch/v2 v2.4.0 // indirect google.golang.org/api v0.203.0 // indirect diff --git a/go.sum b/go.sum index 201a398486..4f15b23b7b 100644 --- a/go.sum +++ b/go.sum @@ -105,18 +105,18 @@ filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA= filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= github.com/4meepo/tagalign v1.3.4 h1:P51VcvBnf04YkHzjfclN6BbsopfJR5rxs1n+5zHt+w8= github.com/4meepo/tagalign v1.3.4/go.mod h1:M+pnkHH2vG8+qhE5bVc/zeP7HS/j910Fwa9TUSyZVI0= -github.com/Abirdcfly/dupword v0.1.1 h1:Bsxe0fIw6OwBtXMIncaTxCLHYO5BB+3mcsR5E8VXloY= -github.com/Abirdcfly/dupword v0.1.1/go.mod h1:B49AcJdTYYkpd4HjgAcutNGG9HZ2JWwKunH9Y2BA6sM= +github.com/Abirdcfly/dupword v0.1.3 h1:9Pa1NuAsZvpFPi9Pqkd93I7LIYRURj+A//dFd5tgBeE= +github.com/Abirdcfly/dupword v0.1.3/go.mod h1:8VbB2t7e10KRNdwTVoxdBaxla6avbhGzb8sCTygUMhw= github.com/AdamKorcz/go-fuzz-headers-1 v0.0.0-20230919221257-8b5d3ce2d11d h1:zjqpY4C7H15HjRPEenkS4SAn3Jy2eRRjkjZbGR30TOg= github.com/AdamKorcz/go-fuzz-headers-1 v0.0.0-20230919221257-8b5d3ce2d11d/go.mod h1:XNqJ7hv2kY++g8XEHREpi+JqZo3+0l+CH2egBVN4yqM= 
github.com/AliyunContainerService/ack-ram-tool/pkg/credentials/provider v0.14.0 h1:kcnfY4vljxXliXDBrA9K9lwF8IoEZ4Up6Eg9kWTIm28= github.com/AliyunContainerService/ack-ram-tool/pkg/credentials/provider v0.14.0/go.mod h1:tlqp9mUGbsP+0z3Q+c0Q5MgSdq/OMwQhm5bffR3Q3ss= -github.com/Antonboom/errname v0.1.13 h1:JHICqsewj/fNckzrfVSe+T33svwQxmjC+1ntDsHOVvM= -github.com/Antonboom/errname v0.1.13/go.mod h1:uWyefRYRN54lBg6HseYCFhs6Qjcy41Y3Jl/dVhA87Ns= -github.com/Antonboom/nilnil v0.1.9 h1:eKFMejSxPSA9eLSensFmjW2XTgTwJMjZ8hUHtV4s/SQ= -github.com/Antonboom/nilnil v0.1.9/go.mod h1:iGe2rYwCq5/Me1khrysB4nwI7swQvjclR8/YRPl5ihQ= -github.com/Antonboom/testifylint v1.4.3 h1:ohMt6AHuHgttaQ1xb6SSnxCeK4/rnK7KKzbvs7DmEck= -github.com/Antonboom/testifylint v1.4.3/go.mod h1:+8Q9+AOLsz5ZiQiiYujJKs9mNz398+M6UgslP4qgJLA= +github.com/Antonboom/errname v1.0.0 h1:oJOOWR07vS1kRusl6YRSlat7HFnb3mSfMl6sDMRoTBA= +github.com/Antonboom/errname v1.0.0/go.mod h1:gMOBFzK/vrTiXN9Oh+HFs+e6Ndl0eTFbtsRTSRdXyGI= +github.com/Antonboom/nilnil v1.0.0 h1:n+v+B12dsE5tbAqRODXmEKfZv9j2KcTBrp+LkoM4HZk= +github.com/Antonboom/nilnil v1.0.0/go.mod h1:fDJ1FSFoLN6yoG65ANb1WihItf6qt9PJVTn/s2IrcII= +github.com/Antonboom/testifylint v1.5.0 h1:dlUIsDMtCrZWUnvkaCz3quJCoIjaGi41GzjPBGkkJ8A= +github.com/Antonboom/testifylint v1.5.0/go.mod h1:wqaJbu0Blb5Wag2wv7Z5xt+CIV+eVLxtGZrlK13z3AE= github.com/Azure/azure-sdk-for-go v68.0.0+incompatible h1:fcYLmCpyNYRnvJbPerq7U0hS+6+I79yEDJBqVNcqUzU= github.com/Azure/azure-sdk-for-go v68.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= github.com/Azure/azure-sdk-for-go/sdk/azcore v1.15.0 h1:eXzkOEXbSTOa7cJ7EqeCVi/OFi/ppDrUtQuttCWy74c= @@ -200,8 +200,8 @@ github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7 h1:uSoVVbwJiQipAclBb github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7/go.mod h1:6zEj6s6u/ghQa61ZWa/C2Aw3RkjiTBOix7dkqa1VLIs= github.com/alecthomas/assert/v2 v2.2.2 h1:Z/iVC0xZfWTaFNE6bA3z07T86hd45Xe2eLt6WVy2bbk= github.com/alecthomas/assert/v2 v2.2.2/go.mod h1:pXcQ2Asjp247dahGEmsZ6ru0UVwnkhktn7S0bBDLxvQ= -github.com/alecthomas/go-check-sumtype v0.1.4 h1:WCvlB3l5Vq5dZQTFmodqL2g68uHiSwwlWcT5a2FGK0c= -github.com/alecthomas/go-check-sumtype v0.1.4/go.mod h1:WyYPfhfkdhyrdaligV6svFopZV8Lqdzn5pyVBaV6jhQ= +github.com/alecthomas/go-check-sumtype v0.2.0 h1:Bo+e4DFf3rs7ME9w/0SU/g6nmzJaphduP8Cjiz0gbwY= +github.com/alecthomas/go-check-sumtype v0.2.0/go.mod h1:WyYPfhfkdhyrdaligV6svFopZV8Lqdzn5pyVBaV6jhQ= github.com/alecthomas/repr v0.2.0 h1:HAzS41CIzNW5syS8Mf9UwXhNH1J9aix/BvDRf1Ml2Yk= github.com/alecthomas/repr v0.2.0/go.mod h1:Fr0507jx4eOXV7AlPV6AVZLYrLIuIeSOWtW57eE/O/4= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= @@ -212,8 +212,8 @@ github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk5 github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE= github.com/alessio/shellescape v1.4.1 h1:V7yhSDDn8LP4lc4jS8pFkt0zCnzVJlG5JXy9BVKJUX0= github.com/alessio/shellescape v1.4.1/go.mod h1:PZAiSCk0LJaZkiCSkPv8qIobYglO3FPpyFjDCtHLS30= -github.com/alexkohler/nakedret/v2 v2.0.4 h1:yZuKmjqGi0pSmjGpOC016LtPJysIL0WEUiaXW5SUnNg= -github.com/alexkohler/nakedret/v2 v2.0.4/go.mod h1:bF5i0zF2Wo2o4X4USt9ntUWve6JbFv02Ff4vlkmS/VU= +github.com/alexkohler/nakedret/v2 v2.0.5 h1:fP5qLgtwbx9EJE8dGEERT02YwS8En4r9nnZ71RK+EVU= +github.com/alexkohler/nakedret/v2 v2.0.5/go.mod h1:bF5i0zF2Wo2o4X4USt9ntUWve6JbFv02Ff4vlkmS/VU= github.com/alexkohler/prealloc v1.0.0 
h1:Hbq0/3fJPQhNkN0dR95AVrr6R7tou91y0uHG5pOcUuw= github.com/alexkohler/prealloc v1.0.0/go.mod h1:VetnK3dIgFBBKmg0YnD9F9x6Icjd+9cvfHR56wJVlKE= github.com/alibabacloud-go/alibabacloud-gateway-spi v0.0.2/go.mod h1:sCavSAvdzOjul4cEqeVtvlSaSScfNsTQ+46HwlTL1hc= @@ -320,8 +320,8 @@ github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+Ce github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= -github.com/bkielbasa/cyclop v1.2.1 h1:AeF71HZDob1P2/pRm1so9cd1alZnrpyc4q2uP2l0gJY= -github.com/bkielbasa/cyclop v1.2.1/go.mod h1:K/dT/M0FPAiYjBgQGau7tz+3TMh4FWAEqlMhzFWCrgM= +github.com/bkielbasa/cyclop v1.2.3 h1:faIVMIGDIANuGPWH031CZJTi2ymOQBULs9H21HSMa5w= +github.com/bkielbasa/cyclop v1.2.3/go.mod h1:kHTwA9Q0uZqOADdupvcFJQtp/ksSnytRMe8ztxG8Fuo= github.com/blang/semver v3.5.1+incompatible h1:cQNTCjp13qL8KC3Nbxr/y2Bqb63oX6wdnnjpJbkM4JQ= github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= github.com/blang/semver/v4 v4.0.0 h1:1PFHFE6yCCTv8C1TeyNNarDzntLi7wMI5i/pzqYIsAM= @@ -336,10 +336,10 @@ github.com/bmatcuk/doublestar/v4 v4.0.2 h1:X0krlUVAVmtr2cRoTqR8aDMrDqnB36ht8wpWT github.com/bmatcuk/doublestar/v4 v4.0.2/go.mod h1:xBQ8jztBU6kakFMg+8WGxn0c6z1fTSPVIjEY1Wr7jzc= github.com/bombsimon/wsl/v4 v4.4.1 h1:jfUaCkN+aUpobrMO24zwyAMwMAV5eSziCkOKEauOLdw= github.com/bombsimon/wsl/v4 v4.4.1/go.mod h1:Xu/kDxGZTofQcDGCtQe9KCzhHphIe0fDuyWTxER9Feo= -github.com/breml/bidichk v0.2.7 h1:dAkKQPLl/Qrk7hnP6P+E0xOodrq8Us7+U0o4UBOAlQY= -github.com/breml/bidichk v0.2.7/go.mod h1:YodjipAGI9fGcYM7II6wFvGhdMYsC5pHDlGzqvEW3tQ= -github.com/breml/errchkjson v0.3.6 h1:VLhVkqSBH96AvXEyclMR37rZslRrY2kcyq+31HCsVrA= -github.com/breml/errchkjson v0.3.6/go.mod h1:jhSDoFheAF2RSDOlCfhHO9KqhZgAYLyvHe7bRCX8f/U= +github.com/breml/bidichk v0.3.2 h1:xV4flJ9V5xWTqxL+/PMFF6dtJPvZLPsyixAoPe8BGJs= +github.com/breml/bidichk v0.3.2/go.mod h1:VzFLBxuYtT23z5+iVkamXO386OB+/sVwZOpIj6zXGos= +github.com/breml/errchkjson v0.4.0 h1:gftf6uWZMtIa/Is3XJgibewBm2ksAQSY/kABDNFTAdk= +github.com/breml/errchkjson v0.4.0/go.mod h1:AuBOSTHyLSaaAFlWsRSuRBIroCh3eh7ZHh5YeelDIk8= github.com/buildkite/agent/v3 v3.81.0 h1:JVfkng2XnsXesFXwiFwLJFkuzVu4zvoJCvedfoIXD6E= github.com/buildkite/agent/v3 v3.81.0/go.mod h1:edJeyycODRxaFvpT22rDGwaQ5oa4eB8GjtbjgX5VpFw= github.com/buildkite/go-pipeline v0.13.1 h1:Y9p8pQIwPtauVwNrcmTDH6+XK7jE1nLuvWVaK8oymA8= @@ -376,8 +376,8 @@ github.com/chrismellard/docker-credential-acr-env v0.0.0-20230304212654-82a0ddb2 github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= -github.com/ckaznocha/intrange v0.2.0 h1:FykcZuJ8BD7oX93YbO1UY9oZtkRbp+1/kJcDjkefYLs= -github.com/ckaznocha/intrange v0.2.0/go.mod h1:r5I7nUlAAG56xmkOpw4XVr16BXhwYTUdcuRFeevn1oE= +github.com/ckaznocha/intrange v0.2.1 h1:M07spnNEQoALOJhwrImSrJLaxwuiQK+hA2DeajBlwYk= +github.com/ckaznocha/intrange v0.2.1/go.mod h1:7NEhVyf8fzZO5Ds7CRaqPEm52Ut83hsTiL5zbER/HYk= github.com/clbanning/mxj/v2 v2.5.5/go.mod h1:hNiWqW14h+kc+MdF9C6/YoRfjEJoR3ou6tn/Qo+ve2s= github.com/clbanning/mxj/v2 v2.7.0 h1:WA/La7UGCanFe5NpHF0Q3DNtnCsVoxbPKuyBNHWRyME= github.com/clbanning/mxj/v2 v2.7.0/go.mod 
h1:hNiWqW14h+kc+MdF9C6/YoRfjEJoR3ou6tn/Qo+ve2s= @@ -489,8 +489,8 @@ github.com/evanphx/json-patch v4.12.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQL github.com/evanphx/json-patch/v5 v5.9.0 h1:kcBlZQbplgElYIlo/n1hJbls2z/1awpXxpRi0/FOJfg= github.com/evanphx/json-patch/v5 v5.9.0/go.mod h1:VNkHZ/282BpEyt/tObQO8s5CMPmYYq14uClGH4abBuQ= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= -github.com/fatih/color v1.17.0 h1:GlRw1BRJxkpqUCBKzKOw098ed57fEsKeNjpTe3cSjK4= -github.com/fatih/color v1.17.0/go.mod h1:YZ7TlrGPkiz6ku9fK3TLD/pl3CpsiFyu8N92HLgmosI= +github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM= +github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU= github.com/fatih/structtag v1.2.0 h1:/OdNE99OxoI/PqaW/SuSK9uxxT3f/tcSZgon/ssNSx4= github.com/fatih/structtag v1.2.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94= github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= @@ -512,15 +512,15 @@ github.com/fxamacker/cbor/v2 v2.7.0/go.mod h1:pxXPTn3joSm21Gbwsv0w9OSA2y1HFR9qXE github.com/fzipp/gocyclo v0.6.0 h1:lsblElZG7d3ALtGMx9fmxeTKZaLLpU8mET09yN4BBLo= github.com/fzipp/gocyclo v0.6.0/go.mod h1:rXPyn8fnlpa0R2csP/31uerbiVBugk5whMdlyaLkLoA= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= -github.com/ghostiam/protogetter v0.3.6 h1:R7qEWaSgFCsy20yYHNIJsU9ZOb8TziSRRxuAOTVKeOk= -github.com/ghostiam/protogetter v0.3.6/go.mod h1:7lpeDnEJ1ZjL/YtyoN99ljO4z0pd3H0d18/t2dPBxHw= +github.com/ghostiam/protogetter v0.3.8 h1:LYcXbYvybUyTIxN2Mj9h6rHrDZBDwZloPoKctWrFyJY= +github.com/ghostiam/protogetter v0.3.8/go.mod h1:WZ0nw9pfzsgxuRsPOFQomgDVSWtDLJRfQJEhsGbmQMA= github.com/gliderlabs/ssh v0.2.2/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0= github.com/gliderlabs/ssh v0.3.7 h1:iV3Bqi942d9huXnzEF2Mt+CY9gLu8DNM4Obd+8bODRE= github.com/gliderlabs/ssh v0.3.7/go.mod h1:zpHEXBstFnQYtGnB8k8kQLol82umzn/2/snG7alWVD8= github.com/go-chi/chi v4.1.2+incompatible h1:fGFk2Gmi/YKXk0OmGfBh0WgmN3XB8lVnEyNz34tQRec= github.com/go-chi/chi v4.1.2+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ= -github.com/go-critic/go-critic v0.11.4 h1:O7kGOCx0NDIni4czrkRIXTnit0mkyKOCePh3My6OyEU= -github.com/go-critic/go-critic v0.11.4/go.mod h1:2QAdo4iuLik5S9YG0rT4wcZ8QxwHYkrr6/2MWAiv/vc= +github.com/go-critic/go-critic v0.11.5 h1:TkDTOn5v7EEngMxu8KbuFqFR43USaaH8XRJLz1jhVYA= +github.com/go-critic/go-critic v0.11.5/go.mod h1:wu6U7ny9PiaHaZHcvMDmdysMqvDem162Rh3zWTrqk8M= github.com/go-fed/httpsig v1.1.0 h1:9M+hb0jkEICD8/cAiNqEB66R87tTINszBRTjwjQzWcI= github.com/go-fed/httpsig v1.1.0/go.mod h1:RCMrTZvN1bJYtofsG4rd5NaO5obxQ5xBkdiS7xsT7bM= github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI= @@ -608,8 +608,8 @@ github.com/go-toolsmith/strparse v1.1.0 h1:GAioeZUK9TGxnLS+qfdqNbA4z0SSm5zVNtCQi github.com/go-toolsmith/strparse v1.1.0/go.mod h1:7ksGy58fsaQkGQlY8WVoBFNyEPMGuJin1rfoPS4lBSQ= github.com/go-toolsmith/typep v1.1.0 h1:fIRYDyF+JywLfqzyhdiHzRop/GQDxxNhLGQ6gFUNHus= github.com/go-toolsmith/typep v1.1.0/go.mod h1:fVIw+7zjdsMxDA3ITWnH1yOiw1rnTQKCsF/sk2H/qig= -github.com/go-viper/mapstructure/v2 v2.1.0 h1:gHnMa2Y/pIxElCH2GlZZ1lZSsn6XMtufpGyP1XxdC/w= -github.com/go-viper/mapstructure/v2 v2.1.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= +github.com/go-viper/mapstructure/v2 v2.2.1 h1:ZAaOCxANMuZx5RCeg0mBdEZk7DZasvvZIxtHqx8aGss= +github.com/go-viper/mapstructure/v2 v2.2.1/go.mod 
h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM= github.com/go-xmlfmt/xmlfmt v1.1.2 h1:Nea7b4icn8s57fTx1M5AI4qQT5HEM3rVUO8MuE6g80U= github.com/go-xmlfmt/xmlfmt v1.1.2/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM= github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= @@ -667,10 +667,12 @@ github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a h1:w8hkcTqaFpzKqonE9uMCefW1WDie15eSP/4MssdenaM= github.com/golangci/dupl v0.0.0-20180902072040-3e9179ac440a/go.mod h1:ryS0uhF+x9jgbj/N71xsEqODy9BN81/GonCZiOzirOk= +github.com/golangci/go-printf-func-name v0.1.0 h1:dVokQP+NMTO7jwO4bwsRwLWeudOVUPPyAKJuzv8pEJU= +github.com/golangci/go-printf-func-name v0.1.0/go.mod h1:wqhWFH5mUdJQhweRnldEywnR5021wTdZSNgwYceV14s= github.com/golangci/gofmt v0.0.0-20240816233607-d8596aa466a9 h1:/1322Qns6BtQxUZDTAT4SdcoxknUki7IAoK4SAXr8ME= github.com/golangci/gofmt v0.0.0-20240816233607-d8596aa466a9/go.mod h1:Oesb/0uFAyWoaw1U1qS5zyjCg5NP9C9iwjnI4tIsXEE= -github.com/golangci/golangci-lint v1.61.0 h1:VvbOLaRVWmyxCnUIMTbf1kDsaJbTzH20FAMXTAlQGu8= -github.com/golangci/golangci-lint v1.61.0/go.mod h1:e4lztIrJJgLPhWvFPDkhiMwEFRrWlmFbrZea3FsJyN8= +github.com/golangci/golangci-lint v1.62.0 h1:/G0g+bi1BhmGJqLdNQkKBWjcim8HjOPc4tsKuHDOhcI= +github.com/golangci/golangci-lint v1.62.0/go.mod h1:jtoOhQcKTz8B6dGNFyfQV3WZkQk+YvBDewDtNpiAJts= github.com/golangci/misspell v0.6.0 h1:JCle2HUTNWirNlDIAUO44hUsKhOFqGPoC4LZxlaSXDs= github.com/golangci/misspell v0.6.0/go.mod h1:keMNyY6R9isGaSAu+4Q8NMBwMPkh15Gtc8UCVoDtAWo= github.com/golangci/modinfo v0.3.4 h1:oU5huX3fbxqQXdfspamej74DFX0kyGLkw1ppvXoJ8GA= @@ -891,8 +893,6 @@ github.com/jgautheron/goconst v1.7.1 h1:VpdAG7Ca7yvvJk5n8dMwQhfEZJh95kl/Hl9S1OI5 github.com/jgautheron/goconst v1.7.1/go.mod h1:aAosetZ5zaeC/2EfMeRswtxUFBpe2Hr7HzkgX4fanO4= github.com/jingyugao/rowserrcheck v1.1.1 h1:zibz55j/MJtLsjP1OF4bSdgXxwL1b+Vn7Tjzq7gFzUs= github.com/jingyugao/rowserrcheck v1.1.1/go.mod h1:4yvlZSDb3IyDTUZJUmpZfm2Hwok+Dtp+nu2qOq+er9c= -github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af h1:KA9BjwUk7KlCh6S9EAGWBt1oExIUv9WyNCiRz5amv48= -github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af/go.mod h1:HEWGJkRDzjJY2sqdDwxccsGicWEf9BQOZsq2tV+xzM0= github.com/jjti/go-spancheck v0.6.2 h1:iYtoxqPMzHUPp7St+5yA8+cONdyXD3ug6KK15n7Pklk= github.com/jjti/go-spancheck v0.6.2/go.mod h1:+X7lvIrR5ZdUTkxFYqzJ0abr8Sb5LOo80uOhWNqIrYA= github.com/jmespath/go-jmespath v0.4.0 h1:BEgLn5cpjn8UN1mAw4NjwDrS35OdebyEtFe+9YPoQUg= @@ -926,8 +926,8 @@ github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF github.com/keybase/go-keychain v0.0.0-20231219164618-57a3676c3af6 h1:IsMZxCuZqKuao2vNdfD82fjjgPLfyHLpR41Z88viRWs= github.com/keybase/go-keychain v0.0.0-20231219164618-57a3676c3af6/go.mod h1:3VeWNIJaW+O5xpRQbPp0Ybqu1vJd/pm7s2F473HRrkw= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= -github.com/kisielk/errcheck v1.7.0 h1:+SbscKmWJ5mOK/bO1zS60F5I9WwZDWOfRsC4RwfwRV0= -github.com/kisielk/errcheck v1.7.0/go.mod h1:1kLL+jV4e+CFfueBmI1dSK2ADDyQnlrnrY/FqKluHJQ= +github.com/kisielk/errcheck v1.8.0 h1:ZX/URYa7ilESY19ik/vBmCn6zdGQLxACwjAcWbHlYlg= +github.com/kisielk/errcheck v1.8.0/go.mod h1:1kLL+jV4e+CFfueBmI1dSK2ADDyQnlrnrY/FqKluHJQ= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= 
github.com/kkHAIKE/contextcheck v1.1.5 h1:CdnJh63tcDe53vG+RebdpdXJTc9atMgGqdx8LXxiilg= github.com/kkHAIKE/contextcheck v1.1.5/go.mod h1:O930cpht4xb1YQpK+1+AgoM3mFsvxr7uyFptcnWTYUA= @@ -952,8 +952,8 @@ github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0 github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= github.com/kyoh86/exportloopref v0.1.11 h1:1Z0bcmTypkL3Q4k+IDHMWTcnCliEZcaPiIe0/ymEyhQ= github.com/kyoh86/exportloopref v0.1.11/go.mod h1:qkV4UF1zGl6EkF1ox8L5t9SwyeBAZ3qLMd6up458uqA= -github.com/lasiar/canonicalheader v1.1.1 h1:wC+dY9ZfiqiPwAexUApFush/csSPXeIi4QqyxXmng8I= -github.com/lasiar/canonicalheader v1.1.1/go.mod h1:cXkb3Dlk6XXy+8MVQnF23CYKWlyA7kfQhSw2CcZtZb0= +github.com/lasiar/canonicalheader v1.1.2 h1:vZ5uqwvDbyJCnMhmFYimgMZnJMjwljN5VGY0VKbMXb4= +github.com/lasiar/canonicalheader v1.1.2/go.mod h1:qJCeLFS0G/QlLQ506T+Fk/fWMa2VmBUiEI2cuMK4djI= github.com/ldez/gomoddirectives v0.2.4 h1:j3YjBIjEBbqZ0NKtBNzr8rtMHTOrLPeiwTkfUJZ3alg= github.com/ldez/gomoddirectives v0.2.4/go.mod h1:oWu9i62VcQDYp9EQ0ONTfqLNh+mDLWWDO+SO0qSQw5g= github.com/ldez/tagliatelle v0.5.0 h1:epgfuYt9v0CG3fms0pEgIMNPuFf/LpPIfjk4kyqSioo= @@ -962,8 +962,6 @@ github.com/leonklingele/grouper v1.1.2 h1:o1ARBDLOmmasUaNDesWqWCIFH3u7hoFlM84Yrj github.com/leonklingele/grouper v1.1.2/go.mod h1:6D0M/HVkhs2yRKRFZUoGjeDy7EZTfFBE9gl4kjmIGkA= github.com/letsencrypt/boulder v0.0.0-20240620165639-de9c06129bec h1:2tTW6cDth2TSgRbAhD7yjZzTQmcN25sDRPEeinR51yQ= github.com/letsencrypt/boulder v0.0.0-20240620165639-de9c06129bec/go.mod h1:TmwEoGCwIti7BCeJ9hescZgRtatxRE+A72pCoPfmcfk= -github.com/lufeee/execinquery v1.2.1 h1:hf0Ems4SHcUGBxpGN7Jz78z1ppVkP/837ZlETPCEtOM= -github.com/lufeee/execinquery v1.2.1/go.mod h1:EC7DrEKView09ocscGHC+apXMIaorh4xqSxS/dy8SbM= github.com/macabu/inamedparam v0.1.3 h1:2tk/phHkMlEL/1GNe/Yf6kkR/hkcUdAEY3L0hjYV1Mk= github.com/macabu/inamedparam v0.1.3/go.mod h1:93FLICAIk/quk7eaPPQvbzihUdn/QkGDwIZEoLtpH6I= github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= @@ -986,11 +984,11 @@ github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/ github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= -github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U= -github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= +github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc= +github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= -github.com/mgechev/revive v1.3.9 h1:18Y3R4a2USSBF+QZKFQwVkBROUda7uoBlkEuBD+YD1A= -github.com/mgechev/revive v1.3.9/go.mod h1:+uxEIr5UH0TjXWHTno3xh4u7eg6jDpXKzQccA9UGhHU= +github.com/mgechev/revive v1.5.0 h1:oaSmjA7rP8+HyoRuCgC531VHwnLH1AlJdjj+1AnQceQ= +github.com/mgechev/revive v1.5.0/go.mod h1:L6T3H8EoerRO86c7WuGpvohIUmiploGiyoYbtIWFmV8= github.com/miekg/pkcs11 v1.0.3-0.20190429190417-a667d056470f/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= github.com/miekg/pkcs11 v1.1.1 h1:Ugu9pdy6vAYku5DEpVWVFPYnzV+bxB+iRdbuFSu7TvU= github.com/miekg/pkcs11 v1.1.1/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= @@ -1037,8 +1035,8 @@ 
github.com/nishanths/predeclared v0.2.2 h1:V2EPdZPliZymNAn79T8RkNApBjMmVKh5XRpLm github.com/nishanths/predeclared v0.2.2/go.mod h1:RROzoN6TnGQupbC+lqggsOlcgysk3LMK/HI84Mp280c= github.com/nozzle/throttler v0.0.0-20180817012639-2ea982251481 h1:Up6+btDp321ZG5/zdSLo48H9Iaq0UQGthrhWC6pCxzE= github.com/nozzle/throttler v0.0.0-20180817012639-2ea982251481/go.mod h1:yKZQO8QE2bHlgozqWDiRVqTFlLQSj30K/6SAK8EeYFw= -github.com/nunnatsa/ginkgolinter v0.16.2 h1:8iLqHIZvN4fTLDC0Ke9tbSZVcyVHoBs0HIbnVSxfHJk= -github.com/nunnatsa/ginkgolinter v0.16.2/go.mod h1:4tWRinDN1FeJgU+iJANW/kz7xKN5nYRAOfJDQUS9dOQ= +github.com/nunnatsa/ginkgolinter v0.18.0 h1:ZXO1wKhPg3A6LpbN5dMuqwhfOjN5c3ous8YdKOuqk9k= +github.com/nunnatsa/ginkgolinter v0.18.0/go.mod h1:vPrWafSULmjMGCMsfGA908if95VnHQNAahvSBOjTuWs= github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= github.com/nxadm/tail v1.4.11 h1:8feyoE3OzPrcshW5/MJ4sGESc5cqmGkGCWlco4l0bqY= @@ -1152,17 +1150,19 @@ github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727 h1:TCg2WBOl github.com/quasilyte/regex/syntax v0.0.0-20210819130434-b3f0c404a727/go.mod h1:rlzQ04UMyJXu/aOvhd8qT+hvDrFpiwqp8MRXDY9szc0= github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567 h1:M8mH9eK4OUR4lu7Gd+PU1fV2/qnDNfzT635KRSObncs= github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567/go.mod h1:DWNGW8A4Y+GyBgPuaQJuWiy0XYftx4Xm/y5Jqk9I6VQ= +github.com/raeperd/recvcheck v0.1.2 h1:SjdquRsRXJc26eSonWIo8b7IMtKD3OAT2Lb5G3ZX1+4= +github.com/raeperd/recvcheck v0.1.2/go.mod h1:n04eYkwIR0JbgD73wT8wL4JjPC3wm0nFtzBnWNocnYU= github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 h1:N/ElC8H3+5XpJzTSTfLsJV/mx9Q9g7kxmchpfZyxgzM= github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= github.com/redis/go-redis/v9 v9.6.1 h1:HHDteefn6ZkTtY5fGUE8tj8uy85AHk6zP7CpzIAM0y4= github.com/redis/go-redis/v9 v9.6.1/go.mod h1:0C0c6ycQsdpVNQpxb1njEQIqkx5UcsM8FJCQLgE9+RA= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= -github.com/rivo/uniseg v0.4.4 h1:8TfxU8dW6PdqD27gjM8MVNuicgxIjxpm4K7x4jp8sis= -github.com/rivo/uniseg v0.4.4/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= +github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= +github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= -github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= -github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= +github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= +github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ryancurrah/gomodguard v1.3.5 h1:cShyguSwUEeC0jS7ylOiG/idnd1TpJ1LfHGpV3oJmPU= github.com/ryancurrah/gomodguard v1.3.5/go.mod h1:MXlEPQRxgfPQa62O8wzK3Ozbkv9Rkqr+wKjSxTdsNJE= @@ -1189,8 +1189,8 @@ github.com/sassoftware/relic/v7 v7.6.2 h1:rS44Lbv9G9eXsukknS4mSjIAuuX+lMq/FnStgm github.com/sassoftware/relic/v7 v7.6.2/go.mod h1:kjmP0IBVkJZ6gXeAu35/KCEfca//+PKM6vTAsyDPY+k= 
github.com/secure-systems-lab/go-securesystemslib v0.8.0 h1:mr5An6X45Kb2nddcFlbmfHkLguCE9laoZCUzEEpIZXA= github.com/secure-systems-lab/go-securesystemslib v0.8.0/go.mod h1:UH2VZVuJfCYR8WgMlCU1uFsOUU+KeyrTWcSS73NBOzU= -github.com/securego/gosec/v2 v2.21.2 h1:deZp5zmYf3TWwU7A7cR2+SolbTpZ3HQiwFqnzQyEl3M= -github.com/securego/gosec/v2 v2.21.2/go.mod h1:au33kg78rNseF5PwPnTWhuYBFf534bvJRvOrgZ/bFzU= +github.com/securego/gosec/v2 v2.21.4 h1:Le8MSj0PDmOnHJgUATjD96PaXRvCpKC+DGJvwyy0Mlk= +github.com/securego/gosec/v2 v2.21.4/go.mod h1:Jtb/MwRQfRxCXyCm1rfM1BEiiiTfUOdyzzAhlr6lUTA= github.com/segmentio/ksuid v1.0.4 h1:sBo2BdShXjmcugAMwjugoGUdUV0pcxY5mW4xKRn3v4c= github.com/segmentio/ksuid v1.0.4/go.mod h1:/XUiZBD3kVx5SmUOl55voK5yeAbBNNIed+2O73XgrPE= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= @@ -1237,8 +1237,8 @@ github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/sivchari/containedctx v1.0.3 h1:x+etemjbsh2fB5ewm5FeLNi5bUjK0V8n0RB+Wwfd0XE= github.com/sivchari/containedctx v1.0.3/go.mod h1:c1RDvCbnJLtH4lLcYD/GqwiBSSf4F5Qk0xld2rBqzJ4= -github.com/sivchari/tenv v1.10.0 h1:g/hzMA+dBCKqGXgW8AV/1xIWhAvDrx0zFKNR48NFMg0= -github.com/sivchari/tenv v1.10.0/go.mod h1:tdY24masnVoZFxYrHv/nD6Tc8FbkEtAQEEziXpyMgqY= +github.com/sivchari/tenv v1.12.1 h1:+E0QzjktdnExv/wwsnnyk4oqZBUfuh89YMQT1cyuvSY= +github.com/sivchari/tenv v1.12.1/go.mod h1:1LjSOUCc25snIr5n3DtGGrENhX3LuWefcplwVGC24mw= github.com/skeema/knownhosts v1.2.2 h1:Iug2P4fLmDw9f41PB6thxUkNUkJzB5i+1/exaj40L3A= github.com/skeema/knownhosts v1.2.2/go.mod h1:xYbVRSPxqBZFrdmDyMmsOs+uX1UZC3nTN3ThzgDxUwo= github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 h1:JIAuq3EEf9cgbU6AtGPK4CTG3Zf6CKMNqf0MHTggAUA= @@ -1248,8 +1248,8 @@ github.com/smallstep/assert v0.0.0-20200723003110-82e2b9b3b262/go.mod h1:MyOHs9P github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= github.com/smartystreets/assertions v1.1.0/go.mod h1:tcbTF8ujkAEcZ8TElKY+i30BzYlVhC/LOxJk7iOWnoo= github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= -github.com/sonatard/noctx v0.0.2 h1:L7Dz4De2zDQhW8S0t+KUjY0MAQJd6SgVwhzNIc4ok00= -github.com/sonatard/noctx v0.0.2/go.mod h1:kzFz+CzWSjQ2OzIm46uJZoXuBpa2+0y3T36U18dWqIo= +github.com/sonatard/noctx v0.1.0 h1:JjqOc2WN16ISWAjAk8M5ej0RfExEXtkEyExl2hLW+OM= +github.com/sonatard/noctx v0.1.0/go.mod h1:0RvBxqY8D4j9cTTTWE8ylt2vqj2EPI8fHmrxHdsaZ2c= github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo= github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0= github.com/sourcegraph/go-diff v0.7.0 h1:9uLlrd5T46OXs5qpp8L/MTltk0zikUGi0sNNyCpA8G0= @@ -1308,16 +1308,16 @@ github.com/tchap/go-patricia/v2 v2.3.1 h1:6rQp39lgIYZ+MHmdEq4xzuk1t7OdC35z/xm0BG github.com/tchap/go-patricia/v2 v2.3.1/go.mod h1:VZRHKAb53DLaG+nA9EaYYiaEx6YztwDlLElMsnSHD4k= github.com/tdakkota/asciicheck v0.2.0 h1:o8jvnUANo0qXtnslk2d3nMKTFNlOnJjRrNcj0j9qkHM= github.com/tdakkota/asciicheck v0.2.0/go.mod h1:Qb7Y9EgjCLJGup51gDHFzbI08/gbGhL/UVhYIPWG2rg= -github.com/tektoncd/pipeline v0.65.0 h1:MIXXt/OeV/SLQ0KYXXBzPHBwXe2iIhgZcJBHkkgzaYY= -github.com/tektoncd/pipeline v0.65.0/go.mod h1:V3cyfxxc7b3GLT2a13GX2mWA86qmxWhh4mOp4gfFQwQ= +github.com/tektoncd/pipeline v0.65.1 h1:7Ee/nqG+QWE25NGzwKZdFE0p5COb/aljfDysUFv8+0o= +github.com/tektoncd/pipeline v0.65.1/go.mod 
h1:V3cyfxxc7b3GLT2a13GX2mWA86qmxWhh4mOp4gfFQwQ= github.com/tektoncd/plumbing v0.0.0-20230907180608-5625252a2de1 h1:9paprRIBXQgcvdhGq3wKiSspXP0JIFSY52ru3sIMjKM= github.com/tektoncd/plumbing v0.0.0-20230907180608-5625252a2de1/go.mod h1:7eWs1XNkmReggow7ggRbRyRuHi7646B8b2XipCZ3VOw= github.com/tenntenn/modver v1.0.1 h1:2klLppGhDgzJrScMpkj9Ujy3rXPUspSjAcev9tSEBgA= github.com/tenntenn/modver v1.0.1/go.mod h1:bePIyQPb7UeioSRkw3Q0XeMhYZSMx9B8ePqg6SAMGH0= github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3 h1:f+jULpRQGxTSkNYKJ51yaw6ChIqO+Je8UqsTKN/cDag= github.com/tenntenn/text/transform v0.0.0-20200319021203-7eef512accb3/go.mod h1:ON8b8w4BN/kE1EOhwT0o+d62W65a6aPw1nouo9LMgyY= -github.com/tetafro/godot v1.4.17 h1:pGzu+Ye7ZUEFx7LHU0dAKmCOXWsPjl7qA6iMGndsjPs= -github.com/tetafro/godot v1.4.17/go.mod h1:2oVxTBSftRTh4+MVfUaUXR6bn2GDXCaMcOG4Dk3rfio= +github.com/tetafro/godot v1.4.18 h1:ouX3XGiziKDypbpXqShBfnNLTSjR8r3/HVzrtJ+bHlI= +github.com/tetafro/godot v1.4.18/go.mod h1:2oVxTBSftRTh4+MVfUaUXR6bn2GDXCaMcOG4Dk3rfio= github.com/thales-e-security/pool v0.0.2 h1:RAPs4q2EbWsTit6tpzuvTFlgFRJ3S8Evf5gtvVDbmPg= github.com/thales-e-security/pool v0.0.2/go.mod h1:qtpMm2+thHtqhLzTwgDBj/OuNnMpupY8mv0Phz0gjhU= github.com/theupdateframework/go-tuf v0.7.0 h1:CqbQFrWo1ae3/I0UCblSbczevCCbS31Qvs5LdxRWqRI= @@ -1326,8 +1326,8 @@ github.com/theupdateframework/go-tuf/v2 v2.0.1 h1:11p9tXpq10KQEujxjcIjDSivMKCMLg github.com/theupdateframework/go-tuf/v2 v2.0.1/go.mod h1:baB22nBHeHBCeuGZcIlctNq4P61PcOdyARlplg5xmLA= github.com/timakin/bodyclose v0.0.0-20230421092635-574207250966 h1:quvGphlmUVU+nhpFa4gg4yJyTRJ13reZMDHrKwYw53M= github.com/timakin/bodyclose v0.0.0-20230421092635-574207250966/go.mod h1:27bSVNWSBOHm+qRp1T9qzaIpsWEP6TbUnei/43HK+PQ= -github.com/timonwong/loggercheck v0.9.4 h1:HKKhqrjcVj8sxL7K77beXh0adEm6DLjV/QOGeMXEVi4= -github.com/timonwong/loggercheck v0.9.4/go.mod h1:caz4zlPcgvpEkXgVnAJGowHAMW2NwHaNlpS8xDbVhTg= +github.com/timonwong/loggercheck v0.10.1 h1:uVZYClxQFpw55eh+PIoqM7uAOHMrhVcDoWDery9R8Lg= +github.com/timonwong/loggercheck v0.10.1/go.mod h1:HEAWU8djynujaAVX7QI65Myb8qgfcZ1uKbdpg3ZzKl8= github.com/titanous/rocacheck v0.0.0-20171023193734-afe73141d399 h1:e/5i7d4oYZ+C1wj2THlRK+oAhjeS/TRQwMfkIuet3w0= github.com/titanous/rocacheck v0.0.0-20171023193734-afe73141d399/go.mod h1:LdwHTNJT99C5fTAzDz0ud328OgXz+gierycbcIx2fRs= github.com/tjfoc/gmsm v1.3.2/go.mod h1:HaUcFuY0auTiaHB9MHFGCPx5IaLhTUd2atbCFBQXn9w= @@ -1345,6 +1345,8 @@ github.com/ultraware/whitespace v0.1.1 h1:bTPOGejYFulW3PkcrqkeQwOd6NKOOXvmGD9bo/ github.com/ultraware/whitespace v0.1.1/go.mod h1:XcP1RLD81eV4BW8UhQlpaR+SDc2givTvyI8a586WjW8= github.com/uudashr/gocognit v1.1.3 h1:l+a111VcDbKfynh+airAy/DJQKaXh2m9vkoysMPSZyM= github.com/uudashr/gocognit v1.1.3/go.mod h1:aKH8/e8xbTRBwjbCkwZ8qt4l2EpKXl31KMHgSS+lZ2U= +github.com/uudashr/iface v1.2.0 h1:ECJjh5q/1Zmnv/2yFpWV6H3oMg5+Mo+vL0aqw9Gjazo= +github.com/uudashr/iface v1.2.0/go.mod h1:Ux/7d/rAF3owK4m53cTVXL4YoVHKNqnoOeQHn2xrlp0= github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw= github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= github.com/vbatts/tar-split v0.11.5 h1:3bHCTIheBm1qFTcgh9oPu+nNBtX+XJIupG/vacinCts= @@ -1404,8 +1406,8 @@ gitlab.com/bosi/decorder v0.4.2 h1:qbQaV3zgwnBZ4zPMhGLW4KZe7A7NwxEhJx39R3shffo= gitlab.com/bosi/decorder v0.4.2/go.mod h1:muuhHoaJkA9QLcYHq4Mj8FJUwDZ+EirSHRiaTcTf6T8= go-simpler.org/assert v0.9.0 h1:PfpmcSvL7yAnWyChSjOz6Sp6m9j5lyK8Ok9pEL31YkQ= go-simpler.org/assert v0.9.0/go.mod 
h1:74Eqh5eI6vCK6Y5l3PI8ZYFXG4Sa+tkr70OIPJAUr28= -go-simpler.org/musttag v0.12.2 h1:J7lRc2ysXOq7eM8rwaTYnNrHd5JwjppzB6mScysB2Cs= -go-simpler.org/musttag v0.12.2/go.mod h1:uN1DVIasMTQKk6XSik7yrJoEysGtR2GRqvWnI9S7TYM= +go-simpler.org/musttag v0.13.0 h1:Q/YAW0AHvaoaIbsPj3bvEI5/QFP7w696IMUpnKXQfCE= +go-simpler.org/musttag v0.13.0/go.mod h1:FTzIGeK6OkKlUDVpj0iQUXZLUO1Js9+mvykDQy9C5yM= go-simpler.org/sloglint v0.7.2 h1:Wc9Em/Zeuu7JYpl+oKoYOsQSy2X560aVueCW/m6IijY= go-simpler.org/sloglint v0.7.2/go.mod h1:US+9C80ppl7VsThQclkM7BkCHQAzuz8kHLsW3ppuluo= go.mongodb.org/mongo-driver v1.16.1 h1:rIVLL3q0IHM39dvE+z2ulZLp9ENZKThVfuvN/IiN4l8= @@ -1445,8 +1447,8 @@ go.opentelemetry.io/proto/otlp v1.3.1/go.mod h1:0X1WI4de4ZsLrrJNLAQbFeLCm3T7yBkR go.step.sm/crypto v0.51.2 h1:5EiCGIMg7IvQTGmJrwRosbXeprtT80OhoS/PJarg60o= go.step.sm/crypto v0.51.2/go.mod h1:QK7czLjN2k+uqVp5CHXxJbhc70kVRSP+0CQF3zsR5M0= go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= -go.uber.org/automaxprocs v1.5.3 h1:kWazyxZUrS3Gs4qUpbwo5kEIMGe/DAvi5Z4tl2NW4j8= -go.uber.org/automaxprocs v1.5.3/go.mod h1:eRbA25aqJrxAbsLO0xy5jVwPt7FQnRgjW+efnwa1WM0= +go.uber.org/automaxprocs v1.6.0 h1:O3y2/QNTOdbF+e/dpXNNW7Rx2hZ4sTIPyybbxyNqTUs= +go.uber.org/automaxprocs v1.6.0/go.mod h1:ifeIMSnPZuznNm6jmdzmU3/bfk01Fe2fotchwEFJ8r8= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= @@ -1485,8 +1487,8 @@ golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliY golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4= golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= -golang.org/x/crypto v0.28.0 h1:GBDwsMXVQi34v5CCYUm2jkJvu4cbtru2U4TN2PSyQnw= -golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U= +golang.org/x/crypto v0.29.0 h1:L5SG1JTTXupVV3n6sUqMTeWbjAyfPwoda2DLX8J8FrQ= +golang.org/x/crypto v0.29.0/go.mod h1:+F4F4N5hv6v38hfeYwTdx20oUvLLc+QfrE9Ax9HtgRg= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -1497,12 +1499,12 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= -golang.org/x/exp v0.0.0-20240904232852-e7e105dedf7e h1:I88y4caeGeuDQxgdoFPUq097j7kNfw6uvuiNxUBfcBk= -golang.org/x/exp v0.0.0-20240904232852-e7e105dedf7e/go.mod h1:akd2r19cwCdwSwWeIdzYQGa/EZZyqcOdwWiwj5L5eKQ= +golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 h1:e66Fs6Z+fZTbFBAxKfP3PALWBtpfqks2bwGcexMxgtk= +golang.org/x/exp v0.0.0-20240909161429-701f63a606c0/go.mod h1:2TbTHSBQa924w8M6Xs1QcRcFwyucIwBGpK1p2f1YFFY= golang.org/x/exp/typeparams v0.0.0-20220428152302-39d4317da171/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= golang.org/x/exp/typeparams 
v0.0.0-20230203172020-98cc5a0785f9/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= -golang.org/x/exp/typeparams v0.0.0-20240314144324-c7f7c6466f7f h1:phY1HzDcf18Aq9A8KkmRtY9WvOFIxN8wgfvy6Zm1DV8= -golang.org/x/exp/typeparams v0.0.0-20240314144324-c7f7c6466f7f/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= +golang.org/x/exp/typeparams v0.0.0-20240909161429-701f63a606c0 h1:bVwtbF629Xlyxk6xLQq2TDYmqP0uiWaet5LwRebuY0k= +golang.org/x/exp/typeparams v0.0.0-20240909161429-701f63a606c0/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -1536,8 +1538,8 @@ golang.org/x/mod v0.7.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.14.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= -golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= -golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= +golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -1602,8 +1604,8 @@ golang.org/x/net v0.11.0/go.mod h1:2L/ixqYpgIVXmeoSA/4Lu7BzTG4KIyPIryS4IsOd1oQ= golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk= golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY= -golang.org/x/net v0.30.0 h1:AcW1SDZMkb8IpzCdQUaIq2sP4sZ4zw+55h6ynffypl4= -golang.org/x/net v0.30.0/go.mod h1:2wGyMJ5iFasEhkwi13ChkO/t1ECNC4X4eBKkVFyYFlU= +golang.org/x/net v0.31.0 h1:68CPQngjLL0r2AlUKiSxtQFKvzRVbnzLwMUn5SzcLHo= +golang.org/x/net v0.31.0/go.mod h1:P4fl1q7dY2hnZFxEk4pPSkDHF+QqjitcnDjUQyMM+pM= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -1645,8 +1647,8 @@ golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= -golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= +golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod 
h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -1743,8 +1745,8 @@ golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= -golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= +golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.1.0/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= @@ -1758,8 +1760,8 @@ golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU= golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= -golang.org/x/term v0.25.0 h1:WtHI/ltw4NvSUig5KARz9h521QvRC8RmF/cuYqifU24= -golang.org/x/term v0.25.0/go.mod h1:RPyXicDX+6vLxogjjRxjgD2TKtmAO6NZBsBRfrOLu7M= +golang.org/x/term v0.26.0 h1:WEQa6V3Gja/BhNxg540hBip/kkaYtRg3cxg4oXSw4AU= +golang.org/x/term v0.26.0/go.mod h1:Si5m1o57C5nBNQo5z1iq+XDijt21BDBDp2bK0QI8e3E= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -1778,8 +1780,8 @@ golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.10.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM= -golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= +golang.org/x/text v0.20.0 h1:gK/Kv2otX8gz+wn7Rmb3vT96ZwuoxnQlY+HlJVj7Qug= +golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -1804,7 +1806,6 @@ golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20190910044552-dd2b5c81c578/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod 
h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20191108193012-7d206e10da11/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= @@ -1861,8 +1862,8 @@ golang.org/x/tools v0.5.0/go.mod h1:N+Kgy78s5I24c24dU8OfWNEotWjutIs8SnJvn5IDq+k= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= golang.org/x/tools v0.17.0/go.mod h1:xsh6VxdV005rRVaS6SSAf9oiAqljS7UZUacMZ8Bnsps= -golang.org/x/tools v0.24.0 h1:J1shsA93PJUEVaUSaay7UXAyE8aimq3GW0pjlolpa24= -golang.org/x/tools v0.24.0/go.mod h1:YhNqVBIfWHdzvTLs0d8LCuMhkKUgSUKldakyV7W/WDQ= +golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o= +golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -2047,8 +2048,8 @@ google.golang.org/grpc v1.46.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACu google.golang.org/grpc v1.46.2/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= google.golang.org/grpc v1.47.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= google.golang.org/grpc v1.48.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk= -google.golang.org/grpc v1.67.1 h1:zWnc1Vrcno+lHZCOofnIMvycFcc0QRGIzm9dhnDX68E= -google.golang.org/grpc v1.67.1/go.mod h1:1gLDyUQU7CTLJI90u3nXZ9ekeghjeM7pTDZlqFNg2AA= +google.golang.org/grpc v1.68.0 h1:aHQeeJbo8zAkAa3pRzrVjZlbz6uSfeOXlJNQM0RAbz0= +google.golang.org/grpc v1.68.0/go.mod h1:fmSPC5AsjSBCK54MyHRx48kpOti1/jRfOlwEWywNjWA= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw= google.golang.org/grpc/stats/opentelemetry v0.0.0-20240907200651-3ffb98b2c93a h1:UIpYSuWdWHSzjwcAFRLjKcPXFZVVLXGEM23W+NWqipw= google.golang.org/grpc/stats/opentelemetry v0.0.0-20240907200651-3ffb98b2c93a/go.mod h1:9i1T9n4ZinTUZGgzENMi8MDDgbGC5mqTS75JAv6xN3A= @@ -2067,8 +2068,8 @@ google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQ google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -google.golang.org/protobuf v1.35.1 h1:m3LfL6/Ca+fqnjnlqQXNpFPABW1UD7mjh8KO2mKFytA= -google.golang.org/protobuf v1.35.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= +google.golang.org/protobuf v1.35.2 h1:8Ar7bF+apOIoThw1EdZl0p1oWvMqTHmpA2fRTyZO8io= +google.golang.org/protobuf v1.35.2/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod 
h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/vendor/github.com/Antonboom/errname/pkg/analyzer/analyzer.go b/vendor/github.com/Antonboom/errname/pkg/analyzer/analyzer.go index aa85225108..2b8794dc2c 100644 --- a/vendor/github.com/Antonboom/errname/pkg/analyzer/analyzer.go +++ b/vendor/github.com/Antonboom/errname/pkg/analyzer/analyzer.go @@ -1,11 +1,9 @@ package analyzer import ( - "fmt" "go/ast" "go/token" - "strconv" - "strings" + "go/types" "unicode" "golang.org/x/tools/go/analysis" @@ -23,86 +21,61 @@ func New() *analysis.Analyzer { } } -type stringSet = map[string]struct{} - -var ( - importNodes = []ast.Node{(*ast.ImportSpec)(nil)} - typeNodes = []ast.Node{(*ast.TypeSpec)(nil)} - funcNodes = []ast.Node{(*ast.FuncDecl)(nil)} -) - func run(pass *analysis.Pass) (interface{}, error) { insp := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - pkgAliases := map[string]string{} - insp.Preorder(importNodes, func(node ast.Node) { - i := node.(*ast.ImportSpec) - if n := i.Name; n != nil && i.Path != nil { - if path, err := strconv.Unquote(i.Path.Value); err == nil { - pkgAliases[n.Name] = getPkgFromPath(path) - } - } - }) - - allTypes := stringSet{} - typesSpecs := map[string]*ast.TypeSpec{} - insp.Preorder(typeNodes, func(node ast.Node) { - t := node.(*ast.TypeSpec) - allTypes[t.Name.Name] = struct{}{} - typesSpecs[t.Name.Name] = t - }) - - errorTypes := stringSet{} - insp.Preorder(funcNodes, func(node ast.Node) { - f := node.(*ast.FuncDecl) - t, ok := isMethodError(f) - if !ok { - return - } - errorTypes[t] = struct{}{} - - tSpec, ok := typesSpecs[t] - if !ok { - panic(fmt.Sprintf("no specification for type %q", t)) - } - - if _, ok := tSpec.Type.(*ast.ArrayType); ok { - if !isValidErrorArrayTypeName(t) { - reportAboutErrorType(pass, tSpec.Pos(), t, true) - } - } else if !isValidErrorTypeName(t) { - reportAboutErrorType(pass, tSpec.Pos(), t, false) - } - }) - - errorFuncs := stringSet{} - insp.Preorder(funcNodes, func(node ast.Node) { - f := node.(*ast.FuncDecl) - if isFuncReturningErr(f.Type, allTypes, errorTypes) { - errorFuncs[f.Name.Name] = struct{}{} + insp.Nodes([]ast.Node{ + (*ast.TypeSpec)(nil), + (*ast.ValueSpec)(nil), + (*ast.FuncDecl)(nil), + }, func(node ast.Node, push bool) bool { + if !push { + return false } - }) - inspectPkgLevelVarsOnly := func(node ast.Node) bool { switch v := node.(type) { case *ast.FuncDecl: return false case *ast.ValueSpec: - if name, ok := isSentinelError(v, pkgAliases, allTypes, errorTypes, errorFuncs); ok && !isValidErrorVarName(name) { - reportAboutErrorVar(pass, v.Pos(), name) + if len(v.Names) != 1 { + return false + } + ident := v.Names[0] + + if exprImplementsError(pass, ident) && !isValidErrorVarName(ident.Name) { + reportAboutSentinelError(pass, v.Pos(), ident.Name) + } + return false + + case *ast.TypeSpec: + tt := pass.TypesInfo.TypeOf(v.Name) + if tt == nil { + return false + } + // NOTE(a.telyshev): Pointer is the hack against Error() method with pointer receiver. 
+ if !typeImplementsError(types.NewPointer(tt)) { + return false } + + name := v.Name.Name + if _, ok := v.Type.(*ast.ArrayType); ok { + if !isValidErrorArrayTypeName(name) { + reportAboutArrayErrorType(pass, v.Pos(), name) + } + } else if !isValidErrorTypeName(name) { + reportAboutErrorType(pass, v.Pos(), name) + } + return false } + return true - } - for _, f := range pass.Files { - ast.Inspect(f, inspectPkgLevelVarsOnly) - } + }) return nil, nil //nolint:nilnil } -func reportAboutErrorType(pass *analysis.Pass, typePos token.Pos, typeName string, isArrayType bool) { +func reportAboutErrorType(pass *analysis.Pass, typePos token.Pos, typeName string) { var form string if unicode.IsLower([]rune(typeName)[0]) { form = "xxxError" @@ -110,26 +83,26 @@ func reportAboutErrorType(pass *analysis.Pass, typePos token.Pos, typeName strin form = "XxxError" } - if isArrayType { - form += "s" + pass.Reportf(typePos, "the error type name `%s` should conform to the `%s` format", typeName, form) +} + +func reportAboutArrayErrorType(pass *analysis.Pass, typePos token.Pos, typeName string) { + var forms string + if unicode.IsLower([]rune(typeName)[0]) { + forms = "`xxxErrors` or `xxxError`" + } else { + forms = "`XxxErrors` or `XxxError`" } - pass.Reportf(typePos, "the type name `%s` should conform to the `%s` format", typeName, form) + + pass.Reportf(typePos, "the error type name `%s` should conform to the %s format", typeName, forms) } -func reportAboutErrorVar(pass *analysis.Pass, pos token.Pos, varName string) { +func reportAboutSentinelError(pass *analysis.Pass, pos token.Pos, varName string) { var form string if unicode.IsLower([]rune(varName)[0]) { form = "errXxx" } else { form = "ErrXxx" } - pass.Reportf(pos, "the variable name `%s` should conform to the `%s` format", varName, form) -} - -func getPkgFromPath(p string) string { - idx := strings.LastIndex(p, "/") - if idx == -1 { - return p - } - return p[idx+1:] + pass.Reportf(pos, "the sentinel error name `%s` should conform to the `%s` format", varName, form) } diff --git a/vendor/github.com/Antonboom/errname/pkg/analyzer/facts.go b/vendor/github.com/Antonboom/errname/pkg/analyzer/facts.go index 06f8d61d8e..04e14fb68d 100644 --- a/vendor/github.com/Antonboom/errname/pkg/analyzer/facts.go +++ b/vendor/github.com/Antonboom/errname/pkg/analyzer/facts.go @@ -1,58 +1,22 @@ package analyzer import ( - "fmt" "go/ast" - "go/token" "go/types" "strings" "unicode" -) - -func isMethodError(f *ast.FuncDecl) (typeName string, ok bool) { - if f.Recv == nil || len(f.Recv.List) != 1 { - return "", false - } - if f.Name == nil || f.Name.Name != "Error" { - return "", false - } - if f.Type == nil || f.Type.Results == nil || len(f.Type.Results.List) != 1 { - return "", false - } - - returnType, ok := f.Type.Results.List[0].Type.(*ast.Ident) - if !ok { - return "", false - } - - var receiverType string - - unwrapIdentName := func(e ast.Expr) string { - switch v := e.(type) { - case *ast.Ident: - return v.Name - case *ast.IndexExpr: - if i, ok := v.X.(*ast.Ident); ok { - return i.Name - } - case *ast.IndexListExpr: - if i, ok := v.X.(*ast.Ident); ok { - return i.Name - } - } - panic(fmt.Errorf("unsupported Error() receiver type %q", types.ExprString(e))) - } + "golang.org/x/tools/go/analysis" +) - switch rt := f.Recv.List[0].Type; v := rt.(type) { - case *ast.Ident, *ast.IndexExpr, *ast.IndexListExpr: // SomeError, SomeError[T], SomeError[T1, T2, ...] 
- receiverType = unwrapIdentName(rt) +var errorIface = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) - case *ast.StarExpr: // *SomeError, *SomeError[T], *SomeError[T1, T2, ...] - receiverType = unwrapIdentName(v.X) - } +func exprImplementsError(pass *analysis.Pass, e ast.Expr) bool { + return typeImplementsError(pass.TypesInfo.TypeOf(e)) +} - return receiverType, returnType.Name == "string" +func typeImplementsError(t types.Type) bool { + return t != nil && types.Implements(t, errorIface) } func isValidErrorTypeName(s string) bool { @@ -77,153 +41,12 @@ func isValidErrorArrayTypeName(s string) bool { words := split(s) wordsCnt := wordsCount(words) - if wordsCnt["errors"] != 1 { - return false - } - return words[len(words)-1] == "errors" -} - -func isFuncReturningErr(fType *ast.FuncType, allTypes, errorTypes stringSet) bool { - if fType == nil || fType.Results == nil || len(fType.Results.List) != 1 { + if wordsCnt["errors"] != 1 && wordsCnt["error"] != 1 { return false } - var returnTypeName string - switch rt := fType.Results.List[0].Type.(type) { - case *ast.Ident: - returnTypeName = rt.Name - case *ast.StarExpr: - if i, ok := rt.X.(*ast.Ident); ok { - returnTypeName = i.Name - } - } - - return isErrorType(returnTypeName, allTypes, errorTypes) -} - -func isErrorType(tName string, allTypes, errorTypes stringSet) bool { - _, isUserType := allTypes[tName] - _, isErrType := errorTypes[tName] - return isErrType || (tName == "error" && !isUserType) -} - -var knownErrConstructors = stringSet{ - "fmt.Errorf": {}, - "errors.Errorf": {}, - "errors.New": {}, - "errors.Newf": {}, - "errors.NewWithDepth": {}, - "errors.NewWithDepthf": {}, - "errors.NewAssertionErrorWithWrappedErrf": {}, -} - -func isSentinelError( //nolint:gocognit,gocyclo - v *ast.ValueSpec, - pkgAliases map[string]string, - allTypes, errorTypes, errorFuncs stringSet, -) (varName string, ok bool) { - if len(v.Names) != 1 { - return "", false - } - varName = v.Names[0].Name - - switch vv := v.Type.(type) { - // var ErrEndOfFile error - // var ErrEndOfFile SomeErrType - case *ast.Ident: - if isErrorType(vv.Name, allTypes, errorTypes) { - return varName, true - } - - // var ErrEndOfFile *SomeErrType - case *ast.StarExpr: - if i, ok := vv.X.(*ast.Ident); ok && isErrorType(i.Name, allTypes, errorTypes) { - return varName, true - } - } - - if len(v.Values) != 1 { - return "", false - } - - switch vv := v.Values[0].(type) { - case *ast.CallExpr: - switch fun := vv.Fun.(type) { - // var ErrEndOfFile = errors.New("end of file") - case *ast.SelectorExpr: - pkg, ok := fun.X.(*ast.Ident) - if !ok { - return "", false - } - pkgFun := fun.Sel - - pkgName := pkg.Name - if a, ok := pkgAliases[pkgName]; ok { - pkgName = a - } - - _, ok = knownErrConstructors[pkgName+"."+pkgFun.Name] - return varName, ok - - // var ErrEndOfFile = newErrEndOfFile() - // var ErrEndOfFile = new(EndOfFileError) - // const ErrEndOfFile = constError("end of file") - // var statusCodeError = new(SomePtrError[string]) - case *ast.Ident: - if isErrorType(fun.Name, allTypes, errorTypes) { - return varName, true - } - - if _, ok := errorFuncs[fun.Name]; ok { - return varName, true - } - - if fun.Name == "new" && len(vv.Args) == 1 { - switch i := vv.Args[0].(type) { - case *ast.Ident: - return varName, isErrorType(i.Name, allTypes, errorTypes) - case *ast.IndexExpr: - if ii, ok := i.X.(*ast.Ident); ok { - return varName, isErrorType(ii.Name, allTypes, errorTypes) - } - } - } - - // var ErrEndOfFile = func() error { ... 
} - case *ast.FuncLit: - return varName, isFuncReturningErr(fun.Type, allTypes, errorTypes) - } - - // var ErrEndOfFile = &EndOfFileError{} - // var ErrOK = &SomePtrError[string]{Code: "200 OK"} - case *ast.UnaryExpr: - if vv.Op == token.AND { // & - if lit, ok := vv.X.(*ast.CompositeLit); ok { - switch i := lit.Type.(type) { - case *ast.Ident: - return varName, isErrorType(i.Name, allTypes, errorTypes) - case *ast.IndexExpr: - if ii, ok := i.X.(*ast.Ident); ok { - return varName, isErrorType(ii.Name, allTypes, errorTypes) - } - } - } - } - - // var ErrEndOfFile = EndOfFileError{} - // var ErrNotFound = SomeError[string]{Code: "Not Found"} - case *ast.CompositeLit: - switch i := vv.Type.(type) { - case *ast.Ident: - return varName, isErrorType(i.Name, allTypes, errorTypes) - case *ast.IndexExpr: - if ii, ok := i.X.(*ast.Ident); ok { - return varName, isErrorType(ii.Name, allTypes, errorTypes) - } - } - } - - return "", false + lastWord := words[len(words)-1] + return lastWord == "errors" || lastWord == "error" } func isValidErrorVarName(s string) bool { diff --git a/vendor/github.com/Antonboom/nilnil/pkg/analyzer/analyzer.go b/vendor/github.com/Antonboom/nilnil/pkg/analyzer/analyzer.go index 5646ee9094..5507d95462 100644 --- a/vendor/github.com/Antonboom/nilnil/pkg/analyzer/analyzer.go +++ b/vendor/github.com/Antonboom/nilnil/pkg/analyzer/analyzer.go @@ -15,7 +15,8 @@ const ( name = "nilnil" doc = "Checks that there is no simultaneous return of `nil` error and an invalid value." - reportMsg = "return both the `nil` error and invalid value: use a sentinel error instead" + nilNilReportMsg = "return both a `nil` error and an invalid value: use a sentinel error instead" + notNilNotNilReportMsg = "return both a non-nil error and a valid value: use separate returns instead" ) // New returns new nilnil analyzer. 
@@ -28,18 +29,22 @@ func New() *analysis.Analyzer { Run: n.run, Requires: []*analysis.Analyzer{inspect.Analyzer}, } - a.Flags.Var(&n.checkedTypes, "checked-types", "coma separated list") + a.Flags.Var(&n.checkedTypes, "checked-types", "comma separated list of return types to check") + a.Flags.BoolVar(&n.detectOpposite, "detect-opposite", false, + "in addition, detect opposite situation (simultaneous return of non-nil error and valid value)") return a } type nilNil struct { - checkedTypes checkedTypes + checkedTypes checkedTypes + detectOpposite bool } func newNilNil() *nilNil { return &nilNil{ - checkedTypes: newDefaultCheckedTypes(), + checkedTypes: newDefaultCheckedTypes(), + detectOpposite: false, } } @@ -87,22 +92,22 @@ func (n *nilNil) run(pass *analysis.Pass) (interface{}, error) { } ok, zv := n.isDangerNilType(fRes1Type) - if !(ok && isErrorType(fRes2Type)) { + if !(ok && implementsError(fRes2Type)) { return false } retVal, retErr := v.Results[0], v.Results[1] - var needWarn bool - switch zv { - case zeroValueNil: - needWarn = isNil(pass, retVal) && isNil(pass, retErr) - case zeroValueZero: - needWarn = isZero(retVal) && isNil(pass, retErr) + if ((zv == zeroValueNil) && isNil(pass, retVal) && isNil(pass, retErr)) || + ((zv == zeroValueZero) && isZero(retVal) && isNil(pass, retErr)) { + pass.Reportf(v.Pos(), nilNilReportMsg) + return false } - if needWarn { - pass.Reportf(v.Pos(), reportMsg) + if n.detectOpposite && (((zv == zeroValueNil) && !isNil(pass, retVal) && !isNil(pass, retErr)) || + ((zv == zeroValueZero) && !isZero(retVal) && !isNil(pass, retErr))) { + pass.Reportf(v.Pos(), notNilNotNilReportMsg) + return false } } @@ -152,7 +157,7 @@ func (n *nilNil) isDangerNilType(t types.Type) (bool, zeroValue) { var errorIface = types.Universe.Lookup("error").Type().Underlying().(*types.Interface) -func isErrorType(t types.Type) bool { +func implementsError(t types.Type) bool { _, ok := t.Underlying().(*types.Interface) return ok && types.Implements(t, errorIface) } diff --git a/vendor/github.com/Antonboom/nilnil/pkg/analyzer/config.go b/vendor/github.com/Antonboom/nilnil/pkg/analyzer/config.go index c9b8e3eedc..90ae548f30 100644 --- a/vendor/github.com/Antonboom/nilnil/pkg/analyzer/config.go +++ b/vendor/github.com/Antonboom/nilnil/pkg/analyzer/config.go @@ -8,11 +8,11 @@ import ( func newDefaultCheckedTypes() checkedTypes { return checkedTypes{ - ptrType: {}, + chanType: {}, funcType: {}, ifaceType: {}, mapType: {}, - chanType: {}, + ptrType: {}, uintptrType: {}, unsafeptrType: {}, } diff --git a/vendor/github.com/Antonboom/testifylint/internal/analysisutil/encoded.go b/vendor/github.com/Antonboom/testifylint/internal/analysisutil/encoded.go new file mode 100644 index 0000000000..cafc283e6f --- /dev/null +++ b/vendor/github.com/Antonboom/testifylint/internal/analysisutil/encoded.go @@ -0,0 +1,46 @@ +package analysisutil + +import "strings" + +var whitespaceRemover = strings.NewReplacer("\n", "", "\\n", "", "\t", "", "\\t", "", " ", "") + +// IsJSONLike returns true if the string has JSON format features. +// A positive result can be returned for invalid JSON as well. 
+func IsJSONLike(s string) bool { + s = whitespaceRemover.Replace(unescape(s)) + + var startMatch bool + for _, prefix := range []string{ + `{{`, `{[`, `{"`, + `[{{`, `[{[`, `[{"`, + } { + if strings.HasPrefix(s, prefix) { + startMatch = true + break + } + } + if !startMatch { + return false + } + + for _, keyValue := range []string{`":{`, `":[`, `":"`} { + if strings.Contains(s, keyValue) { + return true + } + } + return false + + // NOTE(a.telyshev): We do not check the end of the string, because this is usually a field for typos. + // And one of the reasons for using JSON-specific assertions is to catch typos like this. +} + +func unescape(s string) string { + s = strings.ReplaceAll(s, `\"`, `"`) + s = unquote(s, `"`) + s = unquote(s, "`") + return s +} + +func unquote(s string, q string) string { + return strings.TrimLeft(strings.TrimRight(s, q), q) +} diff --git a/vendor/github.com/Antonboom/testifylint/internal/analysisutil/file.go b/vendor/github.com/Antonboom/testifylint/internal/analysisutil/file.go index 3fc1f42b86..d552609182 100644 --- a/vendor/github.com/Antonboom/testifylint/internal/analysisutil/file.go +++ b/vendor/github.com/Antonboom/testifylint/internal/analysisutil/file.go @@ -2,6 +2,7 @@ package analysisutil import ( "go/ast" + "slices" "strconv" ) @@ -17,11 +18,8 @@ func Imports(file *ast.File, pkgs ...string) bool { if err != nil { continue } - // NOTE(a.telyshev): Don't use `slices.Contains` to keep the minimum module version 1.20. - for _, pkg := range pkgs { // Small O(n). - if pkg == path { - return true - } + if slices.Contains(pkgs, path) { // Small O(n). + return true } } return false diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/blank_import.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/blank_import.go index 403691e270..56cd64e078 100644 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/blank_import.go +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/blank_import.go @@ -53,7 +53,7 @@ func (checker BlankImport) Check(pass *analysis.Pass, _ *inspector.Inspector) (d } msg := fmt.Sprintf("avoid blank import of %s as it does nothing", pkg) - diagnostics = append(diagnostics, *newDiagnostic(checker.Name(), imp, msg, nil)) + diagnostics = append(diagnostics, *newDiagnostic(checker.Name(), imp, msg)) } } return diagnostics diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/bool_compare.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/bool_compare.go index d125c43f92..67959b633b 100644 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/bool_compare.go +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/bool_compare.go @@ -49,13 +49,11 @@ func (checker BoolCompare) Check(pass *analysis.Pass, call *CallMeta) *analysis. } survivingArg = newBoolCast(survivingArg) } - return newUseFunctionDiagnostic(checker.Name(), call, proposed, - newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{ - Pos: replaceStart, - End: replaceEnd, - NewText: analysisutil.NodeBytes(pass.Fset, survivingArg), - }), - ) + return newUseFunctionDiagnostic(checker.Name(), call, proposed, analysis.TextEdit{ + Pos: replaceStart, + End: replaceEnd, + NewText: analysisutil.NodeBytes(pass.Fset, survivingArg), + }) } newUseTrueDiagnostic := func(survivingArg ast.Expr, replaceStart, replaceEnd token.Pos) *analysis.Diagnostic { @@ -74,7 +72,7 @@ func (checker BoolCompare) Check(pass *analysis.Pass, call *CallMeta) *analysis. 
survivingArg = newBoolCast(survivingArg) } return newDiagnostic(checker.Name(), call, "need to simplify the assertion", - &analysis.SuggestedFix{ + analysis.SuggestedFix{ Message: "Simplify the assertion", TextEdits: []analysis.TextEdit{{ Pos: replaceStart, @@ -106,7 +104,7 @@ func (checker BoolCompare) Check(pass *analysis.Pass, call *CallMeta) *analysis. case xor(t1, t2): survivingArg, _ := anyVal([]bool{t1, t2}, arg2, arg1) if call.Fn.NameFTrimmed == "Exactly" && !isBuiltinBool(pass, survivingArg) { - // NOTE(a.telyshev): `Exactly` assumes no type casting. + // NOTE(a.telyshev): `Exactly` assumes no type conversion. return nil } return newUseTrueDiagnostic(survivingArg, arg1.Pos(), arg2.End()) @@ -114,7 +112,7 @@ func (checker BoolCompare) Check(pass *analysis.Pass, call *CallMeta) *analysis. case xor(f1, f2): survivingArg, _ := anyVal([]bool{f1, f2}, arg2, arg1) if call.Fn.NameFTrimmed == "Exactly" && !isBuiltinBool(pass, survivingArg) { - // NOTE(a.telyshev): `Exactly` assumes no type casting. + // NOTE(a.telyshev): `Exactly` assumes no type conversion. return nil } return newUseFalseDiagnostic(survivingArg, arg1.Pos(), arg2.End()) diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/checkers_registry.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/checkers_registry.go index 17c7d14ee9..f881be4f20 100644 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/checkers_registry.go +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/checkers_registry.go @@ -13,10 +13,13 @@ var registry = checkersRegistry{ {factory: asCheckerFactory(NewLen), enabledByDefault: true}, {factory: asCheckerFactory(NewNegativePositive), enabledByDefault: true}, {factory: asCheckerFactory(NewCompares), enabledByDefault: true}, + {factory: asCheckerFactory(NewContains), enabledByDefault: true}, {factory: asCheckerFactory(NewErrorNil), enabledByDefault: true}, {factory: asCheckerFactory(NewNilCompare), enabledByDefault: true}, {factory: asCheckerFactory(NewErrorIsAs), enabledByDefault: true}, + {factory: asCheckerFactory(NewEncodedCompare), enabledByDefault: true}, {factory: asCheckerFactory(NewExpectedActual), enabledByDefault: true}, + {factory: asCheckerFactory(NewRegexp), enabledByDefault: true}, {factory: asCheckerFactory(NewSuiteExtraAssertCall), enabledByDefault: true}, {factory: asCheckerFactory(NewSuiteDontUsePkg), enabledByDefault: true}, {factory: asCheckerFactory(NewUselessAssert), enabledByDefault: true}, diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/compares.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/compares.go index bdde03d95e..f0c4013f16 100644 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/compares.go +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/compares.go @@ -61,7 +61,9 @@ func (checker Compares) Check(pass *analysis.Pass, call *CallMeta) *analysis.Dia return nil } - if isPointer(pass, be.X) && isPointer(pass, be.Y) { + _, xp := isPointer(pass, be.X) + _, yp := isPointer(pass, be.Y) + if xp && yp { switch proposedFn { case "Equal": proposedFn = "Same" @@ -72,12 +74,11 @@ func (checker Compares) Check(pass *analysis.Pass, call *CallMeta) *analysis.Dia a, b := be.X, be.Y return newUseFunctionDiagnostic(checker.Name(), call, proposedFn, - newSuggestedFuncReplacement(call, proposedFn, analysis.TextEdit{ + analysis.TextEdit{ Pos: be.X.Pos(), End: be.Y.End(), NewText: formatAsCallArgs(pass, a, b), - }), - ) + }) } var tokenToProposedFnInsteadOfTrue = 
map[token.Token]string{ diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/contains.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/contains.go new file mode 100644 index 0000000000..07f76c6e4f --- /dev/null +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/contains.go @@ -0,0 +1,71 @@ +package checkers + +import ( + "go/ast" + + "golang.org/x/tools/go/analysis" +) + +// Contains detects situations like +// +// assert.True(t, strings.Contains(a, "abc123")) +// assert.False(t, !strings.Contains(a, "abc123")) +// +// assert.False(t, strings.Contains(a, "abc123")) +// assert.True(t, !strings.Contains(a, "abc123")) +// +// and requires +// +// assert.Contains(t, a, "abc123") +// assert.NotContains(t, a, "abc123") +type Contains struct{} + +// NewContains constructs Contains checker. +func NewContains() Contains { return Contains{} } +func (Contains) Name() string { return "contains" } + +func (checker Contains) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic { + if len(call.Args) < 1 { + return nil + } + + expr := call.Args[0] + unpacked, isNeg := isNegation(expr) + if isNeg { + expr = unpacked + } + + ce, ok := expr.(*ast.CallExpr) + if !ok || len(ce.Args) != 2 { + return nil + } + + if !isStringsContainsCall(pass, ce) { + return nil + } + + var proposed string + switch call.Fn.NameFTrimmed { + default: + return nil + + case "True": + proposed = "Contains" + if isNeg { + proposed = "NotContains" + } + + case "False": + proposed = "NotContains" + if isNeg { + proposed = "Contains" + } + } + + return newUseFunctionDiagnostic(checker.Name(), call, proposed, + analysis.TextEdit{ + Pos: call.Args[0].Pos(), + End: call.Args[0].End(), + NewText: formatAsCallArgs(pass, ce.Args[0], ce.Args[1]), + }) +} diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/empty.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/empty.go index eafecb678d..71657fe117 100644 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/empty.go +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/empty.go @@ -53,25 +53,28 @@ func (checker Empty) checkEmpty(pass *analysis.Pass, call *CallMeta) *analysis.D newUseEmptyDiagnostic := func(replaceStart, replaceEnd token.Pos, replaceWith ast.Expr) *analysis.Diagnostic { const proposed = "Empty" return newUseFunctionDiagnostic(checker.Name(), call, proposed, - newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{ + analysis.TextEdit{ Pos: replaceStart, End: replaceEnd, NewText: analysisutil.NodeBytes(pass.Fset, replaceWith), - }), - ) + }) } if len(call.Args) == 0 { return nil } - a := call.Args[0] + switch call.Fn.NameFTrimmed { - case "Zero", "Empty": - lenArg, ok := isBuiltinLenCall(pass, a) - if ok { + case "Zero": + if lenArg, ok := isBuiltinLenCall(pass, a); ok { return newUseEmptyDiagnostic(a.Pos(), a.End(), lenArg) } + + case "Empty": + if lenArg, ok := isBuiltinLenCall(pass, a); ok { + return newRemoveLenDiagnostic(pass, checker.Name(), call, a, lenArg) + } } if len(call.Args) < 2 { @@ -120,25 +123,28 @@ func (checker Empty) checkNotEmpty(pass *analysis.Pass, call *CallMeta) *analysi newUseNotEmptyDiagnostic := func(replaceStart, replaceEnd token.Pos, replaceWith ast.Expr) *analysis.Diagnostic { const proposed = "NotEmpty" return newUseFunctionDiagnostic(checker.Name(), call, proposed, - newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{ + analysis.TextEdit{ Pos: replaceStart, End: replaceEnd, NewText: analysisutil.NodeBytes(pass.Fset, 
replaceWith), - }), - ) + }) } if len(call.Args) == 0 { return nil } - a := call.Args[0] + switch call.Fn.NameFTrimmed { - case "NotZero", "NotEmpty", "Positive": - lenArg, ok := isBuiltinLenCall(pass, a) - if ok { + case "NotZero", "Positive": + if lenArg, ok := isBuiltinLenCall(pass, a); ok { return newUseNotEmptyDiagnostic(a.Pos(), a.End(), lenArg) } + + case "NotEmpty": + if lenArg, ok := isBuiltinLenCall(pass, a); ok { + return newRemoveLenDiagnostic(pass, checker.Name(), call, a, lenArg) + } } if len(call.Args) < 2 { diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/encoded_compare.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/encoded_compare.go new file mode 100644 index 0000000000..53c74ac453 --- /dev/null +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/encoded_compare.go @@ -0,0 +1,101 @@ +package checkers + +import ( + "go/ast" + + "golang.org/x/tools/go/analysis" +) + +// EncodedCompare detects situations like +// +// assert.Equal(t, `{"foo": "bar"}`, body) +// assert.EqualValues(t, `{"foo": "bar"}`, body) +// assert.Exactly(t, `{"foo": "bar"}`, body) +// assert.Equal(t, expectedJSON, resultJSON) +// assert.Equal(t, expBodyConst, w.Body.String()) +// assert.Equal(t, fmt.Sprintf(`{"value":"%s"}`, hexString), result) +// assert.Equal(t, "{}", json.RawMessage(resp)) +// assert.Equal(t, expJSON, strings.Trim(string(resultJSONBytes), "\n")) // + Replace, ReplaceAll, TrimSpace +// +// assert.Equal(t, expectedYML, conf) +// +// and requires +// +// assert.JSONEq(t, `{"foo": "bar"}`, body) +// assert.YAMLEq(t, expectedYML, conf) +type EncodedCompare struct{} + +// NewEncodedCompare constructs EncodedCompare checker. +func NewEncodedCompare() EncodedCompare { return EncodedCompare{} } +func (EncodedCompare) Name() string { return "encoded-compare" } + +func (checker EncodedCompare) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic { + switch call.Fn.NameFTrimmed { + case "Equal", "EqualValues", "Exactly": + default: + return nil + } + + if len(call.Args) < 2 { + return nil + } + lhs, rhs := call.Args[0], call.Args[1] + + a, aIsExplicitJSON := checker.unwrap(pass, call.Args[0]) + b, bIsExplicitJSON := checker.unwrap(pass, call.Args[1]) + + var proposed string + switch { + case aIsExplicitJSON, bIsExplicitJSON, isJSONStyleExpr(pass, a), isJSONStyleExpr(pass, b): + proposed = "JSONEq" + case isYAMLStyleExpr(a), isYAMLStyleExpr(b): + proposed = "YAMLEq" + } + + if proposed != "" { + return newUseFunctionDiagnostic(checker.Name(), call, proposed, + analysis.TextEdit{ + Pos: lhs.Pos(), + End: lhs.End(), + NewText: formatWithStringCastForBytes(pass, a), + }, + analysis.TextEdit{ + Pos: rhs.Pos(), + End: rhs.End(), + NewText: formatWithStringCastForBytes(pass, b), + }, + ) + } + return nil +} + +// unwrap unwraps expression from string, []byte, strings.Replace(All), strings.Trim(Space) and json.RawMessage conversions. +// Returns true in the second argument, if json.RawMessage was in the chain. +func (checker EncodedCompare) unwrap(pass *analysis.Pass, e ast.Expr) (ast.Expr, bool) { + ce, ok := e.(*ast.CallExpr) + if !ok { + return e, false + } + if len(ce.Args) == 0 { + return e, false + } + + if isJSONRawMessageCast(pass, ce) { + if isNil(ce.Args[0]) { // NOTE(a.telyshev): Ignore json.RawMessage(nil) case. 
+ return checker.unwrap(pass, ce.Args[0]) + } + + v, _ := checker.unwrap(pass, ce.Args[0]) + return v, true + } + + if isIdentWithName("string", ce.Fun) || + isByteArray(ce.Fun) || + isStringsReplaceCall(pass, ce) || + isStringsReplaceAllCall(pass, ce) || + isStringsTrimCall(pass, ce) || + isStringsTrimSpaceCall(pass, ce) { + return checker.unwrap(pass, ce.Args[0]) + } + return e, false +} diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/error_is_as.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/error_is_as.go index ab92c2ec0b..f2812c9393 100644 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/error_is_as.go +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/error_is_as.go @@ -67,12 +67,11 @@ func (checker ErrorIsAs) Check(pass *analysis.Pass, call *CallMeta) *analysis.Di } if proposed != "" { return newUseFunctionDiagnostic(checker.Name(), call, proposed, - newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{ + analysis.TextEdit{ Pos: ce.Pos(), End: ce.End(), NewText: formatAsCallArgs(pass, ce.Args[0], ce.Args[1]), - }), - ) + }) } case "False": @@ -91,12 +90,11 @@ func (checker ErrorIsAs) Check(pass *analysis.Pass, call *CallMeta) *analysis.Di if isErrorsIsCall(pass, ce) { const proposed = "NotErrorIs" return newUseFunctionDiagnostic(checker.Name(), call, proposed, - newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{ + analysis.TextEdit{ Pos: ce.Pos(), End: ce.End(), NewText: formatAsCallArgs(pass, ce.Args[0], ce.Args[1]), - }), - ) + }) } case "ErrorAs": @@ -127,15 +125,15 @@ func (checker ErrorIsAs) Check(pass *analysis.Pass, call *CallMeta) *analysis.Di pt, ok := tv.Type.Underlying().(*types.Pointer) if !ok { - return newDiagnostic(checker.Name(), call, defaultReport, nil) + return newDiagnostic(checker.Name(), call, defaultReport) } if pt.Elem() == errorType { - return newDiagnostic(checker.Name(), call, errorPtrReport, nil) + return newDiagnostic(checker.Name(), call, errorPtrReport) } _, isInterface := pt.Elem().Underlying().(*types.Interface) if !isInterface && !types.Implements(pt.Elem(), errorIface) { - return newDiagnostic(checker.Name(), call, defaultReport, nil) + return newDiagnostic(checker.Name(), call, defaultReport) } } return nil diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/error_nil.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/error_nil.go index 1e56d222ab..b9f28df218 100644 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/error_nil.go +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/error_nil.go @@ -12,12 +12,16 @@ import ( // ErrorNil detects situations like // // assert.Nil(t, err) -// assert.NotNil(t, err) +// assert.Empty(t, err) +// assert.Zero(t, err) // assert.Equal(t, nil, err) // assert.EqualValues(t, nil, err) // assert.Exactly(t, nil, err) // assert.ErrorIs(t, err, nil) // +// assert.NotNil(t, err) +// assert.NotEmpty(t, err) +// assert.NotZero(t, err) // assert.NotEqual(t, nil, err) // assert.NotEqualValues(t, nil, err) // assert.NotErrorIs(t, err, nil) @@ -40,12 +44,12 @@ func (checker ErrorNil) Check(pass *analysis.Pass, call *CallMeta) *analysis.Dia proposedFn, survivingArg, replacementEndPos := func() (string, ast.Expr, token.Pos) { switch call.Fn.NameFTrimmed { - case "Nil": + case "Nil", "Empty", "Zero": if len(call.Args) >= 1 && isError(pass, call.Args[0]) { return noErrorFn, call.Args[0], call.Args[0].End() } - case "NotNil": + case "NotNil", "NotEmpty", "NotZero": if len(call.Args) >= 1 
&& isError(pass, call.Args[0]) { return errorFn, call.Args[0], call.Args[0].End() } @@ -81,12 +85,11 @@ func (checker ErrorNil) Check(pass *analysis.Pass, call *CallMeta) *analysis.Dia if proposedFn != "" { return newUseFunctionDiagnostic(checker.Name(), call, proposedFn, - newSuggestedFuncReplacement(call, proposedFn, analysis.TextEdit{ + analysis.TextEdit{ Pos: call.Args[0].Pos(), End: replacementEndPos, NewText: analysisutil.NodeBytes(pass.Fset, survivingArg), - }), - ) + }) } return nil } diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/expected_actual.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/expected_actual.go index 77784dc7b1..351d675cef 100644 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/expected_actual.go +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/expected_actual.go @@ -87,7 +87,7 @@ func (checker ExpectedActual) Check(pass *analysis.Pass, call *CallMeta) *analys first, second := call.Args[0], call.Args[1] if checker.isWrongExpectedActualOrder(pass, first, second) { - return newDiagnostic(checker.Name(), call, "need to reverse actual and expected values", &analysis.SuggestedFix{ + return newDiagnostic(checker.Name(), call, "need to reverse actual and expected values", analysis.SuggestedFix{ Message: "Reverse actual and expected values", TextEdits: []analysis.TextEdit{ { diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/float_compare.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/float_compare.go index 7436f9ca1d..6bc22cd021 100644 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/float_compare.go +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/float_compare.go @@ -44,7 +44,7 @@ func (checker FloatCompare) Check(pass *analysis.Pass, call *CallMeta) *analysis if call.Fn.IsFmt { format = "use %s.InEpsilonf (or InDeltaf)" } - return newDiagnostic(checker.Name(), call, fmt.Sprintf(format, call.SelectorXStr), nil) + return newDiagnostic(checker.Name(), call, fmt.Sprintf(format, call.SelectorXStr)) } return nil } diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/formatter.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/formatter.go index 3401bb0977..896b6bf5fa 100644 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/formatter.go +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/formatter.go @@ -1,8 +1,6 @@ package checkers import ( - "fmt" - "go/ast" "go/types" "strconv" @@ -60,7 +58,7 @@ func (checker Formatter) Check(pass *analysis.Pass, call *CallMeta) (result *ana } func (checker Formatter) checkNotFmtAssertion(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic { - msgAndArgsPos, ok := isPrintfLikeCall(pass, call, call.Fn.Signature) + msgAndArgsPos, ok := isPrintfLikeCall(pass, call) if !ok { return nil } @@ -71,21 +69,15 @@ func (checker Formatter) checkNotFmtAssertion(pass *analysis.Pass, call *CallMet msgAndArgs := call.ArgsRaw[msgAndArgsPos] if args, ok := isFmtSprintfCall(pass, msgAndArgs); ok { if checker.requireFFuncs { - msg := fmt.Sprintf("remove unnecessary fmt.Sprintf and use %s.%s", call.SelectorXStr, fFunc) - return newDiagnostic(checker.Name(), call, msg, - newSuggestedFuncReplacement(call, fFunc, analysis.TextEdit{ - Pos: msgAndArgs.Pos(), - End: msgAndArgs.End(), - NewText: formatAsCallArgs(pass, args...), - }), - ) + return newRemoveFnAndUseDiagnostic(pass, checker.Name(), call, fFunc, + "fmt.Sprintf", msgAndArgs, args...) 
} return newRemoveSprintfDiagnostic(pass, checker.Name(), call, msgAndArgs, args) } } if checker.requireFFuncs { - return newUseFunctionDiagnostic(checker.Name(), call, fFunc, newSuggestedFuncReplacement(call, fFunc)) + return newUseFunctionDiagnostic(checker.Name(), call, fFunc) } return nil } @@ -109,7 +101,7 @@ func (checker Formatter) checkFmtAssertion(pass *analysis.Pass, call *CallMeta) defer func() { pass.Report = report }() pass.Report = func(d analysis.Diagnostic) { - result = newDiagnostic(checker.Name(), call, d.Message, nil) + result = newDiagnostic(checker.Name(), call, d.Message) } format, err := strconv.Unquote(analysisutil.NodeString(pass.Fset, msg)) @@ -121,21 +113,51 @@ func (checker Formatter) checkFmtAssertion(pass *analysis.Pass, call *CallMeta) return result } -func isPrintfLikeCall(pass *analysis.Pass, call *CallMeta, sig *types.Signature) (int, bool) { - msgAndArgsPos := getMsgAndArgsPosition(sig) +func isPrintfLikeCall(pass *analysis.Pass, call *CallMeta) (int, bool) { + msgAndArgsPos := getMsgAndArgsPosition(call.Fn.Signature) if msgAndArgsPos < 0 { return -1, false } - fmtFn := analysisutil.ObjectOf(pass.Pkg, testify.AssertPkgPath, call.Fn.Name+"f") - if fmtFn == nil { - // NOTE(a.telyshev): No formatted analogue of assertion. + if !assertHasFormattedAnalogue(pass, call) { return -1, false } return msgAndArgsPos, len(call.ArgsRaw) > msgAndArgsPos } +func assertHasFormattedAnalogue(pass *analysis.Pass, call *CallMeta) bool { + if fn := analysisutil.ObjectOf(pass.Pkg, testify.AssertPkgPath, call.Fn.Name+"f"); fn != nil { + return true + } + + if fn := analysisutil.ObjectOf(pass.Pkg, testify.RequirePkgPath, call.Fn.Name+"f"); fn != nil { + return true + } + + recv := call.Fn.Signature.Recv() + if recv == nil { + return false + } + + recvT := recv.Type() + if ptr, ok := recv.Type().(*types.Pointer); ok { + recvT = ptr.Elem() + } + + suite, ok := recvT.(*types.Named) + if !ok { + return false + } + for i := 0; i < suite.NumMethods(); i++ { + if suite.Method(i).Name() == call.Fn.Name+"f" { + return true + } + } + + return false +} + func getMsgAndArgsPosition(sig *types.Signature) int { params := sig.Params() if params.Len() < 1 { @@ -162,26 +184,3 @@ func getMsgPosition(sig *types.Signature) int { } return -1 } - -func isFmtSprintfCall(pass *analysis.Pass, expr ast.Expr) ([]ast.Expr, bool) { - ce, ok := expr.(*ast.CallExpr) - if !ok { - return nil, false - } - - se, ok := ce.Fun.(*ast.SelectorExpr) - if !ok { - return nil, false - } - - sprintfObj := analysisutil.ObjectOf(pass.Pkg, "fmt", "Sprintf") - if sprintfObj == nil { - return nil, false - } - - if !analysisutil.IsObj(pass.TypesInfo, se.Sel, sprintfObj) { - return nil, false - } - - return ce.Args, true -} diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/go_require.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/go_require.go index 060c960330..8b0d39999e 100644 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/go_require.go +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/go_require.go @@ -142,11 +142,11 @@ func (checker GoRequire) Check(pass *analysis.Pass, inspector *inspector.Inspect if testifyCall != nil { switch checker.checkCall(testifyCall) { case goRequireVerdictRequire: - d := newDiagnostic(checker.Name(), testifyCall, fmt.Sprintf(goRequireCallReportFormat, "require"), nil) + d := newDiagnostic(checker.Name(), testifyCall, fmt.Sprintf(goRequireCallReportFormat, "require")) diagnostics = append(diagnostics, *d) case 
goRequireVerdictAssertFailNow: - d := newDiagnostic(checker.Name(), testifyCall, fmt.Sprintf(goRequireCallReportFormat, testifyCall), nil) + d := newDiagnostic(checker.Name(), testifyCall, fmt.Sprintf(goRequireCallReportFormat, testifyCall)) diagnostics = append(diagnostics, *d) case goRequireVerdictNoExit: @@ -163,7 +163,7 @@ func (checker GoRequire) Check(pass *analysis.Pass, inspector *inspector.Inspect if v := checker.checkFunc(pass, calledFd, testsDecls, processedFuncs); v != goRequireVerdictNoExit { caller := analysisutil.NodeString(pass.Fset, ce.Fun) - d := newDiagnostic(checker.Name(), ce, fmt.Sprintf(goRequireFnReportFormat, caller), nil) + d := newDiagnostic(checker.Name(), ce, fmt.Sprintf(goRequireFnReportFormat, caller)) diagnostics = append(diagnostics, *d) } } @@ -198,11 +198,11 @@ func (checker GoRequire) checkHTTPHandlers(pass *analysis.Pass, insp *inspector. switch checker.checkCall(testifyCall) { case goRequireVerdictRequire: - d := newDiagnostic(checker.Name(), testifyCall, fmt.Sprintf(goRequireHTTPHandlerReportFormat, "require"), nil) + d := newDiagnostic(checker.Name(), testifyCall, fmt.Sprintf(goRequireHTTPHandlerReportFormat, "require")) diagnostics = append(diagnostics, *d) case goRequireVerdictAssertFailNow: - d := newDiagnostic(checker.Name(), testifyCall, fmt.Sprintf(goRequireHTTPHandlerReportFormat, testifyCall), nil) + d := newDiagnostic(checker.Name(), testifyCall, fmt.Sprintf(goRequireHTTPHandlerReportFormat, testifyCall)) diagnostics = append(diagnostics, *d) case goRequireVerdictNoExit: diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_basic_type.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_basic_type.go index 432a3032c9..9b43e914cc 100644 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_basic_type.go +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_basic_type.go @@ -1,10 +1,10 @@ package checkers import ( - "fmt" "go/ast" "go/token" "go/types" + "strconv" "golang.org/x/tools/go/analysis" ) @@ -56,9 +56,29 @@ func isTypedIntNumber(e ast.Expr, v int, types ...string) bool { return false } -func isIntNumber(e ast.Expr, v int) bool { +func isIntNumber(e ast.Expr, rhs int) bool { + lhs, ok := isIntBasicLit(e) + return ok && (lhs == rhs) +} + +func isNegativeIntNumber(e ast.Expr) bool { + v, ok := isIntBasicLit(e) + return ok && v < 0 +} + +func isPositiveIntNumber(e ast.Expr) bool { + v, ok := isIntBasicLit(e) + return ok && v > 0 +} + +func isEmptyStringLit(e ast.Expr) bool { + bl, ok := e.(*ast.BasicLit) + return ok && bl.Kind == token.STRING && bl.Value == `""` +} + +func isNotEmptyStringLit(e ast.Expr) bool { bl, ok := e.(*ast.BasicLit) - return ok && bl.Kind == token.INT && bl.Value == fmt.Sprintf("%d", v) + return ok && bl.Kind == token.STRING && bl.Value != `""` } func isBasicLit(e ast.Expr) bool { @@ -66,9 +86,27 @@ func isBasicLit(e ast.Expr) bool { return ok } -func isIntBasicLit(e ast.Expr) bool { +func isIntBasicLit(e ast.Expr) (int, bool) { + if un, ok := e.(*ast.UnaryExpr); ok { + if un.Op == token.SUB { + v, ok := isIntBasicLit(un.X) + return -1 * v, ok + } + } + bl, ok := e.(*ast.BasicLit) - return ok && bl.Kind == token.INT + if !ok { + return 0, false + } + if bl.Kind != token.INT { + return 0, false + } + + v, err := strconv.Atoi(bl.Value) + if err != nil { + return 0, false + } + return v, true } func isUntypedConst(pass *analysis.Pass, e ast.Expr) bool { @@ -98,12 +136,37 @@ func isUnderlying(pass *analysis.Pass, e ast.Expr, flag 
types.BasicInfo) bool { return ok && (bt.Info()&flag > 0) } -func isPointer(pass *analysis.Pass, e ast.Expr) bool { - _, ok := pass.TypesInfo.TypeOf(e).(*types.Pointer) - return ok +func isPointer(pass *analysis.Pass, e ast.Expr) (types.Type, bool) { + ptr, ok := pass.TypesInfo.TypeOf(e).(*types.Pointer) + if !ok { + return nil, false + } + return ptr.Elem(), true +} + +// isByteArray returns true if expression is `[]byte` itself. +func isByteArray(e ast.Expr) bool { + at, ok := e.(*ast.ArrayType) + return ok && isIdentWithName("byte", at.Elt) +} + +// hasBytesType returns true if the expression is of `[]byte` type. +func hasBytesType(pass *analysis.Pass, e ast.Expr) bool { + t := pass.TypesInfo.TypeOf(e) + if t == nil { + return false + } + + sl, ok := t.(*types.Slice) + if !ok { + return false + } + + el, ok := sl.Elem().(*types.Basic) + return ok && el.Kind() == types.Uint8 } -// untype returns v from type(v) expression or v itself if there is no type cast. +// untype returns v from type(v) expression or v itself if there is no type conversion. func untype(e ast.Expr) ast.Expr { ce, ok := e.(*ast.CallExpr) if !ok || len(ce.Args) != 1 { diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_diagnostic.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_diagnostic.go index 3ae88a560e..f12d87aa35 100644 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_diagnostic.go +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_diagnostic.go @@ -7,11 +7,32 @@ import ( "golang.org/x/tools/go/analysis" ) +func newRemoveFnAndUseDiagnostic( + pass *analysis.Pass, + checker string, + call *CallMeta, + proposedFn string, + removedFn string, + removedFnPos analysis.Range, + removedFnArgs ...ast.Expr, +) *analysis.Diagnostic { + f := proposedFn + if call.Fn.IsFmt { + f += "f" + } + msg := fmt.Sprintf("remove unnecessary %s and use %s.%s", removedFn, call.SelectorXStr, f) + + return newDiagnostic(checker, call, msg, + newSuggestedFuncRemoving(pass, removedFn, removedFnPos, removedFnArgs...), + newSuggestedFuncReplacement(call, proposedFn), + ) +} + func newUseFunctionDiagnostic( checker string, call *CallMeta, proposedFn string, - fix *analysis.SuggestedFix, + additionalEdits ...analysis.TextEdit, ) *analysis.Diagnostic { f := proposedFn if call.Fn.IsFmt { @@ -19,33 +40,57 @@ func newUseFunctionDiagnostic( } msg := fmt.Sprintf("use %s.%s", call.SelectorXStr, f) - return newDiagnostic(checker, call, msg, fix) + return newDiagnostic(checker, call, msg, + newSuggestedFuncReplacement(call, proposedFn, additionalEdits...)) +} + +func newRemoveLenDiagnostic( + pass *analysis.Pass, + checker string, + call *CallMeta, + fnPos analysis.Range, + fnArg ast.Expr, +) *analysis.Diagnostic { + return newRemoveFnDiagnostic(pass, checker, call, "len", fnPos, fnArg) +} + +func newRemoveMustCompileDiagnostic( + pass *analysis.Pass, + checker string, + call *CallMeta, + fnPos analysis.Range, + fnArg ast.Expr, +) *analysis.Diagnostic { + return newRemoveFnDiagnostic(pass, checker, call, "regexp.MustCompile", fnPos, fnArg) } func newRemoveSprintfDiagnostic( pass *analysis.Pass, checker string, call *CallMeta, - sprintfPos analysis.Range, - sprintfArgs []ast.Expr, + fnPos analysis.Range, + fnArgs []ast.Expr, ) *analysis.Diagnostic { - return newDiagnostic(checker, call, "remove unnecessary fmt.Sprintf", &analysis.SuggestedFix{ - Message: "Remove `fmt.Sprintf`", - TextEdits: []analysis.TextEdit{ - { - Pos: sprintfPos.Pos(), - End: sprintfPos.End(), 
- NewText: formatAsCallArgs(pass, sprintfArgs...), - }, - }, - }) + return newRemoveFnDiagnostic(pass, checker, call, "fmt.Sprintf", fnPos, fnArgs...) +} + +func newRemoveFnDiagnostic( + pass *analysis.Pass, + checker string, + call *CallMeta, + fnName string, + fnPos analysis.Range, + fnArgs ...ast.Expr, +) *analysis.Diagnostic { + return newDiagnostic(checker, call, "remove unnecessary "+fnName, + newSuggestedFuncRemoving(pass, fnName, fnPos, fnArgs...)) } func newDiagnostic( checker string, rng analysis.Range, msg string, - fix *analysis.SuggestedFix, + fixes ...analysis.SuggestedFix, ) *analysis.Diagnostic { d := analysis.Diagnostic{ Pos: rng.Pos(), @@ -53,21 +98,39 @@ func newDiagnostic( Category: checker, Message: checker + ": " + msg, } - if fix != nil { - d.SuggestedFixes = []analysis.SuggestedFix{*fix} + if len(fixes) != 0 { + d.SuggestedFixes = fixes } return &d } +func newSuggestedFuncRemoving( + pass *analysis.Pass, + fnName string, + fnPos analysis.Range, + fnArgs ...ast.Expr, +) analysis.SuggestedFix { + return analysis.SuggestedFix{ + Message: fmt.Sprintf("Remove `%s`", fnName), + TextEdits: []analysis.TextEdit{ + { + Pos: fnPos.Pos(), + End: fnPos.End(), + NewText: formatAsCallArgs(pass, fnArgs...), + }, + }, + } +} + func newSuggestedFuncReplacement( call *CallMeta, proposedFn string, additionalEdits ...analysis.TextEdit, -) *analysis.SuggestedFix { +) analysis.SuggestedFix { if call.Fn.IsFmt { proposedFn += "f" } - return &analysis.SuggestedFix{ + return analysis.SuggestedFix{ Message: fmt.Sprintf("Replace `%s` with `%s`", call.Fn.Name, proposedFn), TextEdits: append([]analysis.TextEdit{ { diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_encoded.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_encoded.go new file mode 100644 index 0000000000..35a497a724 --- /dev/null +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_encoded.go @@ -0,0 +1,40 @@ +package checkers + +import ( + "go/ast" + "go/token" + "regexp" + + "golang.org/x/tools/go/analysis" + + "github.com/Antonboom/testifylint/internal/analysisutil" +) + +var ( + jsonIdentRe = regexp.MustCompile(`json|JSON|Json`) + yamlIdentRe = regexp.MustCompile(`yaml|YAML|Yaml|yml|YML|Yml`) +) + +func isJSONStyleExpr(pass *analysis.Pass, e ast.Expr) bool { + if isIdentNamedAfterPattern(jsonIdentRe, e) { + return true + } + + if t, ok := pass.TypesInfo.Types[e]; ok && t.Value != nil { + return analysisutil.IsJSONLike(t.Value.String()) + } + + if bl, ok := e.(*ast.BasicLit); ok { + return bl.Kind == token.STRING && analysisutil.IsJSONLike(bl.Value) + } + + if args, ok := isFmtSprintfCall(pass, e); ok { + return isJSONStyleExpr(pass, args[0]) + } + + return false +} + +func isYAMLStyleExpr(e ast.Expr) bool { + return isIdentNamedAfterPattern(yamlIdentRe, e) +} diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_error.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_error.go index 55cd5fd05a..859a39ee87 100644 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_error.go +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_error.go @@ -5,8 +5,6 @@ import ( "go/types" "golang.org/x/tools/go/analysis" - - "github.com/Antonboom/testifylint/internal/analysisutil" ) var ( @@ -20,23 +18,9 @@ func isError(pass *analysis.Pass, expr ast.Expr) bool { } func isErrorsIsCall(pass *analysis.Pass, ce *ast.CallExpr) bool { - return isErrorsPkgFnCall(pass, ce, "Is") + return isPkgFnCall(pass, 
ce, "errors", "Is") } func isErrorsAsCall(pass *analysis.Pass, ce *ast.CallExpr) bool { - return isErrorsPkgFnCall(pass, ce, "As") -} - -func isErrorsPkgFnCall(pass *analysis.Pass, ce *ast.CallExpr, fn string) bool { - se, ok := ce.Fun.(*ast.SelectorExpr) - if !ok { - return false - } - - errorsIsObj := analysisutil.ObjectOf(pass.Pkg, "errors", fn) - if errorsIsObj == nil { - return false - } - - return analysisutil.IsObj(pass.TypesInfo, se.Sel, errorsIsObj) + return isPkgFnCall(pass, ce, "errors", "As") } diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_format.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_format.go index 765fce5276..d69c42860f 100644 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_format.go +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_format.go @@ -3,6 +3,7 @@ package checkers import ( "bytes" "go/ast" + "strings" "golang.org/x/tools/go/analysis" @@ -24,3 +25,37 @@ func formatAsCallArgs(pass *analysis.Pass, args ...ast.Expr) []byte { } return buf.Bytes() } + +func formatWithStringCastForBytes(pass *analysis.Pass, e ast.Expr) []byte { + if !hasBytesType(pass, e) { + return analysisutil.NodeBytes(pass.Fset, e) + } + + if se, ok := isBufferBytesCall(pass, e); ok { + return []byte(analysisutil.NodeString(pass.Fset, se) + ".String()") + } + return []byte("string(" + analysisutil.NodeString(pass.Fset, e) + ")") +} + +func isBufferBytesCall(pass *analysis.Pass, e ast.Expr) (ast.Node, bool) { + ce, ok := e.(*ast.CallExpr) + if !ok { + return nil, false + } + + se, ok := ce.Fun.(*ast.SelectorExpr) + if !ok { + return nil, false + } + + if !isIdentWithName("Bytes", se.Sel) { + return nil, false + } + if t := pass.TypesInfo.TypeOf(se.X); t != nil { + // NOTE(a.telyshev): This is hack, because `bytes` package can be not imported, + // and we cannot do "true" comparison with `Buffer` object. 
+ return se.X, strings.TrimPrefix(t.String(), "*") == "bytes.Buffer" + } + + return nil, false +} diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_interface.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_interface.go index b0c0d13020..ad39c72d74 100644 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_interface.go +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_interface.go @@ -15,8 +15,11 @@ func isEmptyInterface(pass *analysis.Pass, expr ast.Expr) bool { if !ok { return false } + return isEmptyInterfaceType(t.Type) +} - iface, ok := t.Type.Underlying().(*types.Interface) +func isEmptyInterfaceType(t types.Type) bool { + iface, ok := t.Underlying().(*types.Interface) return ok && iface.NumMethods() == 0 } diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_pkg_func.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_pkg_func.go new file mode 100644 index 0000000000..daf309339c --- /dev/null +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_pkg_func.go @@ -0,0 +1,59 @@ +package checkers + +import ( + "go/ast" + + "golang.org/x/tools/go/analysis" + + "github.com/Antonboom/testifylint/internal/analysisutil" +) + +func isFmtSprintfCall(pass *analysis.Pass, e ast.Expr) ([]ast.Expr, bool) { + ce, ok := e.(*ast.CallExpr) + if !ok { + return nil, false + } + return ce.Args, isPkgFnCall(pass, ce, "fmt", "Sprintf") +} + +func isJSONRawMessageCast(pass *analysis.Pass, ce *ast.CallExpr) bool { + return isPkgFnCall(pass, ce, "encoding/json", "RawMessage") +} + +func isRegexpMustCompileCall(pass *analysis.Pass, ce *ast.CallExpr) bool { + return isPkgFnCall(pass, ce, "regexp", "MustCompile") +} + +func isStringsContainsCall(pass *analysis.Pass, ce *ast.CallExpr) bool { + return isPkgFnCall(pass, ce, "strings", "Contains") +} + +func isStringsReplaceCall(pass *analysis.Pass, ce *ast.CallExpr) bool { + return isPkgFnCall(pass, ce, "strings", "Replace") +} + +func isStringsReplaceAllCall(pass *analysis.Pass, ce *ast.CallExpr) bool { + return isPkgFnCall(pass, ce, "strings", "ReplaceAll") +} + +func isStringsTrimCall(pass *analysis.Pass, ce *ast.CallExpr) bool { + return isPkgFnCall(pass, ce, "strings", "Trim") +} + +func isStringsTrimSpaceCall(pass *analysis.Pass, ce *ast.CallExpr) bool { + return isPkgFnCall(pass, ce, "strings", "TrimSpace") +} + +func isPkgFnCall(pass *analysis.Pass, ce *ast.CallExpr, pkg, fn string) bool { + se, ok := ce.Fun.(*ast.SelectorExpr) + if !ok { + return false + } + + fnObj := analysisutil.ObjectOf(pass.Pkg, pkg, fn) + if fnObj == nil { + return false + } + + return analysisutil.IsObj(pass.TypesInfo, se.Sel, fnObj) +} diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/len.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/len.go index 47330568c6..c240a61744 100644 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/len.go +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/len.go @@ -31,17 +31,16 @@ func (checker Len) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnost a, b := call.Args[0], call.Args[1] if lenArg, expectedLen, ok := xorLenCall(pass, a, b); ok { - if expectedLen == b && !isIntBasicLit(expectedLen) { + if _, ok := isIntBasicLit(expectedLen); (expectedLen == b) && !ok { // https://github.com/Antonboom/testifylint/issues/9 return nil } return newUseFunctionDiagnostic(checker.Name(), call, proposedFn, - 
newSuggestedFuncReplacement(call, proposedFn, analysis.TextEdit{ + analysis.TextEdit{ Pos: a.Pos(), End: b.End(), NewText: formatAsCallArgs(pass, lenArg, expectedLen), - }), - ) + }) } case "True": @@ -50,14 +49,16 @@ func (checker Len) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnost } expr := call.Args[0] - if lenArg, expectedLen, ok := isLenEquality(pass, expr); ok && isIntBasicLit(expectedLen) { + if lenArg, expectedLen, ok := isLenEquality(pass, expr); ok { + if _, ok := isIntBasicLit(expectedLen); !ok { + return nil + } return newUseFunctionDiagnostic(checker.Name(), call, proposedFn, - newSuggestedFuncReplacement(call, proposedFn, analysis.TextEdit{ + analysis.TextEdit{ Pos: expr.Pos(), End: expr.End(), NewText: formatAsCallArgs(pass, lenArg, expectedLen), - }), - ) + }) } } return nil diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/negative_postive.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/negative_positive.go similarity index 96% rename from vendor/github.com/Antonboom/testifylint/internal/checkers/negative_postive.go rename to vendor/github.com/Antonboom/testifylint/internal/checkers/negative_positive.go index 274021f679..a61bbdfcb8 100644 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/negative_postive.go +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/negative_positive.go @@ -48,12 +48,11 @@ func (checker NegativePositive) checkNegative(pass *analysis.Pass, call *CallMet newUseNegativeDiagnostic := func(replaceStart, replaceEnd token.Pos, replaceWith ast.Expr) *analysis.Diagnostic { const proposed = "Negative" return newUseFunctionDiagnostic(checker.Name(), call, proposed, - newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{ + analysis.TextEdit{ Pos: replaceStart, End: replaceEnd, NewText: analysisutil.NodeBytes(pass.Fset, replaceWith), - }), - ) + }) } // NOTE(a.telyshev): We ignore uint-asserts as being no sense for assert.Negative. 
@@ -114,12 +113,11 @@ func (checker NegativePositive) checkPositive(pass *analysis.Pass, call *CallMet newUsePositiveDiagnostic := func(replaceStart, replaceEnd token.Pos, replaceWith ast.Expr) *analysis.Diagnostic { const proposed = "Positive" return newUseFunctionDiagnostic(checker.Name(), call, proposed, - newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{ + analysis.TextEdit{ Pos: replaceStart, End: replaceEnd, NewText: analysisutil.NodeBytes(pass.Fset, replaceWith), - }), - ) + }) } switch call.Fn.NameFTrimmed { diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/nil_compare.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/nil_compare.go index 47c4a7383f..fc1adb7ead 100644 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/nil_compare.go +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/nil_compare.go @@ -47,10 +47,9 @@ func (checker NilCompare) Check(pass *analysis.Pass, call *CallMeta) *analysis.D } return newUseFunctionDiagnostic(checker.Name(), call, proposedFn, - newSuggestedFuncReplacement(call, proposedFn, analysis.TextEdit{ + analysis.TextEdit{ Pos: call.Args[0].Pos(), End: call.Args[1].End(), NewText: analysisutil.NodeBytes(pass.Fset, survivingArg), - }), - ) + }) } diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/printf/printf.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/printf/printf.go index cfb47b5426..4f6e3f9c44 100644 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/printf/printf.go +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/printf/printf.go @@ -213,9 +213,9 @@ func isFormatter(typ types.Type) bool { types.Identical(sig.Params().At(1).Type(), types.Typ[types.Rune]) } -// isTypeParam reports whether t is a type parameter. +// isTypeParam reports whether t is a type parameter (or an alias of one). func isTypeParam(t types.Type) bool { - _, ok := t.(*types.TypeParam) + _, ok := types.Unalias(t).(*types.TypeParam) return ok } @@ -224,7 +224,7 @@ func isTypeParam(t types.Type) bool { // This function avoids allocating the concatenation of "pkg.Name", // which is important for the performance of syntax matching. func isNamedType(t types.Type, pkgPath string, names ...string) bool { - n, ok := t.(*types.Named) + n, ok := types.Unalias(t).(*types.Named) if !ok { return false } diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/regexp.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/regexp.go new file mode 100644 index 0000000000..d634b74bd8 --- /dev/null +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/regexp.go @@ -0,0 +1,44 @@ +package checkers + +import ( + "go/ast" + + "golang.org/x/tools/go/analysis" +) + +// Regexp detects situations like +// +// assert.Regexp(t, regexp.MustCompile(`\[.*\] DEBUG \(.*TestNew.*\): message`), out) +// assert.NotRegexp(t, regexp.MustCompile(`\[.*\] TRACE message`), out) +// +// and requires +// +// assert.Regexp(t, `\[.*\] DEBUG \(.*TestNew.*\): message`, out) +// assert.NotRegexp(t, `\[.*\] TRACE message`, out) +type Regexp struct{} + +// NewRegexp constructs Regexp checker. 
+func NewRegexp() Regexp { return Regexp{} } +func (Regexp) Name() string { return "regexp" } + +func (checker Regexp) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic { + switch call.Fn.NameFTrimmed { + default: + return nil + case "Regexp", "NotRegexp": + } + + if len(call.Args) < 1 { + return nil + } + + ce, ok := call.Args[0].(*ast.CallExpr) + if !ok || len(ce.Args) != 1 { + return nil + } + + if isRegexpMustCompileCall(pass, ce) { + return newRemoveMustCompileDiagnostic(pass, checker.Name(), call, ce, ce.Args[0]) + } + return nil +} diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/require_error.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/require_error.go index 4303828fd9..e4e30aaf4a 100644 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/require_error.go +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/require_error.go @@ -134,7 +134,7 @@ func (checker RequireError) Check(pass *analysis.Pass, inspector *inspector.Insp } diagnostics = append(diagnostics, - *newDiagnostic(checker.Name(), c.testifyCall, requireErrorReport, nil)) + *newDiagnostic(checker.Name(), c.testifyCall, requireErrorReport)) } } @@ -197,11 +197,10 @@ func findRootIf(stack []ast.Node) *ast.IfStmt { nearestIf, i := findNearestNodeWithIdx[*ast.IfStmt](stack) for ; i > 0; i-- { parent, ok := stack[i-1].(*ast.IfStmt) - if ok { - nearestIf = parent - } else { + if !ok { break } + nearestIf = parent } return nearestIf } diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_broken_parallel.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_broken_parallel.go index f830fd2a58..4374c9359b 100644 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_broken_parallel.go +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_broken_parallel.go @@ -68,7 +68,7 @@ func (checker SuiteBrokenParallel) Check(pass *analysis.Pass, insp *inspector.In } nextLine := pass.Fset.Position(ce.Pos()).Line + 1 - d := newDiagnostic(checker.Name(), ce, report, &analysis.SuggestedFix{ + d := newDiagnostic(checker.Name(), ce, report, analysis.SuggestedFix{ Message: fmt.Sprintf("Remove `%s` call", analysisutil.NodeString(pass.Fset, ce)), TextEdits: []analysis.TextEdit{ { diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_dont_use_pkg.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_dont_use_pkg.go index 6150ae78da..4fbfbe7e09 100644 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_dont_use_pkg.go +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_dont_use_pkg.go @@ -60,7 +60,7 @@ func (checker SuiteDontUsePkg) Check(pass *analysis.Pass, call *CallMeta) *analy } msg := fmt.Sprintf("use %s.%s", newSelector, call.Fn.Name) - return newDiagnostic(checker.Name(), call, msg, &analysis.SuggestedFix{ + return newDiagnostic(checker.Name(), call, msg, analysis.SuggestedFix{ Message: fmt.Sprintf("Replace `%s` with `%s`", call.SelectorXStr, newSelector), TextEdits: []analysis.TextEdit{ // Replace package function with suite method. 
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_extra_assert_call.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_extra_assert_call.go index 9adfe5190c..fdea324fd1 100644 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_extra_assert_call.go +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_extra_assert_call.go @@ -61,7 +61,7 @@ func (checker SuiteExtraAssertCall) Check(pass *analysis.Pass, call *CallMeta) * } msg := fmt.Sprintf("use an explicit %s.Assert().%s", analysisutil.NodeString(pass.Fset, x), call.Fn.Name) - return newDiagnostic(checker.Name(), call, msg, &analysis.SuggestedFix{ + return newDiagnostic(checker.Name(), call, msg, analysis.SuggestedFix{ Message: "Add `Assert()` call", TextEdits: []analysis.TextEdit{{ Pos: x.End(), @@ -85,7 +85,7 @@ func (checker SuiteExtraAssertCall) Check(pass *analysis.Pass, call *CallMeta) * } msg := fmt.Sprintf("need to simplify the assertion to %s.%s", analysisutil.NodeString(pass.Fset, se.X), call.Fn.Name) - return newDiagnostic(checker.Name(), call, msg, &analysis.SuggestedFix{ + return newDiagnostic(checker.Name(), call, msg, analysis.SuggestedFix{ Message: "Remove `Assert()` call", TextEdits: []analysis.TextEdit{{ Pos: se.Sel.Pos(), diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_subtest_run.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_subtest_run.go index 67d9c252b8..525d5ffd86 100644 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_subtest_run.go +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_subtest_run.go @@ -53,7 +53,7 @@ func (checker SuiteSubtestRun) Check(pass *analysis.Pass, insp *inspector.Inspec if implementsTestifySuite(pass, tCallSel.X) && implementsTestingT(pass, tCall) { msg := fmt.Sprintf("use %s.Run to run subtest", analysisutil.NodeString(pass.Fset, tCallSel.X)) - diagnostics = append(diagnostics, *newDiagnostic(checker.Name(), ce, msg, nil)) + diagnostics = append(diagnostics, *newDiagnostic(checker.Name(), ce, msg)) } }) return diagnostics diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_thelper.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_thelper.go index 59455290d4..ef8d821321 100644 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_thelper.go +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_thelper.go @@ -50,8 +50,8 @@ func (checker SuiteTHelper) Check(pass *analysis.Pass, inspector *inspector.Insp return } - msg := fmt.Sprintf("suite helper method must start with " + helperCallStr) - d := newDiagnostic(checker.Name(), fd, msg, &analysis.SuggestedFix{ + msg := "suite helper method must start with " + helperCallStr + d := newDiagnostic(checker.Name(), fd, msg, analysis.SuggestedFix{ Message: fmt.Sprintf("Insert `%s`", helperCallStr), TextEdits: []analysis.TextEdit{ { diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/useless_assert.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/useless_assert.go index 6f206d0958..045706e5dc 100644 --- a/vendor/github.com/Antonboom/testifylint/internal/checkers/useless_assert.go +++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/useless_assert.go @@ -10,15 +10,40 @@ import ( // UselessAssert detects useless asserts like // -// 1) Asserting of the same variable -// +// assert.Contains(t, tt.value, tt.value) +// assert.ElementsMatch(t, tt.value, tt.value) // 
assert.Equal(t, tt.value, tt.value) -// assert.ElementsMatch(t, users, users) +// assert.EqualExportedValues(t, tt.value, tt.value) // ... +// // assert.True(t, num > num) +// assert.True(t, num < num) +// assert.True(t, num >= num) +// assert.True(t, num <= num) +// assert.True(t, num == num) +// assert.True(t, num != num) +// +// assert.False(t, num > num) +// assert.False(t, num < num) +// assert.False(t, num >= num) +// assert.False(t, num <= num) // assert.False(t, num == num) +// assert.False(t, num != num) // -// 2) Open for contribution... +// assert.Empty(t, "") +// assert.False(t, false) +// assert.Implements(t, (*any)(nil), new(Conn)) +// assert.Negative(t, -42) +// assert.Nil(t, nil) +// assert.NoError(t, nil) +// assert.NotEmpty(t, "value") +// assert.NotZero(t, 42) +// assert.NotZero(t, "value") +// assert.Positive(t, 42) +// assert.True(t, true) +// assert.Zero(t, 0) +// assert.Zero(t, "") +// assert.Zero(t, nil) type UselessAssert struct{} // NewUselessAssert constructs UselessAssert checker. @@ -26,6 +51,58 @@ func NewUselessAssert() UselessAssert { return UselessAssert{} } func (UselessAssert) Name() string { return "useless-assert" } func (checker UselessAssert) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic { + if d := checker.checkSameVars(pass, call); d != nil { + return d + } + + var isMeaningless bool + switch call.Fn.NameFTrimmed { + case "Empty": + isMeaningless = (len(call.Args) >= 1) && isEmptyStringLit(call.Args[0]) + + case "False": + isMeaningless = (len(call.Args) >= 1) && isUntypedFalse(pass, call.Args[0]) + + case "Implements": + if len(call.Args) < 2 { + return nil + } + + elem, ok := isPointer(pass, call.Args[0]) + isMeaningless = ok && isEmptyInterfaceType(elem) + + case "Negative": + isMeaningless = (len(call.Args) >= 1) && isNegativeIntNumber(call.Args[0]) + + case "Nil", "NoError": + isMeaningless = (len(call.Args) >= 1) && isNil(call.Args[0]) + + case "NotEmpty": + isMeaningless = (len(call.Args) >= 1) && isNotEmptyStringLit(call.Args[0]) + + case "NotZero": + isMeaningless = (len(call.Args) >= 1) && + (isNotEmptyStringLit(call.Args[0]) || + isNegativeIntNumber(call.Args[0]) || isPositiveIntNumber(call.Args[0])) + + case "Positive": + isMeaningless = (len(call.Args) >= 1) && isPositiveIntNumber(call.Args[0]) + + case "True": + isMeaningless = (len(call.Args) >= 1) && isUntypedTrue(pass, call.Args[0]) + + case "Zero": + isMeaningless = (len(call.Args) >= 1) && + (isZero(call.Args[0]) || isEmptyStringLit(call.Args[0]) || isNil(call.Args[0])) + } + + if isMeaningless { + return newDiagnostic(checker.Name(), call, "meaningless assertion") + } + return nil +} + +func (checker UselessAssert) checkSameVars(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic { var first, second ast.Node switch call.Fn.NameFTrimmed { @@ -82,7 +159,7 @@ func (checker UselessAssert) Check(pass *analysis.Pass, call *CallMeta) *analysi } if analysisutil.NodeString(pass.Fset, first) == analysisutil.NodeString(pass.Fset, second) { - return newDiagnostic(checker.Name(), call, "asserting of the same variable", nil) + return newDiagnostic(checker.Name(), call, "asserting of the same variable") } return nil } diff --git a/vendor/github.com/alecthomas/go-check-sumtype/README.md b/vendor/github.com/alecthomas/go-check-sumtype/README.md index 36614ef400..2ccec4e848 100644 --- a/vendor/github.com/alecthomas/go-check-sumtype/README.md +++ b/vendor/github.com/alecthomas/go-check-sumtype/README.md @@ -86,7 +86,8 @@ mysumtype.go:18:2: exhaustiveness check failed for sum 
type 'MySumType': missing ``` Adding either a `default` clause or a clause to handle `*VariantB` will cause -exhaustive checks to pass. +exhaustive checks to pass. To prevent `default` clauses from automatically +passing checks, set the `-default-signifies-exhaustive=false` flag. As a special case, if the type switch statement contains a `default` clause that always panics, then exhaustiveness checks are still performed. diff --git a/vendor/github.com/alecthomas/go-check-sumtype/check.go b/vendor/github.com/alecthomas/go-check-sumtype/check.go index 21d751af42..1a0a32517b 100644 --- a/vendor/github.com/alecthomas/go-check-sumtype/check.go +++ b/vendor/github.com/alecthomas/go-check-sumtype/check.go @@ -39,7 +39,7 @@ func (e inexhaustiveError) Names() []string { // check does exhaustiveness checking for the given sum type definitions in the // given package. Every instance of inexhaustive case analysis is returned. -func check(pkg *packages.Package, defs []sumTypeDef) []error { +func check(pkg *packages.Package, defs []sumTypeDef, config Config) []error { var errs []error for _, astfile := range pkg.Syntax { ast.Inspect(astfile, func(n ast.Node) bool { @@ -47,7 +47,7 @@ func check(pkg *packages.Package, defs []sumTypeDef) []error { if !ok { return true } - if err := checkSwitch(pkg, defs, swtch); err != nil { + if err := checkSwitch(pkg, defs, swtch, config); err != nil { errs = append(errs, err) } return true @@ -67,8 +67,9 @@ func checkSwitch( pkg *packages.Package, defs []sumTypeDef, swtch *ast.TypeSwitchStmt, + config Config, ) error { - def, missing := missingVariantsInSwitch(pkg, defs, swtch) + def, missing := missingVariantsInSwitch(pkg, defs, swtch, config) if len(missing) > 0 { return inexhaustiveError{ Position: pkg.Fset.Position(swtch.Pos()), @@ -87,6 +88,7 @@ func missingVariantsInSwitch( pkg *packages.Package, defs []sumTypeDef, swtch *ast.TypeSwitchStmt, + config Config, ) (*sumTypeDef, []types.Object) { asserted := findTypeAssertExpr(swtch) ty := pkg.TypesInfo.TypeOf(asserted) @@ -97,7 +99,7 @@ func missingVariantsInSwitch( return nil, nil } variantExprs, hasDefault := switchVariants(swtch) - if hasDefault && !defaultClauseAlwaysPanics(swtch) { + if config.DefaultSignifiesExhaustive && hasDefault && !defaultClauseAlwaysPanics(swtch) { // A catch-all case defeats all exhaustiveness checks. return def, nil } diff --git a/vendor/github.com/alecthomas/go-check-sumtype/config.go b/vendor/github.com/alecthomas/go-check-sumtype/config.go new file mode 100644 index 0000000000..759176eb7d --- /dev/null +++ b/vendor/github.com/alecthomas/go-check-sumtype/config.go @@ -0,0 +1,5 @@ +package gochecksumtype + +type Config struct { + DefaultSignifiesExhaustive bool +} diff --git a/vendor/github.com/alecthomas/go-check-sumtype/renovate.json5 b/vendor/github.com/alecthomas/go-check-sumtype/renovate.json5 new file mode 100644 index 0000000000..77c7b016cc --- /dev/null +++ b/vendor/github.com/alecthomas/go-check-sumtype/renovate.json5 @@ -0,0 +1,18 @@ +{ + $schema: "https://docs.renovatebot.com/renovate-schema.json", + extends: [ + "config:recommended", + ":semanticCommits", + ":semanticCommitTypeAll(chore)", + ":semanticCommitScope(deps)", + "group:allNonMajor", + "schedule:earlyMondays", // Run once a week.
+ ], + packageRules: [ + { + matchPackageNames: ["golangci-lint"], + matchManagers: ["hermit"], + enabled: false, + }, + ], +} diff --git a/vendor/github.com/alecthomas/go-check-sumtype/run.go b/vendor/github.com/alecthomas/go-check-sumtype/run.go index fdcb643c5d..f32942d7a0 100644 --- a/vendor/github.com/alecthomas/go-check-sumtype/run.go +++ b/vendor/github.com/alecthomas/go-check-sumtype/run.go @@ -3,7 +3,7 @@ package gochecksumtype import "golang.org/x/tools/go/packages" // Run sumtype checking on the given packages. -func Run(pkgs []*packages.Package) []error { +func Run(pkgs []*packages.Package, config Config) []error { var errs []error decls, err := findSumTypeDecls(pkgs) @@ -18,7 +18,7 @@ func Run(pkgs []*packages.Package) []error { } for _, pkg := range pkgs { - if pkgErrs := check(pkg, defs); pkgErrs != nil { + if pkgErrs := check(pkg, defs, config); pkgErrs != nil { errs = append(errs, pkgErrs...) } } diff --git a/vendor/github.com/alexkohler/nakedret/v2/nakedret.go b/vendor/github.com/alexkohler/nakedret/v2/nakedret.go index f78bb8cb6c..a557359288 100644 --- a/vendor/github.com/alexkohler/nakedret/v2/nakedret.go +++ b/vendor/github.com/alexkohler/nakedret/v2/nakedret.go @@ -22,10 +22,11 @@ import ( const pwd = "./" -func NakedReturnAnalyzer(defaultLines uint) *analysis.Analyzer { +func NakedReturnAnalyzer(defaultLines uint, skipTestFiles bool) *analysis.Analyzer { nakedRet := &NakedReturnRunner{} flags := flag.NewFlagSet("nakedret", flag.ExitOnError) flags.UintVar(&nakedRet.MaxLength, "l", defaultLines, "maximum number of lines for a naked return function") + flags.BoolVar(&nakedRet.SkipTestFiles, "skip-test-files", skipTestFiles, "set to true to skip test files") var analyzer = &analysis.Analyzer{ Name: "nakedret", Doc: "Checks that functions with naked returns are not longer than a maximum size (can be zero).", @@ -37,7 +38,8 @@ func NakedReturnAnalyzer(defaultLines uint) *analysis.Analyzer { } type NakedReturnRunner struct { - MaxLength uint + MaxLength uint + SkipTestFiles bool } func (n *NakedReturnRunner) run(pass *analysis.Pass) (any, error) { @@ -49,18 +51,20 @@ func (n *NakedReturnRunner) run(pass *analysis.Pass) (any, error) { (*ast.ReturnStmt)(nil), } retVis := &returnsVisitor{ - pass: pass, - f: pass.Fset, - maxLength: n.MaxLength, + pass: pass, + f: pass.Fset, + maxLength: n.MaxLength, + skipTestFiles: n.SkipTestFiles, } inspector.Nodes(nodeFilter, retVis.NodesVisit) return nil, nil } type returnsVisitor struct { - pass *analysis.Pass - f *token.FileSet - maxLength uint + pass *analysis.Pass + f *token.FileSet + maxLength uint + skipTestFiles bool // functions contains funcInfo for each nested function definition encountered while visiting the AST. 
functions []funcInfo @@ -74,7 +78,7 @@ type funcInfo struct { reportNaked bool } -func checkNakedReturns(args []string, maxLength *uint, setExitStatus bool) error { +func checkNakedReturns(args []string, maxLength *uint, skipTestFiles bool, setExitStatus bool) error { fset := token.NewFileSet() @@ -87,7 +91,7 @@ func checkNakedReturns(args []string, maxLength *uint, setExitStatus bool) error return errors.New("max length nil") } - analyzer := NakedReturnAnalyzer(*maxLength) + analyzer := NakedReturnAnalyzer(*maxLength, skipTestFiles) pass := &analysis.Pass{ Analyzer: analyzer, Fset: fset, @@ -292,6 +296,9 @@ func (v *returnsVisitor) NodesVisit(node ast.Node, push bool) bool { if push && funcType != nil { // Push function info to track returns for this function file := v.f.File(node.Pos()) + if v.skipTestFiles && strings.HasSuffix(file.Name(), "_test.go") { + return false + } length := file.Position(node.End()).Line - file.Position(node.Pos()).Line if length == 0 { // consider functions that finish on the same line as they start as single line functions, not zero lines! diff --git a/vendor/github.com/bkielbasa/cyclop/pkg/analyzer/analyzer.go b/vendor/github.com/bkielbasa/cyclop/pkg/analyzer/analyzer.go index eaf408d6f3..1972379df4 100644 --- a/vendor/github.com/bkielbasa/cyclop/pkg/analyzer/analyzer.go +++ b/vendor/github.com/bkielbasa/cyclop/pkg/analyzer/analyzer.go @@ -19,9 +19,13 @@ var ( skipTests bool ) +const ( + defaultMaxComplexity = 10 +) + //nolint:gochecknoinits func init() { - flagSet.IntVar(&maxComplexity, "maxComplexity", 10, "max complexity the function can have") + flagSet.IntVar(&maxComplexity, "maxComplexity", defaultMaxComplexity, "max complexity the function can have") flagSet.Float64Var(&packageAverage, "packageAverage", 0, "max average complexity in package") flagSet.BoolVar(&skipTests, "skipTests", false, "should the linter execute on test files as well") } @@ -29,7 +33,7 @@ func init() { func NewAnalyzer() *analysis.Analyzer { return &analysis.Analyzer{ Name: "cyclop", - Doc: "calculates cyclomatic complexity", + Doc: "checks function and package cyclomatic complexity", Run: run, Flags: flagSet, } @@ -40,9 +44,9 @@ func run(pass *analysis.Pass) (interface{}, error) { var pkgName string var pkgPos token.Pos - for _, f := range pass.Files { - ast.Inspect(f, func(node ast.Node) bool { - f, ok := node.(*ast.FuncDecl) + for _, file := range pass.Files { + ast.Inspect(file, func(node ast.Node) bool { + funcDecl, ok := node.(*ast.FuncDecl) if !ok { if node == nil { return true @@ -55,15 +59,15 @@ func run(pass *analysis.Pass) (interface{}, error) { return true } - if skipTests && testFunc(f) { + if skipTests && testFunc(funcDecl) { return true } count++ - comp := complexity(f) + comp := complexity(funcDecl) sum += float64(comp) if comp > maxComplexity { - pass.Reportf(node.Pos(), "calculated cyclomatic complexity for function %s is %d, max is %d", f.Name.Name, comp, maxComplexity) + pass.Reportf(node.Pos(), "calculated cyclomatic complexity for function %s is %d, max is %d", funcDecl.Name.Name, comp, maxComplexity) } return true diff --git a/vendor/github.com/breml/bidichk/pkg/bidichk/bidichk.go b/vendor/github.com/breml/bidichk/pkg/bidichk/bidichk.go index f1bf20faba..39d3cd44ec 100644 --- a/vendor/github.com/breml/bidichk/pkg/bidichk/bidichk.go +++ b/vendor/github.com/breml/bidichk/pkg/bidichk/bidichk.go @@ -14,7 +14,7 @@ import ( ) const ( - doc = "bidichk detects dangerous unicode character sequences" + doc = "Checks for dangerous unicode character sequences" 
disallowedDoc = `comma separated list of disallowed runes (full name or short name) Supported runes @@ -142,25 +142,28 @@ func NewAnalyzer() *analysis.Analyzer { } func (b bidichk) run(pass *analysis.Pass) (interface{}, error) { - var err error + readFile := pass.ReadFile + if readFile == nil { + readFile = os.ReadFile + } - pass.Fset.Iterate(func(f *token.File) bool { - if strings.HasPrefix(f.Name(), "$GOROOT") { - return true + for _, astFile := range pass.Files { + f := pass.Fset.File(astFile.FileStart) + if f == nil { + continue } - return b.check(f.Name(), f.Pos(0), pass) == nil - }) - - return nil, err -} + body, err := readFile(f.Name()) + if err != nil { + return nil, err + } -func (b bidichk) check(filename string, pos token.Pos, pass *analysis.Pass) error { - body, err := os.ReadFile(filename) - if err != nil { - return err + b.check(body, f.Pos(0), pass) } + return nil, nil +} +func (b bidichk) check(body []byte, pos token.Pos, pass *analysis.Pass) { for name, r := range b.disallowedRunes { start := 0 for { @@ -175,6 +178,4 @@ func (b bidichk) check(filename string, pos token.Pos, pass *analysis.Pass) erro start += utf8.RuneLen(r) } } - - return nil } diff --git a/vendor/github.com/breml/errchkjson/.goreleaser.yml b/vendor/github.com/breml/errchkjson/.goreleaser.yml index a05c172cb6..1113690539 100644 --- a/vendor/github.com/breml/errchkjson/.goreleaser.yml +++ b/vendor/github.com/breml/errchkjson/.goreleaser.yml @@ -1,3 +1,6 @@ +--- +version: 2 + # This is an example .goreleaser.yml file with some sane defaults. # Make sure to check the documentation at http://goreleaser.com before: @@ -23,9 +26,9 @@ archives: {{- else }}{{ .Arch }}{{ end }} {{- if .Arm }}v{{ .Arm }}{{ end -}} snapshot: - name_template: "{{ .Tag }}-next" + version_template: "{{ .Tag }}-next" changelog: - skip: true + disable: true release: github: owner: breml diff --git a/vendor/github.com/breml/errchkjson/README.md b/vendor/github.com/breml/errchkjson/README.md index 1979597387..a387ea23d2 100644 --- a/vendor/github.com/breml/errchkjson/README.md +++ b/vendor/github.com/breml/errchkjson/README.md @@ -55,7 +55,7 @@ response type, the linter will warn you. Download `errchkjson` from the [releases](https://github.com/breml/errchkjson/releases) or get the latest version from source with: ```shell -go get github.com/breml/errchkjson/cmd/errchkjson +go install github.com/breml/errchkjson/cmd/errchkjson@latest ``` ## Usage diff --git a/vendor/github.com/breml/errchkjson/errchkjson.go b/vendor/github.com/breml/errchkjson/errchkjson.go index 4a23929cf2..7c8cd82e96 100644 --- a/vendor/github.com/breml/errchkjson/errchkjson.go +++ b/vendor/github.com/breml/errchkjson/errchkjson.go @@ -25,7 +25,7 @@ func NewAnalyzer() *analysis.Analyzer { a := &analysis.Analyzer{ Name: "errchkjson", - Doc: "Checks types passed to the json encoding functions. Reports unsupported types and reports occations, where the check for the returned error can be omitted.", + Doc: "Checks types passed to the json encoding functions. 
Reports unsupported types and reports occurrences where the check for the returned error can be omitted.", Run: errchkjson.run, } diff --git a/vendor/github.com/ckaznocha/intrange/intrange.go b/vendor/github.com/ckaznocha/intrange/intrange.go index 33cddf3035..44a15091e7 100644 --- a/vendor/github.com/ckaznocha/intrange/intrange.go +++ b/vendor/github.com/ckaznocha/intrange/intrange.go @@ -25,8 +25,9 @@ var ( ) const ( - msg = "for loop can be changed to use an integer range (Go 1.22+)" - msgLenRange = "for loop can be changed to `i := range %s`" + msg = "for loop can be changed to use an integer range (Go 1.22+)" + msgLenRange = "for loop can be changed to `%s := range %s`" + msgLenRangeNoIdent = "for loop can be changed to `range %s`" ) func run(pass *analysis.Pass) (any, error) { @@ -243,21 +244,26 @@ func checkForStmt(pass *analysis.Pass, forStmt *ast.ForStmt) { } func checkRangeStmt(pass *analysis.Pass, rangeStmt *ast.RangeStmt) { - if rangeStmt.Key == nil { + if rangeStmt.Value != nil { return } - ident, ok := rangeStmt.Key.(*ast.Ident) - if !ok { - return - } + startPos := rangeStmt.Range + usesKey := rangeStmt.Key != nil + identName := "" - if ident.Name == "_" { - return - } + if usesKey { + ident, ok := rangeStmt.Key.(*ast.Ident) + if !ok { + return + } - if rangeStmt.Value != nil { - return + if ident.Name == "_" { + usesKey = false + } + + identName = ident.Name + startPos = ident.Pos() } if rangeStmt.X == nil { @@ -295,18 +301,40 @@ func checkRangeStmt(pass *analysis.Pass, rangeStmt *ast.RangeStmt) { return } + if usesKey { + pass.Report(analysis.Diagnostic{ + Pos: startPos, + End: x.End(), + Message: fmt.Sprintf(msgLenRange, identName, arg.Name), + SuggestedFixes: []analysis.SuggestedFix{ + { + Message: fmt.Sprintf("Replace `len(%s)` with `%s`", arg.Name, arg.Name), + TextEdits: []analysis.TextEdit{ + { + Pos: x.Pos(), + End: x.End(), + NewText: []byte(arg.Name), + }, + }, + }, + }, + }) + + return + } + pass.Report(analysis.Diagnostic{ - Pos: ident.Pos(), + Pos: startPos, End: x.End(), - Message: fmt.Sprintf(msgLenRange, arg.Name), + Message: fmt.Sprintf(msgLenRangeNoIdent, arg.Name), SuggestedFixes: []analysis.SuggestedFix{ { Message: fmt.Sprintf("Replace `len(%s)` with `%s`", arg.Name, arg.Name), TextEdits: []analysis.TextEdit{ { - Pos: x.Pos(), + Pos: startPos, End: x.End(), - NewText: []byte(arg.Name), + NewText: []byte(fmt.Sprintf("range %s", arg.Name)), }, }, }, diff --git a/vendor/github.com/fatih/color/README.md b/vendor/github.com/fatih/color/README.md index be82827cac..d135bfe023 100644 --- a/vendor/github.com/fatih/color/README.md +++ b/vendor/github.com/fatih/color/README.md @@ -9,7 +9,7 @@ suits you. ## Install -```bash +``` go get github.com/fatih/color ``` @@ -30,6 +30,18 @@ color.Magenta("And many others ..") ``` +### RGB colors + +If your terminal supports 24-bit colors, you can use RGB color codes. 
+ +```go +color.RGB(255, 128, 0).Println("foreground orange") +color.RGB(230, 42, 42).Println("foreground red") + +color.BgRGB(255, 128, 0).Println("background orange") +color.BgRGB(230, 42, 42).Println("background red") +``` + ### Mix and reuse colors ```go @@ -49,6 +61,11 @@ boldRed.Println("This will print text in bold red.") whiteBackground := red.Add(color.BgWhite) whiteBackground.Println("Red text with white background.") + +// Mix with RGB color codes +color.RGB(255, 128, 0).AddBgRGB(0, 0, 0).Println("orange with black background") + +color.BgRGB(255, 128, 0).AddRGB(255, 255, 255).Println("orange background with white foreground") ``` ### Use your own output (io.Writer) @@ -161,10 +178,6 @@ c.Println("This prints again cyan...") To output color in GitHub Actions (or other CI systems that support ANSI colors), make sure to set `color.NoColor = false` so that it bypasses the check for non-tty output streams. -## Todo - -* Save/Return previous values -* Evaluate fmt.Formatter interface ## Credits diff --git a/vendor/github.com/fatih/color/color.go b/vendor/github.com/fatih/color/color.go index 81094e87c5..ee39b408e9 100644 --- a/vendor/github.com/fatih/color/color.go +++ b/vendor/github.com/fatih/color/color.go @@ -98,6 +98,9 @@ const ( FgMagenta FgCyan FgWhite + + // used internally for 256 and 24-bit coloring + foreground ) // Foreground Hi-Intensity text colors @@ -122,6 +125,9 @@ const ( BgMagenta BgCyan BgWhite + + // used internally for 256 and 24-bit coloring + background ) // Background Hi-Intensity text colors @@ -150,6 +156,30 @@ func New(value ...Attribute) *Color { return c } +// RGB returns a new foreground color in 24-bit RGB. +func RGB(r, g, b int) *Color { + return New(foreground, 2, Attribute(r), Attribute(g), Attribute(b)) +} + +// BgRGB returns a new background color in 24-bit RGB. +func BgRGB(r, g, b int) *Color { + return New(background, 2, Attribute(r), Attribute(g), Attribute(b)) +} + +// AddRGB is used to chain foreground RGB SGR parameters. Use as many parameters as needed to combine +// and create custom color objects. Example: .AddRGB(34, 0, 12).AddRGB(255, 128, 0). +func (c *Color) AddRGB(r, g, b int) *Color { + c.params = append(c.params, foreground, 2, Attribute(r), Attribute(g), Attribute(b)) + return c +} + +// AddBgRGB is used to chain background RGB SGR parameters. Use as many parameters as needed to combine +// and create custom color objects. Example: .AddBgRGB(34, 0, 12).AddBgRGB(255, 128, 0). +func (c *Color) AddBgRGB(r, g, b int) *Color { + c.params = append(c.params, background, 2, Attribute(r), Attribute(g), Attribute(b)) + return c +} + // Set sets the given parameters immediately. It will change the color of // output with the given SGR parameters until color.Unset() is called.
func Set(p ...Attribute) *Color { @@ -401,7 +431,7 @@ func (c *Color) format() string { func (c *Color) unformat() string { //return fmt.Sprintf("%s[%dm", escape, Reset) - //for each element in sequence let's use the speficic reset escape, ou the generic one if not found + //for each element in sequence let's use the specific reset escape, or the generic one if not found format := make([]string, len(c.params)) for i, v := range c.params { format[i] = strconv.Itoa(int(Reset)) diff --git a/vendor/github.com/ghostiam/protogetter/processor.go b/vendor/github.com/ghostiam/protogetter/processor.go index eca82939d8..44f346e850 100644 --- a/vendor/github.com/ghostiam/protogetter/processor.go +++ b/vendor/github.com/ghostiam/protogetter/processor.go @@ -218,7 +218,7 @@ func (c *processor) processInner(expr ast.Expr) { c.write("*") c.processInner(x.X) - case *ast.CompositeLit, *ast.TypeAssertExpr, *ast.ArrayType, *ast.FuncLit: + case *ast.CompositeLit, *ast.TypeAssertExpr, *ast.ArrayType, *ast.FuncLit, *ast.SliceExpr: // Process the node as is. c.write(formatNode(x)) diff --git a/vendor/github.com/go-critic/go-critic/checkers/caseOrder_checker.go b/vendor/github.com/go-critic/go-critic/checkers/caseOrder_checker.go index 306756834b..345274f1c8 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/caseOrder_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/caseOrder_checker.go @@ -82,7 +82,7 @@ func (c *caseOrderChecker) warnUnknownType(cause, concrete ast.Node) { c.ctx.Warn(cause, "type is not defined %s", concrete) } -func (c *caseOrderChecker) checkSwitch(s *ast.SwitchStmt) { +func (c *caseOrderChecker) checkSwitch(_ *ast.SwitchStmt) { // TODO(quasilyte): can handle expression cases that overlap. // Cases that have narrower value range should go before wider ones. } diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walk_handler.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walk_handler.go index 1f6e948d5c..96d2dd0e6f 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walk_handler.go +++ b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walk_handler.go @@ -17,7 +17,7 @@ type WalkHandler struct { // EnterFile is a default walkerEvents.EnterFile implementation // that reports every file as accepted candidate for checking. -func (w *WalkHandler) EnterFile(f *ast.File) bool { +func (w *WalkHandler) EnterFile(_ *ast.File) bool { return true } diff --git a/vendor/github.com/go-critic/go-critic/checkers/rangeAppendAll_checker.go b/vendor/github.com/go-critic/go-critic/checkers/rangeAppendAll_checker.go new file mode 100644 index 0000000000..f4851d4024 --- /dev/null +++ b/vendor/github.com/go-critic/go-critic/checkers/rangeAppendAll_checker.go @@ -0,0 +1,100 @@ +package checkers + +import ( + "go/ast" + "go/token" + + "github.com/go-critic/go-critic/checkers/internal/astwalk" + "github.com/go-critic/go-critic/linter" + "github.com/go-toolsmith/astcast" + "golang.org/x/tools/go/ast/astutil" +) + +func init() { + var info linter.CheckerInfo + info.Name = "rangeAppendAll" + info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag} + info.Summary = "Detects append all its data while range it" + info.Before = `for _, n := range ns { + ... + rs = append(rs, ns...) // append all slice data + } +}` + info.After = `for _, n := range ns { + ... 
+ rs = append(rs, n) + } +}` + + collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) { + c := &rangeAppendAllChecker{ctx: ctx} + return astwalk.WalkerForStmt(c), nil + }) +} + +type rangeAppendAllChecker struct { + astwalk.WalkHandler + ctx *linter.CheckerContext +} + +func (c *rangeAppendAllChecker) VisitStmt(stmt ast.Stmt) { + rangeStmt, ok := stmt.(*ast.RangeStmt) + if !ok || len(rangeStmt.Body.List) == 0 { + return + } + rangeIdent, ok := rangeStmt.X.(*ast.Ident) + if !ok { + return + } + rangeObj := c.ctx.TypesInfo.ObjectOf(rangeIdent) + + astutil.Apply(rangeStmt.Body, nil, func(cur *astutil.Cursor) bool { + appendFrom := c.getValidAppendFrom(cur.Node()) + if appendFrom != nil { + appendFromObj := c.ctx.TypesInfo.ObjectOf(appendFrom) + if appendFromObj == rangeObj { + c.warn(appendFrom) + } + } + return true + }) +} + +func (c *rangeAppendAllChecker) getValidAppendFrom(expr ast.Node) *ast.Ident { + call := astcast.ToCallExpr(expr) + if len(call.Args) != 2 || call.Ellipsis == token.NoPos { + return nil + } + if qualifiedName(call.Fun) != "append" { + return nil + } + if c.isSliceLiteral(call.Args[0]) { + return nil + } + appendFrom, ok := call.Args[1].(*ast.Ident) + if !ok { + return nil + } + return appendFrom +} + +func (c *rangeAppendAllChecker) isSliceLiteral(arg ast.Expr) bool { + switch v := arg.(type) { + // []T{}, []T{n} + case *ast.CompositeLit: + return true + // []T(nil) + case *ast.CallExpr: + if astcast.ToArrayType(v.Fun) != astcast.NilArrayType && len(v.Args) == 1 { + id := astcast.ToIdent(v.Args[0]) + return id.Name == "nil" && id.Obj == nil + } + return false + default: + return false + } +} + +func (c *rangeAppendAllChecker) warn(appendFrom *ast.Ident) { + c.ctx.Warn(appendFrom, "append all `%s` data while range it", appendFrom) +} diff --git a/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go b/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go index 29723a69a9..485819842c 100644 --- a/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go +++ b/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go @@ -87,7 +87,7 @@ func newErrorHandler(failOnErrorFlag string) (*parseErrorHandler, error) { failOnErrorPredicates := map[string]func(error) bool{ "dsl": func(err error) bool { var e *ruleguard.ImportError; return !errors.As(err, &e) }, "import": func(err error) bool { var e *ruleguard.ImportError; return errors.As(err, &e) }, - "all": func(err error) bool { return true }, + "all": func(_ error) bool { return true }, } for _, k := range strings.Split(failOnErrorFlag, ",") { if k == "" { diff --git a/vendor/github.com/go-critic/go-critic/linter/helpers.go b/vendor/github.com/go-critic/go-critic/linter/helpers.go index 0a3fc0292f..d5110df642 100644 --- a/vendor/github.com/go-critic/go-critic/linter/helpers.go +++ b/vendor/github.com/go-critic/go-critic/linter/helpers.go @@ -116,7 +116,7 @@ func validateCheckerName(info *CheckerInfo) error { return nil } -func validateCheckerDocumentation(info *CheckerInfo) error { +func validateCheckerDocumentation(_ *CheckerInfo) error { // TODO(quasilyte): validate documentation. 
return nil } diff --git a/vendor/github.com/go-viper/mapstructure/v2/decode_hooks.go b/vendor/github.com/go-viper/mapstructure/v2/decode_hooks.go index 2523c6ad98..1f3c69d4b8 100644 --- a/vendor/github.com/go-viper/mapstructure/v2/decode_hooks.go +++ b/vendor/github.com/go-viper/mapstructure/v2/decode_hooks.go @@ -6,6 +6,7 @@ import ( "fmt" "net" "net/netip" + "net/url" "reflect" "strconv" "strings" @@ -176,6 +177,26 @@ func StringToTimeDurationHookFunc() DecodeHookFunc { } } +// StringToURLHookFunc returns a DecodeHookFunc that converts +// strings to *url.URL. +func StringToURLHookFunc() DecodeHookFunc { + return func( + f reflect.Type, + t reflect.Type, + data interface{}, + ) (interface{}, error) { + if f.Kind() != reflect.String { + return data, nil + } + if t != reflect.TypeOf(&url.URL{}) { + return data, nil + } + + // Convert it by parsing + return url.Parse(data.(string)) + } +} + // StringToIPHookFunc returns a DecodeHookFunc that converts // strings to net.IP func StringToIPHookFunc() DecodeHookFunc { diff --git a/vendor/github.com/go-viper/mapstructure/v2/mapstructure.go b/vendor/github.com/go-viper/mapstructure/v2/mapstructure.go index 1cd6204bb0..e77e63ba38 100644 --- a/vendor/github.com/go-viper/mapstructure/v2/mapstructure.go +++ b/vendor/github.com/go-viper/mapstructure/v2/mapstructure.go @@ -278,6 +278,10 @@ type DecoderConfig struct { // field name or tag. Defaults to `strings.EqualFold`. This can be used // to implement case-sensitive tag values, support snake casing, etc. MatchName func(mapKey, fieldName string) bool + + // DecodeNil, if set to true, will cause the DecodeHook (if present) to run + // even if the input is nil. This can be used to provide default values. + DecodeNil bool } // A Decoder takes a raw interface value and turns it into structured @@ -438,19 +442,26 @@ func (d *Decoder) Decode(input interface{}) error { return err } +// isNil returns true if the input is nil or a typed nil pointer. +func isNil(input interface{}) bool { + if input == nil { + return true + } + val := reflect.ValueOf(input) + return val.Kind() == reflect.Ptr && val.IsNil() +} + // Decodes an unknown data type into a specific reflection value. func (d *Decoder) decode(name string, input interface{}, outVal reflect.Value) error { - var inputVal reflect.Value - if input != nil { - inputVal = reflect.ValueOf(input) - - // We need to check here if input is a typed nil. Typed nils won't - // match the "input == nil" below so we check that here. - if inputVal.Kind() == reflect.Ptr && inputVal.IsNil() { - input = nil - } + var ( + inputVal = reflect.ValueOf(input) + outputKind = getKind(outVal) + decodeNil = d.config.DecodeNil && d.cachedDecodeHook != nil + ) + if isNil(input) { + // Typed nils won't match the "input == nil" below, so reset input. + input = nil } - if input == nil { // If the data is nil, then we don't set anything, unless ZeroFields is set // to true. @@ -461,17 +472,31 @@ func (d *Decoder) decode(name string, input interface{}, outVal reflect.Value) e d.config.Metadata.Keys = append(d.config.Metadata.Keys, name) } } - return nil + if !decodeNil { + return nil + } } - if !inputVal.IsValid() { - // If the input value is invalid, then we just set the value - // to be the zero value. - outVal.Set(reflect.Zero(outVal.Type())) - if d.config.Metadata != nil && name != "" { - d.config.Metadata.Keys = append(d.config.Metadata.Keys, name) + if !decodeNil { + // If the input value is invalid, then we just set the value + // to be the zero value. 
+ outVal.Set(reflect.Zero(outVal.Type())) + if d.config.Metadata != nil && name != "" { + d.config.Metadata.Keys = append(d.config.Metadata.Keys, name) + } + return nil + } + // Hooks need a valid inputVal, so reset it to zero value of outVal type. + switch outputKind { + case reflect.Struct, reflect.Map: + var mapVal map[string]interface{} + inputVal = reflect.ValueOf(mapVal) // create nil map pointer + case reflect.Slice, reflect.Array: + var sliceVal []interface{} + inputVal = reflect.ValueOf(sliceVal) // create nil slice pointer + default: + inputVal = reflect.Zero(outVal.Type()) } - return nil } if d.cachedDecodeHook != nil { @@ -482,9 +507,11 @@ func (d *Decoder) decode(name string, input interface{}, outVal reflect.Value) e return fmt.Errorf("error decoding '%s': %w", name, err) } } + if isNil(input) { + return nil + } var err error - outputKind := getKind(outVal) addMetaKey := true switch outputKind { case reflect.Bool: @@ -765,8 +792,8 @@ func (d *Decoder) decodeBool(name string, data interface{}, val reflect.Value) e } default: return fmt.Errorf( - "'%s' expected type '%s', got unconvertible type '%s', value: '%v'", - name, val.Type(), dataVal.Type(), data) + "'%s' expected type '%s', got unconvertible type '%#v', value: '%#v'", + name, val, dataVal, data) } return nil diff --git a/vendor/github.com/jirfag/go-printf-func-name/LICENSE b/vendor/github.com/golangci/go-printf-func-name/LICENSE similarity index 96% rename from vendor/github.com/jirfag/go-printf-func-name/LICENSE rename to vendor/github.com/golangci/go-printf-func-name/LICENSE index d06a809c26..4585140d18 100644 --- a/vendor/github.com/jirfag/go-printf-func-name/LICENSE +++ b/vendor/github.com/golangci/go-printf-func-name/LICENSE @@ -1,5 +1,6 @@ MIT License +Copyright (c) 2024 Golangci-lint authors Copyright (c) 2020 Isaev Denis Permission is hereby granted, free of charge, to any person obtaining a copy diff --git a/vendor/github.com/jirfag/go-printf-func-name/pkg/analyzer/analyzer.go b/vendor/github.com/golangci/go-printf-func-name/pkg/analyzer/analyzer.go similarity index 93% rename from vendor/github.com/jirfag/go-printf-func-name/pkg/analyzer/analyzer.go rename to vendor/github.com/golangci/go-printf-func-name/pkg/analyzer/analyzer.go index 7937dd4337..bce4b242ed 100644 --- a/vendor/github.com/jirfag/go-printf-func-name/pkg/analyzer/analyzer.go +++ b/vendor/github.com/golangci/go-printf-func-name/pkg/analyzer/analyzer.go @@ -4,10 +4,9 @@ import ( "go/ast" "strings" + "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" - - "golang.org/x/tools/go/analysis" ) var Analyzer = &analysis.Analyzer{ @@ -18,12 +17,13 @@ var Analyzer = &analysis.Analyzer{ } func run(pass *analysis.Pass) (interface{}, error) { - inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + insp := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + nodeFilter := []ast.Node{ (*ast.FuncDecl)(nil), } - inspector.Preorder(nodeFilter, func(node ast.Node) { + insp.Preorder(nodeFilter, func(node ast.Node) { funcDecl := node.(*ast.FuncDecl) if res := funcDecl.Type.Results; res != nil && len(res.List) != 0 { diff --git a/vendor/github.com/golangci/golangci-lint/internal/cache/cache.go b/vendor/github.com/golangci/golangci-lint/internal/cache/cache.go index 299fd52790..c249084e1c 100644 --- a/vendor/github.com/golangci/golangci-lint/internal/cache/cache.go +++ b/vendor/github.com/golangci/golangci-lint/internal/cache/cache.go @@ -1,525 +1,298 @@ -// Copyright 2017 The Go 
Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package cache implements a build artifact cache. -// -// This package is a slightly modified fork of Go's -// cmd/go/internal/cache package. package cache import ( "bytes" - "crypto/sha256" + "encoding/gob" "encoding/hex" "errors" "fmt" - "io" - "os" - "path/filepath" - "strconv" + "runtime" + "slices" "strings" - "time" + "sync" - "github.com/golangci/golangci-lint/internal/renameio" - "github.com/golangci/golangci-lint/internal/robustio" + "golang.org/x/exp/maps" + "golang.org/x/tools/go/packages" + + "github.com/golangci/golangci-lint/internal/go/cache" + "github.com/golangci/golangci-lint/pkg/logutils" + "github.com/golangci/golangci-lint/pkg/timeutils" +) + +type HashMode int + +const ( + HashModeNeedOnlySelf HashMode = iota + HashModeNeedDirectDeps + HashModeNeedAllDeps ) -// An ActionID is a cache action key, the hash of a complete description of a -// repeatable computation (command line, environment variables, -// input file contents, executable contents). -type ActionID [HashSize]byte +var ErrMissing = errors.New("missing data") -// An OutputID is a cache output key, the hash of an output of a computation. -type OutputID [HashSize]byte +type hashResults map[HashMode]string -// A Cache is a package cache, backed by a file system directory tree. +// Cache is a per-package data cache. +// Cached data is invalidated when the package +// or its dependencies change. type Cache struct { - dir string - now func() time.Time + lowLevelCache cache.Cache + pkgHashes sync.Map + sw *timeutils.Stopwatch + log logutils.Log + ioSem chan struct{} // semaphore limiting parallel IO +} + +func NewCache(sw *timeutils.Stopwatch, log logutils.Log) (*Cache, error) { + return &Cache{ + lowLevelCache: cache.Default(), + sw: sw, + log: log, + ioSem: make(chan struct{}, runtime.GOMAXPROCS(-1)), + }, nil } -// Open opens and returns the cache in the given directory. -// -// It is safe for multiple processes on a single machine to use the -// same cache directory in a local file system simultaneously. -// They will coordinate using operating system file locks and may -// duplicate effort but will not corrupt the cache. -// -// However, it is NOT safe for multiple processes on different machines -// to share a cache directory (for example, if the directory were stored -// in a network file system). File locking is notoriously unreliable in -// network file systems and may not suffice to protect the cache. -func Open(dir string) (*Cache, error) { - info, err := os.Stat(dir) +func (c *Cache) Close() { + err := c.sw.TrackStageErr("close", c.lowLevelCache.Close) if err != nil { - return nil, err + c.log.Errorf("cache close: %v", err) } - if !info.IsDir() { - return nil, &os.PathError{Op: "open", Path: dir, Err: errors.New("not a directory")} - } - for i := 0; i < 256; i++ { - name := filepath.Join(dir, fmt.Sprintf("%02x", i)) - if err := os.MkdirAll(name, 0744); err != nil { - return nil, err - } - } - c := &Cache{ - dir: dir, - now: time.Now, - } - return c, nil }
-func (c *Cache) fileName(id [HashSize]byte, key string) string { - return filepath.Join(c.dir, fmt.Sprintf("%02x", id[0]), fmt.Sprintf("%x", id)+"-"+key) -} +func (c *Cache) Put(pkg *packages.Package, mode HashMode, key string, data any) error { + buf, err := c.encode(data) + if err != nil { + return err + } -var errMissing = errors.New("cache entry not found") + actionID, err := c.buildKey(pkg, mode, key) + if err != nil { + return fmt.Errorf("failed to calculate package %s action id: %w", pkg.Name, err) + } + + err = c.putBytes(actionID, buf) + if err != nil { + return fmt.Errorf("failed to save data to low-level cache by key %s for package %s: %w", key, pkg.Name, err) + } -func IsErrMissing(err error) bool { - return errors.Is(err, errMissing) + return nil } -const ( - // action entry file is "v1 \n" - hexSize = HashSize * 2 - entrySize = 2 + 1 + hexSize + 1 + hexSize + 1 + 20 + 1 + 20 + 1 -) +func (c *Cache) Get(pkg *packages.Package, mode HashMode, key string, data any) error { + actionID, err := c.buildKey(pkg, mode, key) + if err != nil { + return fmt.Errorf("failed to calculate package %s action id: %w", pkg.Name, err) + } -// verify controls whether to run the cache in verify mode. -// In verify mode, the cache always returns errMissing from Get -// but then double-checks in Put that the data being written -// exactly matches any existing entry. This provides an easy -// way to detect program behavior that would have been different -// had the cache entry been returned from Get. -// -// verify is enabled by setting the environment variable -// GODEBUG=gocacheverify=1. -var verify = false - -// DebugTest is set when GODEBUG=gocachetest=1 is in the environment. -var DebugTest = false - -func init() { initEnv() } - -func initEnv() { - verify = false - debugHash = false - debug := strings.Split(os.Getenv("GODEBUG"), ",") - for _, f := range debug { - if f == "gocacheverify=1" { - verify = true - } - if f == "gocachehash=1" { - debugHash = true - } - if f == "gocachetest=1" { - DebugTest = true + cachedData, err := c.getBytes(actionID) + if err != nil { + if cache.IsErrMissing(err) { + return ErrMissing } + return fmt.Errorf("failed to get data from low-level cache by key %s for package %s: %w", key, pkg.Name, err) } -} -// Get looks up the action ID in the cache, -// returning the corresponding output ID and file size, if any. -// Note that finding an output ID does not guarantee that the -// saved file for that output ID is still available. -func (c *Cache) Get(id ActionID) (Entry, error) { - if verify { - return Entry{}, errMissing - } - return c.get(id) + return c.decode(cachedData, data) } -type Entry struct { - OutputID OutputID - Size int64 - Time time.Time +func (c *Cache) buildKey(pkg *packages.Package, mode HashMode, key string) (cache.ActionID, error) { + return timeutils.TrackStage(c.sw, "key build", func() (cache.ActionID, error) { + actionID, err := c.pkgActionID(pkg, mode) + if err != nil { + return actionID, err + } + + subkey, subkeyErr := cache.Subkey(actionID, key) + if subkeyErr != nil { + return actionID, fmt.Errorf("failed to build subkey: %w", subkeyErr) + } + + return subkey, nil + }) } -// get is Get but does not respect verify mode, so that Put can use it. 
-func (c *Cache) get(id ActionID) (Entry, error) { - missing := func() (Entry, error) { - return Entry{}, errMissing - } - failed := func(err error) (Entry, error) { - return Entry{}, err - } - fileName := c.fileName(id, "a") - f, err := os.Open(fileName) +func (c *Cache) pkgActionID(pkg *packages.Package, mode HashMode) (cache.ActionID, error) { + hash, err := c.packageHash(pkg, mode) if err != nil { - if os.IsNotExist(err) { - return missing() - } - return failed(err) - } - defer f.Close() - entry := make([]byte, entrySize+1) // +1 to detect whether f is too long - if n, readErr := io.ReadFull(f, entry); n != entrySize || readErr != io.ErrUnexpectedEOF { - return failed(fmt.Errorf("read %d/%d bytes from %s with error %w", n, entrySize, fileName, readErr)) - } - if entry[0] != 'v' || entry[1] != '1' || entry[2] != ' ' || entry[3+hexSize] != ' ' || entry[3+hexSize+1+hexSize] != ' ' || entry[3+hexSize+1+hexSize+1+20] != ' ' || entry[entrySize-1] != '\n' { - return failed(fmt.Errorf("bad data in %s", fileName)) - } - eid, entry := entry[3:3+hexSize], entry[3+hexSize:] - eout, entry := entry[1:1+hexSize], entry[1+hexSize:] - esize, entry := entry[1:1+20], entry[1+20:] - etime := entry[1 : 1+20] - var buf [HashSize]byte - if _, err = hex.Decode(buf[:], eid); err != nil || buf != id { - return failed(fmt.Errorf("failed to hex decode eid data in %s: %w", fileName, err)) - } - if _, err = hex.Decode(buf[:], eout); err != nil { - return failed(fmt.Errorf("failed to hex decode eout data in %s: %w", fileName, err)) - } - i := 0 - for i < len(esize) && esize[i] == ' ' { - i++ - } - size, err := strconv.ParseInt(string(esize[i:]), 10, 64) - if err != nil || size < 0 { - return failed(fmt.Errorf("failed to parse esize int from %s with error %w", fileName, err)) - } - i = 0 - for i < len(etime) && etime[i] == ' ' { - i++ - } - tm, err := strconv.ParseInt(string(etime[i:]), 10, 64) - if err != nil || tm < 0 { - return failed(fmt.Errorf("failed to parse etime int from %s with error %w", fileName, err)) + return cache.ActionID{}, fmt.Errorf("failed to get package hash: %w", err) } - if err = c.used(fileName); err != nil { - return failed(fmt.Errorf("failed to mark %s as used: %w", fileName, err)) + key, err := cache.NewHash("action ID") + if err != nil { + return cache.ActionID{}, fmt.Errorf("failed to make a hash: %w", err) } - return Entry{buf, size, time.Unix(0, tm)}, nil + fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath) + fmt.Fprintf(key, "pkghash %s\n", hash) + + return key.Sum(), nil } -// GetBytes looks up the action ID in the cache and returns -// the corresponding output bytes. -// GetBytes should only be used for data that can be expected to fit in memory. 
-func (c *Cache) GetBytes(id ActionID) ([]byte, Entry, error) { - entry, err := c.Get(id) - if err != nil { - return nil, entry, err - } - outputFile, err := c.OutputFile(entry.OutputID) - if err != nil { - return nil, entry, err +func (c *Cache) packageHash(pkg *packages.Package, mode HashMode) (string, error) { + results, found := c.pkgHashes.Load(pkg) + if found { + hashRes := results.(hashResults) + if result, ok := hashRes[mode]; ok { + return result, nil + } + + return "", fmt.Errorf("no mode %d in hash result", mode) } - data, err := robustio.ReadFile(outputFile) + hashRes, err := c.computePkgHash(pkg) if err != nil { - return nil, entry, err + return "", err } - if sha256.Sum256(data) != entry.OutputID { - return nil, entry, errMissing + result, found := hashRes[mode] + if !found { + return "", fmt.Errorf("invalid mode %d", mode) } - return data, entry, nil + + c.pkgHashes.Store(pkg, hashRes) + + return result, nil } -// OutputFile returns the name of the cache file storing output with the given OutputID. -func (c *Cache) OutputFile(out OutputID) (string, error) { - file := c.fileName(out, "d") - if err := c.used(file); err != nil { - return "", err +// computePkgHash computes a package's hash. +// The hash is based on all Go files that make up the package, +// as well as the hashes of imported packages. +func (c *Cache) computePkgHash(pkg *packages.Package) (hashResults, error) { + key, err := cache.NewHash("package hash") + if err != nil { + return nil, fmt.Errorf("failed to make a hash: %w", err) } - return file, nil -} -// Time constants for cache expiration. -// -// We set the mtime on a cache file on each use, but at most one per mtimeInterval (1 hour), -// to avoid causing many unnecessary inode updates. The mtimes therefore -// roughly reflect "time of last use" but may in fact be older by at most an hour. -// -// We scan the cache for entries to delete at most once per trimInterval (1 day). -// -// When we do scan the cache, we delete entries that have not been used for -// at least trimLimit (5 days). Statistics gathered from a month of usage by -// Go developers found that essentially all reuse of cached entries happened -// within 5 days of the previous reuse. See golang.org/issue/22990. -const ( - mtimeInterval = 1 * time.Hour - trimInterval = 24 * time.Hour - trimLimit = 5 * 24 * time.Hour -) + hashRes := hashResults{} -// used makes a best-effort attempt to update mtime on file, -// so that mtime reflects cache access time. -// -// Because the reflection only needs to be approximate, -// and to reduce the amount of disk activity caused by using -// cache entries, used only updates the mtime if the current -// mtime is more than an hour old. This heuristic eliminates -// nearly all the mtime updates that would otherwise happen, -// while still keeping the mtimes useful for cache trimming. 
-func (c *Cache) used(file string) error { - info, err := os.Stat(file) - if err != nil { - if os.IsNotExist(err) { - return errMissing + fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath) + + for _, f := range pkg.CompiledGoFiles { + h, fErr := c.fileHash(f) + if fErr != nil { + return nil, fmt.Errorf("failed to calculate file %s hash: %w", f, fErr) } - return fmt.Errorf("failed to stat file %s: %w", file, err) - } - if c.now().Sub(info.ModTime()) < mtimeInterval { - return nil + fmt.Fprintf(key, "file %s %x\n", f, h) } - if err := os.Chtimes(file, c.now(), c.now()); err != nil { - return fmt.Errorf("failed to change time of file %s: %w", file, err) - } + curSum := key.Sum() + hashRes[HashModeNeedOnlySelf] = hex.EncodeToString(curSum[:]) - return nil -} + imps := maps.Values(pkg.Imports) -// Trim removes old cache entries that are likely not to be reused. -func (c *Cache) Trim() { - now := c.now() - - // We maintain in dir/trim.txt the time of the last completed cache trim. - // If the cache has been trimmed recently enough, do nothing. - // This is the common case. - data, _ := renameio.ReadFile(filepath.Join(c.dir, "trim.txt")) - t, err := strconv.ParseInt(strings.TrimSpace(string(data)), 10, 64) - if err == nil && now.Sub(time.Unix(t, 0)) < trimInterval { - return - } + slices.SortFunc(imps, func(a, b *packages.Package) int { + return strings.Compare(a.PkgPath, b.PkgPath) + }) - // Trim each of the 256 subdirectories. - // We subtract an additional mtimeInterval - // to account for the imprecision of our "last used" mtimes. - cutoff := now.Add(-trimLimit - mtimeInterval) - for i := 0; i < 256; i++ { - subdir := filepath.Join(c.dir, fmt.Sprintf("%02x", i)) - c.trimSubdir(subdir, cutoff) + if err := c.computeDepsHash(HashModeNeedOnlySelf, imps, key); err != nil { + return nil, err } - // Ignore errors from here: if we don't write the complete timestamp, the - // cache will appear older than it is, and we'll trim it again next time. - _ = renameio.WriteFile(filepath.Join(c.dir, "trim.txt"), []byte(fmt.Sprintf("%d", now.Unix())), 0666) -} + curSum = key.Sum() + hashRes[HashModeNeedDirectDeps] = hex.EncodeToString(curSum[:]) -// trimSubdir trims a single cache subdirectory. -func (c *Cache) trimSubdir(subdir string, cutoff time.Time) { - // Read all directory entries from subdir before removing - // any files, in case removing files invalidates the file offset - // in the directory scan. Also, ignore error from f.Readdirnames, - // because we don't care about reporting the error, and we still - // want to process any entries found before the error. - f, err := os.Open(subdir) - if err != nil { - return + if err := c.computeDepsHash(HashModeNeedAllDeps, imps, key); err != nil { + return nil, err } - names, _ := f.Readdirnames(-1) - f.Close() - for _, name := range names { - // Remove only cache entries (xxxx-a and xxxx-d). 
- if !strings.HasSuffix(name, "-a") && !strings.HasSuffix(name, "-d") { + curSum = key.Sum() + hashRes[HashModeNeedAllDeps] = hex.EncodeToString(curSum[:]) + + return hashRes, nil +} + +func (c *Cache) computeDepsHash(depMode HashMode, imps []*packages.Package, key *cache.Hash) error { + for _, dep := range imps { + if dep.PkgPath == "unsafe" { continue } - entry := filepath.Join(subdir, name) - info, err := os.Stat(entry) - if err == nil && info.ModTime().Before(cutoff) { - os.Remove(entry) + + depHash, err := c.packageHash(dep, depMode) + if err != nil { + return fmt.Errorf("failed to calculate hash for dependency %s with mode %d: %w", dep.Name, depMode, err) } + + fmt.Fprintf(key, "import %s %s\n", dep.PkgPath, depHash) } + + return nil } -// putIndexEntry adds an entry to the cache recording that executing the action -// with the given id produces an output with the given output id (hash) and size. -func (c *Cache) putIndexEntry(id ActionID, out OutputID, size int64, allowVerify bool) error { - // Note: We expect that for one reason or another it may happen - // that repeating an action produces a different output hash - // (for example, if the output contains a time stamp or temp dir name). - // While not ideal, this is also not a correctness problem, so we - // don't make a big deal about it. In particular, we leave the action - // cache entries writable specifically so that they can be overwritten. - // - // Setting GODEBUG=gocacheverify=1 does make a big deal: - // in verify mode we are double-checking that the cache entries - // are entirely reproducible. As just noted, this may be unrealistic - // in some cases but the check is also useful for shaking out real bugs. - entry := fmt.Sprintf("v1 %x %x %20d %20d\n", id, out, size, time.Now().UnixNano()) - - if verify && allowVerify { - old, err := c.get(id) - if err == nil && (old.OutputID != out || old.Size != size) { - // panic to show stack trace, so we can see what code is generating this cache entry. - msg := fmt.Sprintf("go: internal cache error: cache verify failed: id=%x changed:<<<\n%s\n>>>\nold: %x %d\nnew: %x %d", id, reverseHash(id), out, size, old.OutputID, old.Size) - panic(msg) - } - } - file := c.fileName(id, "a") +func (c *Cache) putBytes(actionID cache.ActionID, buf *bytes.Buffer) error { + c.ioSem <- struct{}{} + + err := c.sw.TrackStageErr("cache io", func() error { + return cache.PutBytes(c.lowLevelCache, actionID, buf.Bytes()) + }) + + <-c.ioSem - // Copy file to cache directory. - mode := os.O_WRONLY | os.O_CREATE - f, err := os.OpenFile(file, mode, 0666) - if err != nil { - return err - } - _, err = f.WriteString(entry) - if err == nil { - // Truncate the file only *after* writing it. - // (This should be a no-op, but truncate just in case of previous corruption.) - // - // This differs from os.WriteFile, which truncates to 0 *before* writing - // via os.O_TRUNC. Truncating only after writing ensures that a second write - // of the same content to the same file is idempotent, and does not — even - // temporarily! — undo the effect of the first write. - err = f.Truncate(int64(len(entry))) - } - if closeErr := f.Close(); err == nil { - err = closeErr - } if err != nil { - // TODO(bcmills): This Remove potentially races with another go command writing to file. - // Can we eliminate it? 
- os.Remove(file) return err } - if err = os.Chtimes(file, c.now(), c.now()); err != nil { // mainly for tests - return fmt.Errorf("failed to change time of file %s: %w", file, err) - } return nil } -// Put stores the given output in the cache as the output for the action ID. -// It may read file twice. The content of file must not change between the two passes. -func (c *Cache) Put(id ActionID, file io.ReadSeeker) (OutputID, int64, error) { - return c.put(id, file, true) -} +func (c *Cache) getBytes(actionID cache.ActionID) ([]byte, error) { + c.ioSem <- struct{}{} -// PutNoVerify is like Put but disables the verify check -// when GODEBUG=goverifycache=1 is set. -// It is meant for data that is OK to cache but that we expect to vary slightly from run to run, -// like test output containing times and the like. -func (c *Cache) PutNoVerify(id ActionID, file io.ReadSeeker) (OutputID, int64, error) { - return c.put(id, file, false) -} + cachedData, err := timeutils.TrackStage(c.sw, "cache io", func() ([]byte, error) { + b, _, errGB := cache.GetBytes(c.lowLevelCache, actionID) + return b, errGB + }) -func (c *Cache) put(id ActionID, file io.ReadSeeker, allowVerify bool) (OutputID, int64, error) { - // Compute output ID. - h := sha256.New() - if _, err := file.Seek(0, 0); err != nil { - return OutputID{}, 0, err - } - size, err := io.Copy(h, file) - if err != nil { - return OutputID{}, 0, err - } - var out OutputID - h.Sum(out[:0]) + <-c.ioSem - // Copy to cached output file (if not already present). - if err := c.copyFile(file, out, size); err != nil { - return out, size, err + if err != nil { + return nil, err } - // Add to cache index. - return out, size, c.putIndexEntry(id, out, size, allowVerify) + return cachedData, nil } -// PutBytes stores the given bytes in the cache as the output for the action ID. -func (c *Cache) PutBytes(id ActionID, data []byte) error { - _, _, err := c.Put(id, bytes.NewReader(data)) - return err -} +func (c *Cache) fileHash(f string) ([cache.HashSize]byte, error) { + c.ioSem <- struct{}{} -// copyFile copies file into the cache, expecting it to have the given -// output ID and size, if that file is not present already. -func (c *Cache) copyFile(file io.ReadSeeker, out OutputID, size int64) error { - name := c.fileName(out, "d") - info, err := os.Stat(name) - if err == nil && info.Size() == size { - // Check hash. - if f, openErr := os.Open(name); openErr == nil { - h := sha256.New() - if _, copyErr := io.Copy(h, f); copyErr != nil { - return fmt.Errorf("failed to copy to sha256: %w", copyErr) - } - - f.Close() - var out2 OutputID - h.Sum(out2[:0]) - if out == out2 { - return nil - } - } - // Hash did not match. Fall through and rewrite file. - } + h, err := cache.FileHash(f) + + <-c.ioSem - // Copy file to cache directory. - mode := os.O_RDWR | os.O_CREATE - if err == nil && info.Size() > size { // shouldn't happen but fix in case - mode |= os.O_TRUNC - } - f, err := os.OpenFile(name, mode, 0666) if err != nil { - return err - } - defer f.Close() - if size == 0 { - // File now exists with correct size. - // Only one possible zero-length file, so contents are OK too. - // Early return here makes sure there's a "last byte" for code below. - return nil + return [cache.HashSize]byte{}, err } - // From here on, if any of the I/O writing the file fails, - // we make a best-effort attempt to truncate the file f - // before returning, to avoid leaving bad bytes in the file. + return h, nil +} - // Copy file to f, but also into h to double-check hash. 
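The new putBytes, getBytes, and fileHash helpers above bracket every low-level cache IO call with a send and a receive on c.ioSem, a buffered channel used as a counting semaphore so only a bounded number of cache reads and writes are in flight at once. A minimal sketch of that pattern on its own (the limiter type and the limit are illustrative; only the channel-as-semaphore shape is taken from the hunk above):

```go
package main

import (
	"fmt"
	"os"
	"sync"
)

// ioLimiter bounds concurrent file reads with a buffered channel,
// the same shape as the c.ioSem usage in the cache wrapper above.
type ioLimiter struct {
	sem chan struct{}
}

func newIOLimiter(n int) *ioLimiter { return &ioLimiter{sem: make(chan struct{}, n)} }

// readFile acquires a slot, performs the IO, and releases the slot.
func (l *ioLimiter) readFile(path string) ([]byte, error) {
	l.sem <- struct{}{}        // acquire
	defer func() { <-l.sem }() // release
	return os.ReadFile(path)
}

func main() {
	lim := newIOLimiter(4) // at most 4 reads in flight
	var wg sync.WaitGroup
	for _, p := range []string{"go.mod", "go.sum"} {
		wg.Add(1)
		go func(p string) {
			defer wg.Done()
			b, err := lim.readFile(p)
			fmt.Println(p, len(b), err)
		}(p)
	}
	wg.Wait()
}
```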
- if _, err = file.Seek(0, 0); err != nil { - _ = f.Truncate(0) - return err - } - h := sha256.New() - w := io.MultiWriter(f, h) - if _, err = io.CopyN(w, file, size-1); err != nil { - _ = f.Truncate(0) - return err - } - // Check last byte before writing it; writing it will make the size match - // what other processes expect to find and might cause them to start - // using the file. - buf := make([]byte, 1) - if _, err = file.Read(buf); err != nil { - _ = f.Truncate(0) - return err - } - if n, wErr := h.Write(buf); n != len(buf) { - return fmt.Errorf("wrote to hash %d/%d bytes with error %w", n, len(buf), wErr) +func (c *Cache) encode(data any) (*bytes.Buffer, error) { + buf := &bytes.Buffer{} + err := c.sw.TrackStageErr("gob", func() error { + return gob.NewEncoder(buf).Encode(data) + }) + if err != nil { + return nil, fmt.Errorf("failed to gob encode: %w", err) } - sum := h.Sum(nil) - if !bytes.Equal(sum, out[:]) { - _ = f.Truncate(0) - return errors.New("file content changed underfoot") - } + return buf, nil +} - // Commit cache file entry. - if _, err = f.Write(buf); err != nil { - _ = f.Truncate(0) - return err - } - if err = f.Close(); err != nil { - // Data might not have been written, - // but file may look like it is the right size. - // To be extra careful, remove cached file. - os.Remove(name) - return err - } - if err = os.Chtimes(name, c.now(), c.now()); err != nil { // mainly for tests - return fmt.Errorf("failed to change time of file %s: %w", name, err) +func (c *Cache) decode(b []byte, data any) error { + err := c.sw.TrackStageErr("gob", func() error { + return gob.NewDecoder(bytes.NewReader(b)).Decode(data) + }) + if err != nil { + return fmt.Errorf("failed to gob decode: %w", err) } return nil } + +func SetSalt(b *bytes.Buffer) { + cache.SetSalt(b.Bytes()) +} + +func DefaultDir() string { + cacheDir, _ := cache.DefaultDir() + return cacheDir +} diff --git a/vendor/github.com/golangci/golangci-lint/internal/cache/readme.md b/vendor/github.com/golangci/golangci-lint/internal/cache/readme.md deleted file mode 100644 index b469711edd..0000000000 --- a/vendor/github.com/golangci/golangci-lint/internal/cache/readme.md +++ /dev/null @@ -1,18 +0,0 @@ -# cache - -Extracted from go/src/cmd/go/internal/cache/ -I don't know what version of Go this package was pulled from. - -Adapted for golangci-lint: -- https://github.com/golangci/golangci-lint/pull/699 -- https://github.com/golangci/golangci-lint/pull/779 -- https://github.com/golangci/golangci-lint/pull/788 -- https://github.com/golangci/golangci-lint/pull/808 -- https://github.com/golangci/golangci-lint/pull/1063 -- https://github.com/golangci/golangci-lint/pull/1070 -- https://github.com/golangci/golangci-lint/pull/1162 -- https://github.com/golangci/golangci-lint/pull/2318 -- https://github.com/golangci/golangci-lint/pull/2352 -- https://github.com/golangci/golangci-lint/pull/3012 -- https://github.com/golangci/golangci-lint/pull/3096 -- https://github.com/golangci/golangci-lint/pull/3204 diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/LICENSE b/vendor/github.com/golangci/golangci-lint/internal/go/LICENSE new file mode 100644 index 0000000000..6a66aea5ea --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/internal/go/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2009 The Go Authors. All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/cache/cache.go b/vendor/github.com/golangci/golangci-lint/internal/go/cache/cache.go new file mode 100644 index 0000000000..7bf4f1d660 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/internal/go/cache/cache.go @@ -0,0 +1,663 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package cache implements a build artifact cache. +// +// This package is a slightly modified fork of Go's +// cmd/go/internal/cache package. +package cache + +import ( + "bytes" + "crypto/sha256" + "encoding/hex" + "errors" + "fmt" + "io" + "io/fs" + "os" + "path/filepath" + "strconv" + "strings" + "time" + + "github.com/golangci/golangci-lint/internal/go/mmap" + "github.com/golangci/golangci-lint/internal/go/robustio" + "github.com/rogpeppe/go-internal/lockedfile" +) + +// An ActionID is a cache action key, the hash of a complete description of a +// repeatable computation (command line, environment variables, +// input file contents, executable contents). +type ActionID [HashSize]byte + +// An OutputID is a cache output key, the hash of an output of a computation. +type OutputID [HashSize]byte + +// Cache is the interface as used by the cmd/go. +type Cache interface { + // Get returns the cache entry for the provided ActionID. + // On miss, the error type should be of type *entryNotFoundError. + // + // After a success call to Get, OutputFile(Entry.OutputID) must + // exist on disk for until Close is called (at the end of the process). + Get(ActionID) (Entry, error) + + // Put adds an item to the cache. + // + // The seeker is only used to seek to the beginning. After a call to Put, + // the seek position is not guaranteed to be in any particular state. + // + // As a special case, if the ReadSeeker is of type noVerifyReadSeeker, + // the verification from GODEBUG=goverifycache=1 is skipped. 
+ // + // After a success call to Get, OutputFile(Entry.OutputID) must + // exist on disk for until Close is called (at the end of the process). + Put(ActionID, io.ReadSeeker) (_ OutputID, size int64, _ error) + + // Close is called at the end of the go process. Implementations can do + // cache cleanup work at this phase, or wait for and report any errors from + // background cleanup work started earlier. Any cache trimming should in one + // process should not violate cause the invariants of this interface to be + // violated in another process. Namely, a cache trim from one process should + // not delete an ObjectID from disk that was recently Get or Put from + // another process. As a rule of thumb, don't trim things used in the last + // day. + Close() error + + // OutputFile returns the path on disk where OutputID is stored. + // + // It's only called after a successful get or put call so it doesn't need + // to return an error; it's assumed that if the previous get or put succeeded, + // it's already on disk. + OutputFile(OutputID) string + + // FuzzDir returns where fuzz files are stored. + FuzzDir() string +} + +// A Cache is a package cache, backed by a file system directory tree. +type DiskCache struct { + dir string + now func() time.Time +} + +// Open opens and returns the cache in the given directory. +// +// It is safe for multiple processes on a single machine to use the +// same cache directory in a local file system simultaneously. +// They will coordinate using operating system file locks and may +// duplicate effort but will not corrupt the cache. +// +// However, it is NOT safe for multiple processes on different machines +// to share a cache directory (for example, if the directory were stored +// in a network file system). File locking is notoriously unreliable in +// network file systems and may not suffice to protect the cache. +func Open(dir string) (*DiskCache, error) { + info, err := os.Stat(dir) + if err != nil { + return nil, err + } + if !info.IsDir() { + return nil, &fs.PathError{Op: "open", Path: dir, Err: fmt.Errorf("not a directory")} + } + for i := 0; i < 256; i++ { + name := filepath.Join(dir, fmt.Sprintf("%02x", i)) + if err := os.MkdirAll(name, 0744); err != nil { + return nil, err + } + } + c := &DiskCache{ + dir: dir, + now: time.Now, + } + return c, nil +} + +// fileName returns the name of the file corresponding to the given id. +func (c *DiskCache) fileName(id [HashSize]byte, key string) string { + return filepath.Join(c.dir, fmt.Sprintf("%02x", id[0]), fmt.Sprintf("%x", id)+"-"+key) +} + +// An entryNotFoundError indicates that a cache entry was not found, with an +// optional underlying reason. +type entryNotFoundError struct { + Err error +} + +func (e *entryNotFoundError) Error() string { + if e.Err == nil { + return "cache entry not found" + } + return fmt.Sprintf("cache entry not found: %v", e.Err) +} + +func (e *entryNotFoundError) Unwrap() error { + return e.Err +} + +const ( + // action entry file is "v1 \n" + hexSize = HashSize * 2 + entrySize = 2 + 1 + hexSize + 1 + hexSize + 1 + 20 + 1 + 20 + 1 +) + +// verify controls whether to run the cache in verify mode. +// In verify mode, the cache always returns errMissing from Get +// but then double-checks in Put that the data being written +// exactly matches any existing entry. This provides an easy +// way to detect program behavior that would have been different +// had the cache entry been returned from Get. 
+// +// verify is enabled by setting the environment variable +// GODEBUG=gocacheverify=1. +var verify = false + +var errVerifyMode = errors.New("gocacheverify=1") + +// DebugTest is set when GODEBUG=gocachetest=1 is in the environment. +var DebugTest = false + +// func init() { initEnv() } + +// var ( +// gocacheverify = godebug.New("gocacheverify") +// gocachehash = godebug.New("gocachehash") +// gocachetest = godebug.New("gocachetest") +// ) + +// func initEnv() { +// if gocacheverify.Value() == "1" { +// gocacheverify.IncNonDefault() +// verify = true +// } +// if gocachehash.Value() == "1" { +// gocachehash.IncNonDefault() +// debugHash = true +// } +// if gocachetest.Value() == "1" { +// gocachetest.IncNonDefault() +// DebugTest = true +// } +// } + +// Get looks up the action ID in the cache, +// returning the corresponding output ID and file size, if any. +// Note that finding an output ID does not guarantee that the +// saved file for that output ID is still available. +func (c *DiskCache) Get(id ActionID) (Entry, error) { + if verify { + return Entry{}, &entryNotFoundError{Err: errVerifyMode} + } + return c.get(id) +} + +type Entry struct { + OutputID OutputID + Size int64 + Time time.Time // when added to cache +} + +// get is Get but does not respect verify mode, so that Put can use it. +func (c *DiskCache) get(id ActionID) (Entry, error) { + missing := func(reason error) (Entry, error) { + return Entry{}, &entryNotFoundError{Err: reason} + } + f, err := os.Open(c.fileName(id, "a")) + if err != nil { + return missing(err) + } + defer f.Close() + entry := make([]byte, entrySize+1) // +1 to detect whether f is too long + if n, err := io.ReadFull(f, entry); n > entrySize { + return missing(errors.New("too long")) + } else if err != io.ErrUnexpectedEOF { + if err == io.EOF { + return missing(errors.New("file is empty")) + } + return missing(err) + } else if n < entrySize { + return missing(errors.New("entry file incomplete")) + } + if entry[0] != 'v' || entry[1] != '1' || entry[2] != ' ' || entry[3+hexSize] != ' ' || entry[3+hexSize+1+hexSize] != ' ' || entry[3+hexSize+1+hexSize+1+20] != ' ' || entry[entrySize-1] != '\n' { + return missing(errors.New("invalid header")) + } + eid, entry := entry[3:3+hexSize], entry[3+hexSize:] + eout, entry := entry[1:1+hexSize], entry[1+hexSize:] + esize, entry := entry[1:1+20], entry[1+20:] + etime, entry := entry[1:1+20], entry[1+20:] + var buf [HashSize]byte + if _, err := hex.Decode(buf[:], eid); err != nil { + return missing(fmt.Errorf("decoding ID: %v", err)) + } else if buf != id { + return missing(errors.New("mismatched ID")) + } + if _, err := hex.Decode(buf[:], eout); err != nil { + return missing(fmt.Errorf("decoding output ID: %v", err)) + } + i := 0 + for i < len(esize) && esize[i] == ' ' { + i++ + } + size, err := strconv.ParseInt(string(esize[i:]), 10, 64) + if err != nil { + return missing(fmt.Errorf("parsing size: %v", err)) + } else if size < 0 { + return missing(errors.New("negative size")) + } + i = 0 + for i < len(etime) && etime[i] == ' ' { + i++ + } + tm, err := strconv.ParseInt(string(etime[i:]), 10, 64) + if err != nil { + return missing(fmt.Errorf("parsing timestamp: %v", err)) + } else if tm < 0 { + return missing(errors.New("negative timestamp")) + } + + err = c.used(c.fileName(id, "a")) + if err != nil { + return Entry{}, fmt.Errorf("failed to mark %s as used: %w", c.fileName(id, "a"), err) + } + + return Entry{buf, size, time.Unix(0, tm)}, nil +} + +// GetFile looks up the action ID in the cache and returns +// the 
name of the corresponding data file. +func GetFile(c Cache, id ActionID) (file string, entry Entry, err error) { + entry, err = c.Get(id) + if err != nil { + return "", Entry{}, err + } + file = c.OutputFile(entry.OutputID) + info, err := os.Stat(file) + if err != nil { + return "", Entry{}, &entryNotFoundError{Err: err} + } + if info.Size() != entry.Size { + return "", Entry{}, &entryNotFoundError{Err: errors.New("file incomplete")} + } + return file, entry, nil +} + +// GetBytes looks up the action ID in the cache and returns +// the corresponding output bytes. +// GetBytes should only be used for data that can be expected to fit in memory. +func GetBytes(c Cache, id ActionID) ([]byte, Entry, error) { + entry, err := c.Get(id) + if err != nil { + return nil, entry, err + } + data, err := robustio.ReadFile(c.OutputFile(entry.OutputID)) + if err != nil { + return nil, entry, err + } + if sha256.Sum256(data) != entry.OutputID { + return nil, entry, &entryNotFoundError{Err: errors.New("bad checksum")} + } + return data, entry, nil +} + +// GetMmap looks up the action ID in the cache and returns +// the corresponding output bytes. +// GetMmap should only be used for data that can be expected to fit in memory. +func GetMmap(c Cache, id ActionID) ([]byte, Entry, error) { + entry, err := c.Get(id) + if err != nil { + return nil, entry, err + } + md, err := mmap.Mmap(c.OutputFile(entry.OutputID)) + if err != nil { + return nil, Entry{}, err + } + if int64(len(md.Data)) != entry.Size { + return nil, Entry{}, &entryNotFoundError{Err: errors.New("file incomplete")} + } + return md.Data, entry, nil +} + +// OutputFile returns the name of the cache file storing output with the given OutputID. +func (c *DiskCache) OutputFile(out OutputID) string { + file := c.fileName(out, "d") + c.used(file) + return file +} + +// Time constants for cache expiration. +// +// We set the mtime on a cache file on each use, but at most one per mtimeInterval (1 hour), +// to avoid causing many unnecessary inode updates. The mtimes therefore +// roughly reflect "time of last use" but may in fact be older by at most an hour. +// +// We scan the cache for entries to delete at most once per trimInterval (1 day). +// +// When we do scan the cache, we delete entries that have not been used for +// at least trimLimit (5 days). Statistics gathered from a month of usage by +// Go developers found that essentially all reuse of cached entries happened +// within 5 days of the previous reuse. See golang.org/issue/22990. +const ( + mtimeInterval = 1 * time.Hour + trimInterval = 24 * time.Hour + trimLimit = 5 * 24 * time.Hour +) + +// used makes a best-effort attempt to update mtime on file, +// so that mtime reflects cache access time. +// +// Because the reflection only needs to be approximate, +// and to reduce the amount of disk activity caused by using +// cache entries, used only updates the mtime if the current +// mtime is more than an hour old. This heuristic eliminates +// nearly all of the mtime updates that would otherwise happen, +// while still keeping the mtimes useful for cache trimming. 
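As the comment above explains, used refreshes a cache file's mtime only when the current mtime is more than mtimeInterval old, keeping "time of last use" roughly accurate while avoiding an inode update on every cache hit. A small self-contained sketch of that heuristic (a standalone helper under the same constant, not the DiskCache method itself):

```go
package main

import (
	"fmt"
	"os"
	"time"
)

const mtimeInterval = 1 * time.Hour

// markUsed bumps file's mtime to now, but only if the existing mtime
// is older than mtimeInterval, mirroring the heuristic described above.
func markUsed(file string, now time.Time) error {
	info, err := os.Stat(file)
	if err != nil {
		return err
	}
	if now.Sub(info.ModTime()) < mtimeInterval {
		return nil // recently used; skip the inode update
	}
	return os.Chtimes(file, now, now)
}

func main() {
	f, err := os.CreateTemp("", "cache-entry-*")
	if err != nil {
		panic(err)
	}
	defer os.Remove(f.Name())
	f.Close()

	// Pretend the entry was last used two hours ago, then mark it used.
	old := time.Now().Add(-2 * time.Hour)
	_ = os.Chtimes(f.Name(), old, old)
	fmt.Println(markUsed(f.Name(), time.Now()))
}
```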
+func (c *DiskCache) used(file string) error { + info, err := os.Stat(file) + if err == nil && c.now().Sub(info.ModTime()) < mtimeInterval { + return nil + } + + if err != nil { + if os.IsNotExist(err) { + return &entryNotFoundError{Err: err} + } + return &entryNotFoundError{Err: fmt.Errorf("failed to stat file %s: %w", file, err)} + } + + err = os.Chtimes(file, c.now(), c.now()) + if err != nil { + return fmt.Errorf("failed to change time of file %s: %w", file, err) + } + + return nil +} + +func (c *DiskCache) Close() error { return c.Trim() } + +// Trim removes old cache entries that are likely not to be reused. +func (c *DiskCache) Trim() error { + now := c.now() + + // We maintain in dir/trim.txt the time of the last completed cache trim. + // If the cache has been trimmed recently enough, do nothing. + // This is the common case. + // If the trim file is corrupt, detected if the file can't be parsed, or the + // trim time is too far in the future, attempt the trim anyway. It's possible that + // the cache was full when the corruption happened. Attempting a trim on + // an empty cache is cheap, so there wouldn't be a big performance hit in that case. + if data, err := lockedfile.Read(filepath.Join(c.dir, "trim.txt")); err == nil { + if t, err := strconv.ParseInt(strings.TrimSpace(string(data)), 10, 64); err == nil { + lastTrim := time.Unix(t, 0) + if d := now.Sub(lastTrim); d < trimInterval && d > -mtimeInterval { + return nil + } + } + } + + // Trim each of the 256 subdirectories. + // We subtract an additional mtimeInterval + // to account for the imprecision of our "last used" mtimes. + cutoff := now.Add(-trimLimit - mtimeInterval) + for i := 0; i < 256; i++ { + subdir := filepath.Join(c.dir, fmt.Sprintf("%02x", i)) + c.trimSubdir(subdir, cutoff) + } + + // Ignore errors from here: if we don't write the complete timestamp, the + // cache will appear older than it is, and we'll trim it again next time. + var b bytes.Buffer + fmt.Fprintf(&b, "%d", now.Unix()) + if err := lockedfile.Write(filepath.Join(c.dir, "trim.txt"), &b, 0666); err != nil { + return err + } + + return nil +} + +// trimSubdir trims a single cache subdirectory. +func (c *DiskCache) trimSubdir(subdir string, cutoff time.Time) { + // Read all directory entries from subdir before removing + // any files, in case removing files invalidates the file offset + // in the directory scan. Also, ignore error from f.Readdirnames, + // because we don't care about reporting the error and we still + // want to process any entries found before the error. + f, err := os.Open(subdir) + if err != nil { + return + } + names, _ := f.Readdirnames(-1) + f.Close() + + for _, name := range names { + // Remove only cache entries (xxxx-a and xxxx-d). + if !strings.HasSuffix(name, "-a") && !strings.HasSuffix(name, "-d") { + continue + } + entry := filepath.Join(subdir, name) + info, err := os.Stat(entry) + if err == nil && info.ModTime().Before(cutoff) { + os.Remove(entry) + } + } +} + +// putIndexEntry adds an entry to the cache recording that executing the action +// with the given id produces an output with the given output id (hash) and size. +func (c *DiskCache) putIndexEntry(id ActionID, out OutputID, size int64, allowVerify bool) error { + // Note: We expect that for one reason or another it may happen + // that repeating an action produces a different output hash + // (for example, if the output contains a time stamp or temp dir name). 
+ // While not ideal, this is also not a correctness problem, so we + // don't make a big deal about it. In particular, we leave the action + // cache entries writable specifically so that they can be overwritten. + // + // Setting GODEBUG=gocacheverify=1 does make a big deal: + // in verify mode we are double-checking that the cache entries + // are entirely reproducible. As just noted, this may be unrealistic + // in some cases but the check is also useful for shaking out real bugs. + entry := fmt.Sprintf("v1 %x %x %20d %20d\n", id, out, size, time.Now().UnixNano()) + if verify && allowVerify { + old, err := c.get(id) + if err == nil && (old.OutputID != out || old.Size != size) { + // panic to show stack trace, so we can see what code is generating this cache entry. + msg := fmt.Sprintf("go: internal cache error: cache verify failed: id=%x changed:<<<\n%s\n>>>\nold: %x %d\nnew: %x %d", id, reverseHash(id), out, size, old.OutputID, old.Size) + panic(msg) + } + } + file := c.fileName(id, "a") + + // Copy file to cache directory. + mode := os.O_WRONLY | os.O_CREATE + f, err := os.OpenFile(file, mode, 0666) + if err != nil { + return err + } + _, err = f.WriteString(entry) + if err == nil { + // Truncate the file only *after* writing it. + // (This should be a no-op, but truncate just in case of previous corruption.) + // + // This differs from os.WriteFile, which truncates to 0 *before* writing + // via os.O_TRUNC. Truncating only after writing ensures that a second write + // of the same content to the same file is idempotent, and does not — even + // temporarily! — undo the effect of the first write. + err = f.Truncate(int64(len(entry))) + } + if closeErr := f.Close(); err == nil { + err = closeErr + } + if err != nil { + // TODO(bcmills): This Remove potentially races with another go command writing to file. + // Can we eliminate it? + os.Remove(file) + return err + } + err = os.Chtimes(file, c.now(), c.now()) // mainly for tests + if err != nil { + return fmt.Errorf("failed to change time of file %s: %w", file, err) + } + + return nil +} + +// noVerifyReadSeeker is an io.ReadSeeker wrapper sentinel type +// that says that Cache.Put should skip the verify check +// (from GODEBUG=goverifycache=1). +type noVerifyReadSeeker struct { + io.ReadSeeker +} + +// Put stores the given output in the cache as the output for the action ID. +// It may read file twice. The content of file must not change between the two passes. +func (c *DiskCache) Put(id ActionID, file io.ReadSeeker) (OutputID, int64, error) { + wrapper, isNoVerify := file.(noVerifyReadSeeker) + if isNoVerify { + file = wrapper.ReadSeeker + } + return c.put(id, file, !isNoVerify) +} + +// PutNoVerify is like Put but disables the verify check +// when GODEBUG=goverifycache=1 is set. +// It is meant for data that is OK to cache but that we expect to vary slightly from run to run, +// like test output containing times and the like. +func PutNoVerify(c Cache, id ActionID, file io.ReadSeeker) (OutputID, int64, error) { + return c.Put(id, noVerifyReadSeeker{file}) +} + +func (c *DiskCache) put(id ActionID, file io.ReadSeeker, allowVerify bool) (OutputID, int64, error) { + // Compute output ID. + h := sha256.New() + if _, err := file.Seek(0, 0); err != nil { + return OutputID{}, 0, err + } + size, err := io.Copy(h, file) + if err != nil { + return OutputID{}, 0, err + } + var out OutputID + h.Sum(out[:0]) + + // Copy to cached output file (if not already present). 
+ if err := c.copyFile(file, out, size); err != nil { + return out, size, err + } + + // Add to cache index. + return out, size, c.putIndexEntry(id, out, size, allowVerify) +} + +// PutBytes stores the given bytes in the cache as the output for the action ID. +func PutBytes(c Cache, id ActionID, data []byte) error { + _, _, err := c.Put(id, bytes.NewReader(data)) + return err +} + +// copyFile copies file into the cache, expecting it to have the given +// output ID and size, if that file is not present already. +func (c *DiskCache) copyFile(file io.ReadSeeker, out OutputID, size int64) error { + name := c.fileName(out, "d") + info, err := os.Stat(name) + if err == nil && info.Size() == size { + // Check hash. + if f, err := os.Open(name); err == nil { + h := sha256.New() + _, copyErr := io.Copy(h, f) + if copyErr != nil { + return fmt.Errorf("failed to copy to sha256: %w", copyErr) + } + + f.Close() + var out2 OutputID + h.Sum(out2[:0]) + if out == out2 { + return nil + } + } + // Hash did not match. Fall through and rewrite file. + } + + // Copy file to cache directory. + mode := os.O_RDWR | os.O_CREATE + if err == nil && info.Size() > size { // shouldn't happen but fix in case + mode |= os.O_TRUNC + } + f, err := os.OpenFile(name, mode, 0666) + if err != nil { + return err + } + defer f.Close() + if size == 0 { + // File now exists with correct size. + // Only one possible zero-length file, so contents are OK too. + // Early return here makes sure there's a "last byte" for code below. + return nil + } + + // From here on, if any of the I/O writing the file fails, + // we make a best-effort attempt to truncate the file f + // before returning, to avoid leaving bad bytes in the file. + + // Copy file to f, but also into h to double-check hash. + if _, err := file.Seek(0, 0); err != nil { + f.Truncate(0) + return err + } + h := sha256.New() + w := io.MultiWriter(f, h) + if _, err := io.CopyN(w, file, size-1); err != nil { + f.Truncate(0) + return err + } + // Check last byte before writing it; writing it will make the size match + // what other processes expect to find and might cause them to start + // using the file. + buf := make([]byte, 1) + if _, err := file.Read(buf); err != nil { + f.Truncate(0) + return err + } + n, wErr := h.Write(buf) + if n != len(buf) { + return fmt.Errorf("wrote to hash %d/%d bytes with error %w", n, len(buf), wErr) + } + + sum := h.Sum(nil) + if !bytes.Equal(sum, out[:]) { + f.Truncate(0) + return fmt.Errorf("file content changed underfoot") + } + + // Commit cache file entry. + if _, err := f.Write(buf); err != nil { + f.Truncate(0) + return err + } + if err := f.Close(); err != nil { + // Data might not have been written, + // but file may look like it is the right size. + // To be extra careful, remove cached file. + os.Remove(name) + return err + } + err = os.Chtimes(name, c.now(), c.now()) // mainly for tests + if err != nil { + return fmt.Errorf("failed to change time of file %s: %w", name, err) + } + + return nil +} + +// FuzzDir returns a subdirectory within the cache for storing fuzzing data. +// The subdirectory may not exist. +// +// This directory is managed by the internal/fuzz package. Files in this +// directory aren't removed by the 'go clean -cache' command or by Trim. +// They may be removed with 'go clean -fuzzcache'. +// +// TODO(#48526): make Trim remove unused files from this directory. 
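putIndexEntry above records each action as a single fixed-width line, `v1 %x %x %20d %20d\n` over the action ID, output ID, size, and Unix-nano timestamp, and get parses it back with the offset checks shown earlier. A hedged round-trip sketch of just that record format, using short 4-byte IDs for readability (the real HashSize is 32) and a loose whitespace-based parser instead of the offset checks:

```go
package main

import (
	"fmt"
	"strconv"
	"strings"
)

// Toy 4-byte IDs; the real cache uses 32-byte SHA-256 hashes.
type id [4]byte

// encodeEntry mirrors the "v1 %x %x %20d %20d\n" index record format.
func encodeEntry(action, output id, size, unixNano int64) string {
	return fmt.Sprintf("v1 %x %x %20d %20d\n", action, output, size, unixNano)
}

// decodeEntry is a loose parser for the same record (the real code
// validates fixed byte offsets instead of splitting on whitespace).
func decodeEntry(line string) (action, output string, size, unixNano int64, err error) {
	fields := strings.Fields(strings.TrimSuffix(line, "\n"))
	if len(fields) != 5 || fields[0] != "v1" {
		return "", "", 0, 0, fmt.Errorf("invalid entry %q", line)
	}
	size, err = strconv.ParseInt(fields[3], 10, 64)
	if err != nil {
		return "", "", 0, 0, err
	}
	unixNano, err = strconv.ParseInt(fields[4], 10, 64)
	if err != nil {
		return "", "", 0, 0, err
	}
	return fields[1], fields[2], size, unixNano, nil
}

func main() {
	line := encodeEntry(id{1, 2, 3, 4}, id{5, 6, 7, 8}, 1234, 1700000000000000000)
	fmt.Printf("%q\n", line)
	fmt.Println(decodeEntry(line))
}
```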
+func (c *DiskCache) FuzzDir() string { + return filepath.Join(c.dir, "fuzz") +} diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/cache/cache_gcil.go b/vendor/github.com/golangci/golangci-lint/internal/go/cache/cache_gcil.go new file mode 100644 index 0000000000..b4f07738e6 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/internal/go/cache/cache_gcil.go @@ -0,0 +1,12 @@ +package cache + +import ( + "errors" +) + +// IsErrMissing allows to access to the internal error. +// TODO(ldez) the handling of this error inside runner_action.go should be refactored. +func IsErrMissing(err error) bool { + var errENF *entryNotFoundError + return errors.As(err, &errENF) +} diff --git a/vendor/github.com/golangci/golangci-lint/internal/cache/default.go b/vendor/github.com/golangci/golangci-lint/internal/go/cache/default.go similarity index 58% rename from vendor/github.com/golangci/golangci-lint/internal/cache/default.go rename to vendor/github.com/golangci/golangci-lint/internal/go/cache/default.go index 399cc84cf0..7232f1ef3e 100644 --- a/vendor/github.com/golangci/golangci-lint/internal/cache/default.go +++ b/vendor/github.com/golangci/golangci-lint/internal/go/cache/default.go @@ -6,23 +6,22 @@ package cache import ( "fmt" - "log" + base "log" "os" "path/filepath" "sync" ) -const envGolangciLintCache = "GOLANGCI_LINT_CACHE" - // Default returns the default cache to use. -func Default() (*Cache, error) { +// It never returns nil. +func Default() Cache { defaultOnce.Do(initDefaultCache) - return defaultCache, defaultDirErr + return defaultCache } var ( defaultOnce sync.Once - defaultCache *Cache + defaultCache Cache ) // cacheREADME is a message stored in a README in the cache directory. @@ -34,32 +33,46 @@ const cacheREADME = `This directory holds cached build artifacts from golangci-l // initDefaultCache does the work of finding the default cache // the first time Default is called. func initDefaultCache() { - dir := DefaultDir() + dir, _ := DefaultDir() + if dir == "off" { + if defaultDirErr != nil { + base.Fatalf("build cache is required, but could not be located: %v", defaultDirErr) + } + base.Fatalf("build cache is disabled by %s=off, but required", envGolangciLintCache) + } if err := os.MkdirAll(dir, 0744); err != nil { - log.Fatalf("failed to initialize build cache at %s: %s\n", dir, err) + base.Fatalf("failed to initialize build cache at %s: %s\n", dir, err) } if _, err := os.Stat(filepath.Join(dir, "README")); err != nil { // Best effort. if wErr := os.WriteFile(filepath.Join(dir, "README"), []byte(cacheREADME), 0666); wErr != nil { - log.Fatalf("Failed to write README file to cache dir %s: %s", dir, err) + base.Fatalf("Failed to write README file to cache dir %s: %s", dir, err) } } - c, err := Open(dir) + diskCache, err := Open(dir) if err != nil { - log.Fatalf("failed to initialize build cache at %s: %s\n", dir, err) + base.Fatalf("failed to initialize build cache at %s: %s\n", dir, err) + } + + if v := os.Getenv(envGolangciLintCacheProg); v != "" { + defaultCache = startCacheProg(v, diskCache) + } else { + defaultCache = diskCache } - defaultCache = c } var ( - defaultDirOnce sync.Once - defaultDir string - defaultDirErr error + defaultDirOnce sync.Once + defaultDir string + defaultDirChanged bool // effective value differs from $GOLANGCI_LINT_CACHE + defaultDirErr error ) // DefaultDir returns the effective GOLANGCI_LINT_CACHE setting. 
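initDefaultCache above, together with the DefaultDir changes that follow, resolves the cache directory in a fixed order: an absolute GOLANGCI_LINT_CACHE value wins, the literal value off disables the cache, any other non-empty value is rejected and the cache is turned off with an error, and otherwise the directory defaults to os.UserCacheDir() plus golangci-lint. A standalone sketch of that resolution order, inferred from this hunk rather than taken from the exported API:

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

const envCache = "GOLANGCI_LINT_CACHE"

// resolveCacheDir mirrors the resolution order used by DefaultDir in the hunk
// below: explicit absolute path, explicit "off", invalid explicit value
// becomes "off" plus an error, otherwise the per-user cache dir.
func resolveCacheDir() (dir string, changed bool, err error) {
	if v := os.Getenv(envCache); v != "" {
		if filepath.IsAbs(v) || v == "off" {
			return v, true, nil
		}
		return "off", true, fmt.Errorf("%s is not an absolute path", envCache)
	}
	base, err := os.UserCacheDir()
	if err != nil {
		return "off", true, fmt.Errorf("%s is not defined and %w", envCache, err)
	}
	return filepath.Join(base, "golangci-lint"), false, nil
}

func main() {
	dir, changed, err := resolveCacheDir()
	fmt.Println(dir, changed, err)
}
```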
-func DefaultDir() string { +// It returns "off" if the cache is disabled, +// and reports whether the effective value differs from GOLANGCI_LINT_CACHE. +func DefaultDir() (string, bool) { // Save the result of the first call to DefaultDir for later use in // initDefaultCache. cmd/go/main.go explicitly sets GOCACHE so that // subprocesses will inherit it, but that means initDefaultCache can't @@ -67,10 +80,12 @@ func DefaultDir() string { defaultDirOnce.Do(func() { defaultDir = os.Getenv(envGolangciLintCache) - if filepath.IsAbs(defaultDir) { - return - } if defaultDir != "" { + defaultDirChanged = true + if filepath.IsAbs(defaultDir) || defaultDir == "off" { + return + } + defaultDir = "off" defaultDirErr = fmt.Errorf("%s is not an absolute path", envGolangciLintCache) return } @@ -78,11 +93,13 @@ func DefaultDir() string { // Compute default location. dir, err := os.UserCacheDir() if err != nil { + defaultDir = "off" + defaultDirChanged = true defaultDirErr = fmt.Errorf("%s is not defined and %w", envGolangciLintCache, err) return } defaultDir = filepath.Join(dir, "golangci-lint") }) - return defaultDir + return defaultDir, defaultDirChanged } diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/cache/default_gcil.go b/vendor/github.com/golangci/golangci-lint/internal/go/cache/default_gcil.go new file mode 100644 index 0000000000..a801f67f47 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/internal/go/cache/default_gcil.go @@ -0,0 +1,6 @@ +package cache + +const ( + envGolangciLintCache = "GOLANGCI_LINT_CACHE" + envGolangciLintCacheProg = "GOLANGCI_LINT_CACHEPROG" +) diff --git a/vendor/github.com/golangci/golangci-lint/internal/cache/hash.go b/vendor/github.com/golangci/golangci-lint/internal/go/cache/hash.go similarity index 91% rename from vendor/github.com/golangci/golangci-lint/internal/cache/hash.go rename to vendor/github.com/golangci/golangci-lint/internal/go/cache/hash.go index 4ce79e325b..d5169dd4c4 100644 --- a/vendor/github.com/golangci/golangci-lint/internal/cache/hash.go +++ b/vendor/github.com/golangci/golangci-lint/internal/go/cache/hash.go @@ -11,6 +11,7 @@ import ( "hash" "io" "os" + "strings" "sync" ) @@ -36,22 +37,26 @@ type Hash struct { // which are still addressed by unsalted SHA256. var hashSalt []byte -func SetSalt(b []byte) { - hashSalt = b +// stripExperiment strips any GOEXPERIMENT configuration from the Go +// version string. +func stripExperiment(version string) string { + if i := strings.Index(version, " X:"); i >= 0 { + return version[:i] + } + return version } // Subkey returns an action ID corresponding to mixing a parent // action ID with a string description of the subkey. 
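As described just above, Subkey derives a child action ID by hashing the fixed "subkey:" prefix, the parent ID bytes, and a description string into a fresh SHA-256 sum. A minimal sketch of that derivation:

```go
package main

import (
	"crypto/sha256"
	"fmt"
)

type actionID [sha256.Size]byte

// subkey mixes a parent action ID with a description, following the
// "subkey:" + parent + desc construction used by cache.Subkey.
func subkey(parent actionID, desc string) actionID {
	h := sha256.New()
	h.Write([]byte("subkey:"))
	h.Write(parent[:])
	h.Write([]byte(desc))
	var out actionID
	h.Sum(out[:0]) // write the digest into out's backing array
	return out
}

func main() {
	var parent actionID // zero parent, for illustration only
	fmt.Printf("%x\n", subkey(parent, "analyzers"))
}
```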
func Subkey(parent ActionID, desc string) (ActionID, error) { h := sha256.New() - const subkeyPrefix = "subkey:" - if n, err := h.Write([]byte(subkeyPrefix)); n != len(subkeyPrefix) { - return ActionID{}, fmt.Errorf("wrote %d/%d bytes of subkey prefix with error %s", n, len(subkeyPrefix), err) - } - if n, err := h.Write(parent[:]); n != len(parent) { + h.Write([]byte(("subkey:"))) + n, err := h.Write(parent[:]) + if n != len(parent) { return ActionID{}, fmt.Errorf("wrote %d/%d bytes of parent with error %s", n, len(parent), err) } - if n, err := h.Write([]byte(desc)); n != len(desc) { + n, err = h.Write([]byte(desc)) + if n != len(desc) { return ActionID{}, fmt.Errorf("wrote %d/%d bytes of desc with error %s", n, len(desc), err) } @@ -75,7 +80,8 @@ func NewHash(name string) (*Hash, error) { if debugHash { fmt.Fprintf(os.Stderr, "HASH[%s]\n", h.name) } - if n, err := h.Write(hashSalt); n != len(hashSalt) { + n, err := h.Write(hashSalt) + if n != len(hashSalt) { return nil, fmt.Errorf("wrote %d/%d bytes of hash salt with error %s", n, len(hashSalt), err) } if verify { diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/cache/hash_gcil.go b/vendor/github.com/golangci/golangci-lint/internal/go/cache/hash_gcil.go new file mode 100644 index 0000000000..08749036bd --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/internal/go/cache/hash_gcil.go @@ -0,0 +1,5 @@ +package cache + +func SetSalt(b []byte) { + hashSalt = b +} diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/cache/prog.go b/vendor/github.com/golangci/golangci-lint/internal/go/cache/prog.go new file mode 100644 index 0000000000..a93740a3cf --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/internal/go/cache/prog.go @@ -0,0 +1,428 @@ +// Copyright 2023 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cache + +import ( + "bufio" + "context" + "crypto/sha256" + "encoding/base64" + "encoding/json" + "errors" + "fmt" + "io" + "log" + base "log" + "os" + "os/exec" + "sync" + "sync/atomic" + "time" + + "github.com/golangci/golangci-lint/internal/go/quoted" +) + +// ProgCache implements Cache via JSON messages over stdin/stdout to a child +// helper process which can then implement whatever caching policy/mechanism it +// wants. +// +// See https://github.com/golang/go/issues/59719 +type ProgCache struct { + cmd *exec.Cmd + stdout io.ReadCloser // from the child process + stdin io.WriteCloser // to the child process + bw *bufio.Writer // to stdin + jenc *json.Encoder // to bw + + // can are the commands that the child process declared that it supports. + // This is effectively the versioning mechanism. + can map[ProgCmd]bool + + // fuzzDirCache is another Cache implementation to use for the FuzzDir + // method. In practice this is the default GOCACHE disk-based + // implementation. + // + // TODO(bradfitz): maybe this isn't ideal. But we'd need to extend the Cache + // interface and the fuzzing callers to be less disk-y to do more here. + fuzzDirCache Cache + + closing atomic.Bool + ctx context.Context // valid until Close via ctxClose + ctxCancel context.CancelFunc // called on Close + readLoopDone chan struct{} // closed when readLoop returns + + mu sync.Mutex // guards following fields + nextID int64 + inFlight map[int64]chan<- *ProgResponse + outputFile map[OutputID]string // object => abs path on disk + + // writeMu serializes writing to the child process. 
+ // It must never be held at the same time as mu. + writeMu sync.Mutex +} + +// ProgCmd is a command that can be issued to a child process. +// +// If the interface needs to grow, we can add new commands or new versioned +// commands like "get2". +type ProgCmd string + +const ( + cmdGet = ProgCmd("get") + cmdPut = ProgCmd("put") + cmdClose = ProgCmd("close") +) + +// ProgRequest is the JSON-encoded message that's sent from cmd/go to +// the GOLANGCI_LINT_CACHEPROG child process over stdin. Each JSON object is on its +// own line. A ProgRequest of Type "put" with BodySize > 0 will be followed +// by a line containing a base64-encoded JSON string literal of the body. +type ProgRequest struct { + // ID is a unique number per process across all requests. + // It must be echoed in the ProgResponse from the child. + ID int64 + + // Command is the type of request. + // The cmd/go tool will only send commands that were declared + // as supported by the child. + Command ProgCmd + + // ActionID is non-nil for get and puts. + ActionID []byte `json:",omitempty"` // or nil if not used + + // ObjectID is set for Type "put" and "output-file". + ObjectID []byte `json:",omitempty"` // or nil if not used + + // Body is the body for "put" requests. It's sent after the JSON object + // as a base64-encoded JSON string when BodySize is non-zero. + // It's sent as a separate JSON value instead of being a struct field + // send in this JSON object so large values can be streamed in both directions. + // The base64 string body of a ProgRequest will always be written + // immediately after the JSON object and a newline. + Body io.Reader `json:"-"` + + // BodySize is the number of bytes of Body. If zero, the body isn't written. + BodySize int64 `json:",omitempty"` +} + +// ProgResponse is the JSON response from the child process to cmd/go. +// +// With the exception of the first protocol message that the child writes to its +// stdout with ID==0 and KnownCommands populated, these are only sent in +// response to a ProgRequest from cmd/go. +// +// ProgResponses can be sent in any order. The ID must match the request they're +// replying to. +type ProgResponse struct { + ID int64 // that corresponds to ProgRequest; they can be answered out of order + Err string `json:",omitempty"` // if non-empty, the error + + // KnownCommands is included in the first message that cache helper program + // writes to stdout on startup (with ID==0). It includes the + // ProgRequest.Command types that are supported by the program. + // + // This lets us extend the protocol gracefully over time (adding "get2", + // etc), or fail gracefully when needed. It also lets us verify the program + // wants to be a cache helper. + KnownCommands []ProgCmd `json:",omitempty"` + + // For Get requests. + + Miss bool `json:",omitempty"` // cache miss + OutputID []byte `json:",omitempty"` + Size int64 `json:",omitempty"` // in bytes + Time *time.Time `json:",omitempty"` // an Entry.Time; when the object was added to the docs + + // DiskPath is the absolute path on disk of the ObjectID corresponding + // a "get" request's ActionID (on cache hit) or a "put" request's + // provided ObjectID. + DiskPath string `json:",omitempty"` +} + +// startCacheProg starts the prog binary (with optional space-separated flags) +// and returns a Cache implementation that talks to it. +// +// It blocks a few seconds to wait for the child process to successfully start +// and advertise its capabilities. 
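startCacheProg, defined next, launches the GOLANGCI_LINT_CACHEPROG binary and speaks the line-oriented JSON protocol described by ProgRequest and ProgResponse above: the child first announces its KnownCommands in a response with ID 0, then answers each request by echoing its ID. A deliberately minimal read-only helper that always reports a miss could look like the following sketch; it exercises only the handshake and the get/close commands, with no storage behind it:

```go
package main

import (
	"encoding/json"
	"os"
)

// request/response mirror the ProgRequest/ProgResponse wire shapes above
// (only the fields this toy helper needs).
type request struct {
	ID       int64
	Command  string
	ActionID []byte `json:",omitempty"`
}

type response struct {
	ID            int64
	Err           string   `json:",omitempty"`
	KnownCommands []string `json:",omitempty"`
	Miss          bool     `json:",omitempty"`
}

func main() {
	enc := json.NewEncoder(os.Stdout)
	dec := json.NewDecoder(os.Stdin)

	// Handshake: advertise capabilities with ID 0 before reading requests.
	_ = enc.Encode(response{ID: 0, KnownCommands: []string{"get", "close"}})

	for {
		var req request
		if err := dec.Decode(&req); err != nil {
			return // parent closed stdin; just exit
		}
		switch req.Command {
		case "get":
			// No storage here: every lookup is a miss.
			_ = enc.Encode(response{ID: req.ID, Miss: true})
		case "close":
			_ = enc.Encode(response{ID: req.ID})
			return
		default:
			_ = enc.Encode(response{ID: req.ID, Err: "unsupported command"})
		}
	}
}
```

Because this helper never declares "put", the parent would treat it as read-only and skip writes; every lookup falls through to a miss while the handshake is still satisfied.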
+func startCacheProg(progAndArgs string, fuzzDirCache Cache) Cache { + if fuzzDirCache == nil { + panic("missing fuzzDirCache") + } + args, err := quoted.Split(progAndArgs) + if err != nil { + base.Fatalf("%s args: %v", envGolangciLintCacheProg, err) + } + var prog string + if len(args) > 0 { + prog = args[0] + args = args[1:] + } + + ctx, ctxCancel := context.WithCancel(context.Background()) + + cmd := exec.CommandContext(ctx, prog, args...) + out, err := cmd.StdoutPipe() + if err != nil { + base.Fatalf("StdoutPipe to %s: %v", envGolangciLintCacheProg, err) + } + in, err := cmd.StdinPipe() + if err != nil { + base.Fatalf("StdinPipe to %s: %v", envGolangciLintCacheProg, err) + } + cmd.Stderr = os.Stderr + cmd.Cancel = in.Close + + if err := cmd.Start(); err != nil { + base.Fatalf("error starting %s program %q: %v", envGolangciLintCacheProg, prog, err) + } + + pc := &ProgCache{ + ctx: ctx, + ctxCancel: ctxCancel, + fuzzDirCache: fuzzDirCache, + cmd: cmd, + stdout: out, + stdin: in, + bw: bufio.NewWriter(in), + inFlight: make(map[int64]chan<- *ProgResponse), + outputFile: make(map[OutputID]string), + readLoopDone: make(chan struct{}), + } + + // Register our interest in the initial protocol message from the child to + // us, saying what it can do. + capResc := make(chan *ProgResponse, 1) + pc.inFlight[0] = capResc + + pc.jenc = json.NewEncoder(pc.bw) + go pc.readLoop(pc.readLoopDone) + + // Give the child process a few seconds to report its capabilities. This + // should be instant and not require any slow work by the program. + timer := time.NewTicker(5 * time.Second) + defer timer.Stop() + for { + select { + case <-timer.C: + log.Printf("# still waiting for %s %v ...", envGolangciLintCacheProg, prog) + case capRes := <-capResc: + can := map[ProgCmd]bool{} + for _, cmd := range capRes.KnownCommands { + can[cmd] = true + } + if len(can) == 0 { + base.Fatalf("%s %v declared no supported commands", envGolangciLintCacheProg, prog) + } + pc.can = can + return pc + } + } +} + +func (c *ProgCache) readLoop(readLoopDone chan<- struct{}) { + defer close(readLoopDone) + jd := json.NewDecoder(c.stdout) + for { + res := new(ProgResponse) + if err := jd.Decode(res); err != nil { + if c.closing.Load() { + return // quietly + } + if err == io.EOF { + c.mu.Lock() + inFlight := len(c.inFlight) + c.mu.Unlock() + base.Fatalf("%s exited pre-Close with %v pending requests", envGolangciLintCacheProg, inFlight) + } + base.Fatalf("error reading JSON from %s: %v", envGolangciLintCacheProg, err) + } + c.mu.Lock() + ch, ok := c.inFlight[res.ID] + delete(c.inFlight, res.ID) + c.mu.Unlock() + if ok { + ch <- res + } else { + base.Fatalf("%s sent response for unknown request ID %v", envGolangciLintCacheProg, res.ID) + } + } +} + +func (c *ProgCache) send(ctx context.Context, req *ProgRequest) (*ProgResponse, error) { + resc := make(chan *ProgResponse, 1) + if err := c.writeToChild(req, resc); err != nil { + return nil, err + } + select { + case res := <-resc: + if res.Err != "" { + return nil, errors.New(res.Err) + } + return res, nil + case <-ctx.Done(): + return nil, ctx.Err() + } +} + +func (c *ProgCache) writeToChild(req *ProgRequest, resc chan<- *ProgResponse) (err error) { + c.mu.Lock() + c.nextID++ + req.ID = c.nextID + c.inFlight[req.ID] = resc + c.mu.Unlock() + + defer func() { + if err != nil { + c.mu.Lock() + delete(c.inFlight, req.ID) + c.mu.Unlock() + } + }() + + c.writeMu.Lock() + defer c.writeMu.Unlock() + + if err := c.jenc.Encode(req); err != nil { + return err + } + if err := c.bw.WriteByte('\n'); err 
!= nil { + return err + } + if req.Body != nil && req.BodySize > 0 { + if err := c.bw.WriteByte('"'); err != nil { + return err + } + e := base64.NewEncoder(base64.StdEncoding, c.bw) + wrote, err := io.Copy(e, req.Body) + if err != nil { + return err + } + if err := e.Close(); err != nil { + return nil + } + if wrote != req.BodySize { + return fmt.Errorf("short write writing body to %s for action %x, object %x: wrote %v; expected %v", + envGolangciLintCacheProg, req.ActionID, req.ObjectID, wrote, req.BodySize) + } + if _, err := c.bw.WriteString("\"\n"); err != nil { + return err + } + } + if err := c.bw.Flush(); err != nil { + return err + } + return nil +} + +func (c *ProgCache) Get(a ActionID) (Entry, error) { + if !c.can[cmdGet] { + // They can't do a "get". Maybe they're a write-only cache. + // + // TODO(bradfitz,bcmills): figure out the proper error type here. Maybe + // errors.ErrUnsupported? Is entryNotFoundError even appropriate? There + // might be places where we rely on the fact that a recent Put can be + // read through a corresponding Get. Audit callers and check, and document + // error types on the Cache interface. + return Entry{}, &entryNotFoundError{} + } + res, err := c.send(c.ctx, &ProgRequest{ + Command: cmdGet, + ActionID: a[:], + }) + if err != nil { + return Entry{}, err // TODO(bradfitz): or entryNotFoundError? Audit callers. + } + if res.Miss { + return Entry{}, &entryNotFoundError{} + } + e := Entry{ + Size: res.Size, + } + if res.Time != nil { + e.Time = *res.Time + } else { + e.Time = time.Now() + } + if res.DiskPath == "" { + return Entry{}, &entryNotFoundError{fmt.Errorf("%s didn't populate DiskPath on get hit", envGolangciLintCacheProg)} + } + if copy(e.OutputID[:], res.OutputID) != len(res.OutputID) { + return Entry{}, &entryNotFoundError{errors.New("incomplete ProgResponse OutputID")} + } + c.noteOutputFile(e.OutputID, res.DiskPath) + return e, nil +} + +func (c *ProgCache) noteOutputFile(o OutputID, diskPath string) { + c.mu.Lock() + defer c.mu.Unlock() + c.outputFile[o] = diskPath +} + +func (c *ProgCache) OutputFile(o OutputID) string { + c.mu.Lock() + defer c.mu.Unlock() + return c.outputFile[o] +} + +func (c *ProgCache) Put(a ActionID, file io.ReadSeeker) (_ OutputID, size int64, _ error) { + // Compute output ID. + h := sha256.New() + if _, err := file.Seek(0, 0); err != nil { + return OutputID{}, 0, err + } + size, err := io.Copy(h, file) + if err != nil { + return OutputID{}, 0, err + } + var out OutputID + h.Sum(out[:0]) + + if _, err := file.Seek(0, 0); err != nil { + return OutputID{}, 0, err + } + + if !c.can[cmdPut] { + // Child is a read-only cache. Do nothing. + return out, size, nil + } + + res, err := c.send(c.ctx, &ProgRequest{ + Command: cmdPut, + ActionID: a[:], + ObjectID: out[:], + Body: file, + BodySize: size, + }) + if err != nil { + return OutputID{}, 0, err + } + if res.DiskPath == "" { + return OutputID{}, 0, fmt.Errorf("%s didn't return DiskPath in put response", envGolangciLintCacheProg) + } + c.noteOutputFile(out, res.DiskPath) + return out, size, err +} + +func (c *ProgCache) Close() error { + c.closing.Store(true) + var err error + + // First write a "close" message to the child so it can exit nicely + // and clean up if it wants. Only after that exchange do we cancel + // the context that kills the process. 
+ if c.can[cmdClose] { + _, err = c.send(c.ctx, &ProgRequest{Command: cmdClose}) + } + c.ctxCancel() + <-c.readLoopDone + return err +} + +func (c *ProgCache) FuzzDir() string { + // TODO(bradfitz): figure out what to do here. For now just use the + // disk-based default. + return c.fuzzDirCache.FuzzDir() +} diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/cache/readme.md b/vendor/github.com/golangci/golangci-lint/internal/go/cache/readme.md new file mode 100644 index 0000000000..5be600e425 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/internal/go/cache/readme.md @@ -0,0 +1,51 @@ +# cache + +Extracted from `go/src/cmd/go/internal/cache/`. + +The main modifications are: +- The errors management + - Some methods return error. + - Some errors are returned instead of being ignored. +- The name of the env vars: + - `GOCACHE` -> `GOLANGCI_LINT_CACHE` + - `GOCACHEPROG` -> `GOLANGCI_LINT_CACHEPROG` + +## History + +- https://github.com/golangci/golangci-lint/pull/5100 + - Move package from `internal/cache` to `internal/go/cache` +- https://github.com/golangci/golangci-lint/pull/5098 + - sync with go1.23.2 + - sync with go1.22.8 + - sync with go1.21.13 + - sync with go1.20.14 + - sync with go1.19.13 + - sync with go1.18.10 + - sync with go1.17.13 + - sync with go1.16.15 + - sync with go1.15.15 + - sync with go1.14.15 + +## Previous History + +Based on the initial PR/commit the based in a mix between go1.12 and go1.13: +- cache.go (go1.13) +- cache_test.go (go1.12?) +- default.go (go1.12?) +- hash.go (go1.13 and go1.12 are identical) +- hash_test.go -> (go1.12?) + +Adapted for golangci-lint: +- https://github.com/golangci/golangci-lint/pull/699: initial code (contains modifications of the files) +- https://github.com/golangci/golangci-lint/pull/779: just a nolint (`cache.go`) +- https://github.com/golangci/golangci-lint/pull/788: only directory permissions changes (0777 -> 0744) (`cache.go`, `cache_test.go`, `default.go`) +- https://github.com/golangci/golangci-lint/pull/808: mainly related to logs and errors (`cache.go`, `default.go`, `hash.go`, `hash_test.go`) +- https://github.com/golangci/golangci-lint/pull/1063: `ioutil` -> `robustio` (`cache.go`) +- https://github.com/golangci/golangci-lint/pull/1070: add `t.Parallel()` inside `cache_test.go` +- https://github.com/golangci/golangci-lint/pull/1162: errors inside `cache.go` +- https://github.com/golangci/golangci-lint/pull/2318: `ioutil` -> `os` (`cache.go`, `cache_test.go`, `default.go`, `hash_test.go`) +- https://github.com/golangci/golangci-lint/pull/2352: Go doc typos +- https://github.com/golangci/golangci-lint/pull/3012: errors inside `cache.go` (`cache.go`, `default.go`) +- https://github.com/golangci/golangci-lint/pull/3196: constant for `GOLANGCI_LINT_CACHE` (`cache.go`) +- https://github.com/golangci/golangci-lint/pull/3204: add this file and `%w` in `fmt.Errorf` (`cache.go`) +- https://github.com/golangci/golangci-lint/pull/3604: remove `github.com/pkg/errors` (`cache.go`) diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap.go b/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap.go new file mode 100644 index 0000000000..fcbd3e08c1 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap.go @@ -0,0 +1,31 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// This package is a lightly modified version of the mmap code +// in github.com/google/codesearch/index. + +// The mmap package provides an abstraction for memory mapping files +// on different platforms. +package mmap + +import ( + "os" +) + +// Data is mmap'ed read-only data from a file. +// The backing file is never closed, so Data +// remains valid for the lifetime of the process. +type Data struct { + f *os.File + Data []byte +} + +// Mmap maps the given file into memory. +func Mmap(file string) (Data, error) { + f, err := os.Open(file) + if err != nil { + return Data{}, err + } + return mmapFile(f) +} diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap_other.go b/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap_other.go new file mode 100644 index 0000000000..4d2844fc37 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap_other.go @@ -0,0 +1,21 @@ +// Copyright 2022 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build (js && wasm) || wasip1 || plan9 + +package mmap + +import ( + "io" + "os" +) + +// mmapFile on other systems doesn't mmap the file. It just reads everything. +func mmapFile(f *os.File) (Data, error) { + b, err := io.ReadAll(f) + if err != nil { + return Data{}, err + } + return Data{f, b}, nil +} diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap_unix.go b/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap_unix.go new file mode 100644 index 0000000000..5dce872368 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap_unix.go @@ -0,0 +1,36 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build unix + +package mmap + +import ( + "fmt" + "io/fs" + "os" + "syscall" +) + +func mmapFile(f *os.File) (Data, error) { + st, err := f.Stat() + if err != nil { + return Data{}, err + } + size := st.Size() + pagesize := int64(os.Getpagesize()) + if int64(int(size+(pagesize-1))) != size+(pagesize-1) { + return Data{}, fmt.Errorf("%s: too large for mmap", f.Name()) + } + n := int(size) + if n == 0 { + return Data{f, nil}, nil + } + mmapLength := int(((size + pagesize - 1) / pagesize) * pagesize) // round up to page size + data, err := syscall.Mmap(int(f.Fd()), 0, mmapLength, syscall.PROT_READ, syscall.MAP_SHARED) + if err != nil { + return Data{}, &fs.PathError{Op: "mmap", Path: f.Name(), Err: err} + } + return Data{f, data[:n]}, nil +} diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap_windows.go b/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap_windows.go new file mode 100644 index 0000000000..479ee30754 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap_windows.go @@ -0,0 +1,41 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
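The `mmap` package added above exposes only `Mmap` and the `Data` struct. A hedged usage sketch, written as a test that could sit next to `mmap.go` inside the package (the test name and temp-file contents are illustrative):

```go
package mmap

import (
	"os"
	"testing"
)

// Usage sketch: Mmap returns a read-only view of the file. The backing
// file is intentionally never closed, so Data stays valid for the
// lifetime of the process.
func TestMmapReadsFile(t *testing.T) {
	f, err := os.CreateTemp(t.TempDir(), "mmap-example-*")
	if err != nil {
		t.Fatal(err)
	}
	const contents = "hello, mmap"
	if _, err := f.WriteString(contents); err != nil {
		t.Fatal(err)
	}
	if err := f.Close(); err != nil {
		t.Fatal(err)
	}

	data, err := Mmap(f.Name())
	if err != nil {
		t.Fatal(err)
	}
	if string(data.Data) != contents {
		t.Fatalf("got %q, want %q", data.Data, contents)
	}
}
```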
+ +package mmap + +import ( + "fmt" + "os" + "syscall" + "unsafe" + + "golang.org/x/sys/windows" +) + +func mmapFile(f *os.File) (Data, error) { + st, err := f.Stat() + if err != nil { + return Data{}, err + } + size := st.Size() + if size == 0 { + return Data{f, nil}, nil + } + h, err := syscall.CreateFileMapping(syscall.Handle(f.Fd()), nil, syscall.PAGE_READONLY, 0, 0, nil) + if err != nil { + return Data{}, fmt.Errorf("CreateFileMapping %s: %w", f.Name(), err) + } + + addr, err := syscall.MapViewOfFile(h, syscall.FILE_MAP_READ, 0, 0, 0) + if err != nil { + return Data{}, fmt.Errorf("MapViewOfFile %s: %w", f.Name(), err) + } + var info windows.MemoryBasicInformation + err = windows.VirtualQuery(addr, &info, unsafe.Sizeof(info)) + if err != nil { + return Data{}, fmt.Errorf("VirtualQuery %s: %w", f.Name(), err) + } + data := unsafe.Slice((*byte)(unsafe.Pointer(addr)), int(info.RegionSize)) + return Data{f, data}, nil +} diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/mmap/readme.md b/vendor/github.com/golangci/golangci-lint/internal/go/mmap/readme.md new file mode 100644 index 0000000000..f68aef097c --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/internal/go/mmap/readme.md @@ -0,0 +1,15 @@ +# mmap + +Extracted from `go/src/cmd/go/internal/mmap/` (related to `cache`). +This is just a copy of the Go code without any changes. + +## History + +- https://github.com/golangci/golangci-lint/pull/5100 + - Move package from `internal/mmap` to `internal/go/mmap` +- https://github.com/golangci/golangci-lint/pull/5098 + - sync with go1.23.2 + - sync with go1.22.8 + - sync with go1.21.13 + - sync with go1.20.14 + - sync with go1.19.13 diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/quoted/quoted.go b/vendor/github.com/golangci/golangci-lint/internal/go/quoted/quoted.go new file mode 100644 index 0000000000..a812275073 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/internal/go/quoted/quoted.go @@ -0,0 +1,129 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package quoted provides string manipulation utilities. +package quoted + +import ( + "flag" + "fmt" + "strings" + "unicode" +) + +func isSpaceByte(c byte) bool { + return c == ' ' || c == '\t' || c == '\n' || c == '\r' +} + +// Split splits s into a list of fields, +// allowing single or double quotes around elements. +// There is no unescaping or other processing within +// quoted fields. +// +// Keep in sync with cmd/dist/quoted.go +func Split(s string) ([]string, error) { + // Split fields allowing '' or "" around elements. + // Quotes further inside the string do not count. + var f []string + for len(s) > 0 { + for len(s) > 0 && isSpaceByte(s[0]) { + s = s[1:] + } + if len(s) == 0 { + break + } + // Accepted quoted string. No unescaping inside. + if s[0] == '"' || s[0] == '\'' { + quote := s[0] + s = s[1:] + i := 0 + for i < len(s) && s[i] != quote { + i++ + } + if i >= len(s) { + return nil, fmt.Errorf("unterminated %c string", quote) + } + f = append(f, s[:i]) + s = s[i+1:] + continue + } + i := 0 + for i < len(s) && !isSpaceByte(s[i]) { + i++ + } + f = append(f, s[:i]) + s = s[i:] + } + return f, nil +} + +// Join joins a list of arguments into a string that can be parsed +// with Split. Arguments are quoted only if necessary; arguments +// without spaces or quotes are kept as-is. No argument may contain both +// single and double quotes. 
+func Join(args []string) (string, error) { + var buf []byte + for i, arg := range args { + if i > 0 { + buf = append(buf, ' ') + } + var sawSpace, sawSingleQuote, sawDoubleQuote bool + for _, c := range arg { + switch { + case c > unicode.MaxASCII: + continue + case isSpaceByte(byte(c)): + sawSpace = true + case c == '\'': + sawSingleQuote = true + case c == '"': + sawDoubleQuote = true + } + } + switch { + case !sawSpace && !sawSingleQuote && !sawDoubleQuote: + buf = append(buf, arg...) + + case !sawSingleQuote: + buf = append(buf, '\'') + buf = append(buf, arg...) + buf = append(buf, '\'') + + case !sawDoubleQuote: + buf = append(buf, '"') + buf = append(buf, arg...) + buf = append(buf, '"') + + default: + return "", fmt.Errorf("argument %q contains both single and double quotes and cannot be quoted", arg) + } + } + return string(buf), nil +} + +// A Flag parses a list of string arguments encoded with Join. +// It is useful for flags like cmd/link's -extldflags. +type Flag []string + +var _ flag.Value = (*Flag)(nil) + +func (f *Flag) Set(v string) error { + fs, err := Split(v) + if err != nil { + return err + } + *f = fs[:len(fs):len(fs)] + return nil +} + +func (f *Flag) String() string { + if f == nil { + return "" + } + s, err := Join(*f) + if err != nil { + return strings.Join(*f, " ") + } + return s +} diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/quoted/readme.md b/vendor/github.com/golangci/golangci-lint/internal/go/quoted/readme.md new file mode 100644 index 0000000000..a5e4c4bb3b --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/internal/go/quoted/readme.md @@ -0,0 +1,13 @@ +# quoted + +Extracted from `go/src/cmd/internal/quoted/` (related to `cache`). +This is just a copy of the Go code without any changes. 
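For reviewers unfamiliar with the package: `Split` accepts single- or double-quoted whole fields with no unescaping inside the quotes, and `Join` is its inverse, quoting only the fields that need it and rejecting any argument that mixes both quote characters. A round-trip sketch, written as a test that could live alongside `quoted.go` (the input string and test name are illustrative):

```go
package quoted

import (
	"reflect"
	"testing"
)

// Round-trip sketch: Split honors whole-field quoting with no unescaping,
// and Join re-quotes only the fields that need it.
func TestSplitJoinRoundTrip(t *testing.T) {
	in := `-L/tmp/libs '-Wl,-rpath,/opt/my libs' -lfoo`

	fields, err := Split(in)
	if err != nil {
		t.Fatal(err)
	}
	want := []string{"-L/tmp/libs", "-Wl,-rpath,/opt/my libs", "-lfoo"}
	if !reflect.DeepEqual(fields, want) {
		t.Fatalf("Split(%q) = %q, want %q", in, fields, want)
	}

	// Only the field containing a space gets quoted again.
	joined, err := Join(fields)
	if err != nil {
		t.Fatal(err)
	}
	if joined != in {
		t.Fatalf("Join(%q) = %q, want %q", fields, joined, in)
	}
}
```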
+ +## History + +- https://github.com/golangci/golangci-lint/pull/5100 + - Move package from `internal/quoted` to `internal/go/quoted` +- https://github.com/golangci/golangci-lint/pull/5098 + - sync go1.23.2 + - sync go1.22.8 + - sync go1.21.13 diff --git a/vendor/github.com/golangci/golangci-lint/internal/robustio/readme.md b/vendor/github.com/golangci/golangci-lint/internal/go/robustio/readme.md similarity index 64% rename from vendor/github.com/golangci/golangci-lint/internal/robustio/readme.md rename to vendor/github.com/golangci/golangci-lint/internal/go/robustio/readme.md index 7c7ba0483a..f4dbc16264 100644 --- a/vendor/github.com/golangci/golangci-lint/internal/robustio/readme.md +++ b/vendor/github.com/golangci/golangci-lint/internal/go/robustio/readme.md @@ -4,3 +4,8 @@ Extracted from go1.19.1/src/cmd/go/internal/robustio There is only one modification: - ERROR_SHARING_VIOLATION extracted from go1.19.1/src/internal/syscall/windows/syscall_windows.go to remove the dependencies to `internal/syscall/windows` + +## History + +- https://github.com/golangci/golangci-lint/pull/5100 + - Move package from `internal/robustio` to `internal/go/robustio` diff --git a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio.go b/vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio.go similarity index 100% rename from vendor/github.com/golangci/golangci-lint/internal/robustio/robustio.go rename to vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio.go diff --git a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_darwin.go b/vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio_darwin.go similarity index 100% rename from vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_darwin.go rename to vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio_darwin.go diff --git a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_flaky.go b/vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio_flaky.go similarity index 100% rename from vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_flaky.go rename to vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio_flaky.go diff --git a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_other.go b/vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio_other.go similarity index 100% rename from vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_other.go rename to vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio_other.go diff --git a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_windows.go b/vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio_windows.go similarity index 100% rename from vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_windows.go rename to vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio_windows.go diff --git a/vendor/github.com/golangci/golangci-lint/internal/pkgcache/pkgcache.go b/vendor/github.com/golangci/golangci-lint/internal/pkgcache/pkgcache.go deleted file mode 100644 index 3b3422eb7a..0000000000 --- a/vendor/github.com/golangci/golangci-lint/internal/pkgcache/pkgcache.go +++ /dev/null @@ -1,229 +0,0 @@ -package pkgcache - -import ( - "bytes" - "encoding/gob" - "encoding/hex" - "errors" - "fmt" - "runtime" - "sort" - "sync" - - "golang.org/x/tools/go/packages" - - 
"github.com/golangci/golangci-lint/internal/cache" - "github.com/golangci/golangci-lint/pkg/logutils" - "github.com/golangci/golangci-lint/pkg/timeutils" -) - -type HashMode int - -const ( - HashModeNeedOnlySelf HashMode = iota - HashModeNeedDirectDeps - HashModeNeedAllDeps -) - -// Cache is a per-package data cache. A cached data is invalidated when -// package, or it's dependencies change. -type Cache struct { - lowLevelCache *cache.Cache - pkgHashes sync.Map - sw *timeutils.Stopwatch - log logutils.Log // not used now, but may be needed for future debugging purposes - ioSem chan struct{} // semaphore limiting parallel IO -} - -func NewCache(sw *timeutils.Stopwatch, log logutils.Log) (*Cache, error) { - c, err := cache.Default() - if err != nil { - return nil, err - } - return &Cache{ - lowLevelCache: c, - sw: sw, - log: log, - ioSem: make(chan struct{}, runtime.GOMAXPROCS(-1)), - }, nil -} - -func (c *Cache) Trim() { - c.sw.TrackStage("trim", func() { - c.lowLevelCache.Trim() - }) -} - -func (c *Cache) Put(pkg *packages.Package, mode HashMode, key string, data any) error { - var err error - buf := &bytes.Buffer{} - c.sw.TrackStage("gob", func() { - err = gob.NewEncoder(buf).Encode(data) - }) - if err != nil { - return fmt.Errorf("failed to gob encode: %w", err) - } - - var aID cache.ActionID - - c.sw.TrackStage("key build", func() { - aID, err = c.pkgActionID(pkg, mode) - if err == nil { - subkey, subkeyErr := cache.Subkey(aID, key) - if subkeyErr != nil { - err = fmt.Errorf("failed to build subkey: %w", subkeyErr) - } - aID = subkey - } - }) - if err != nil { - return fmt.Errorf("failed to calculate package %s action id: %w", pkg.Name, err) - } - c.ioSem <- struct{}{} - c.sw.TrackStage("cache io", func() { - err = c.lowLevelCache.PutBytes(aID, buf.Bytes()) - }) - <-c.ioSem - if err != nil { - return fmt.Errorf("failed to save data to low-level cache by key %s for package %s: %w", key, pkg.Name, err) - } - - return nil -} - -var ErrMissing = errors.New("missing data") - -func (c *Cache) Get(pkg *packages.Package, mode HashMode, key string, data any) error { - var aID cache.ActionID - var err error - c.sw.TrackStage("key build", func() { - aID, err = c.pkgActionID(pkg, mode) - if err == nil { - subkey, subkeyErr := cache.Subkey(aID, key) - if subkeyErr != nil { - err = fmt.Errorf("failed to build subkey: %w", subkeyErr) - } - aID = subkey - } - }) - if err != nil { - return fmt.Errorf("failed to calculate package %s action id: %w", pkg.Name, err) - } - - var b []byte - c.ioSem <- struct{}{} - c.sw.TrackStage("cache io", func() { - b, _, err = c.lowLevelCache.GetBytes(aID) - }) - <-c.ioSem - if err != nil { - if cache.IsErrMissing(err) { - return ErrMissing - } - return fmt.Errorf("failed to get data from low-level cache by key %s for package %s: %w", key, pkg.Name, err) - } - - c.sw.TrackStage("gob", func() { - err = gob.NewDecoder(bytes.NewReader(b)).Decode(data) - }) - if err != nil { - return fmt.Errorf("failed to gob decode: %w", err) - } - - return nil -} - -func (c *Cache) pkgActionID(pkg *packages.Package, mode HashMode) (cache.ActionID, error) { - hash, err := c.packageHash(pkg, mode) - if err != nil { - return cache.ActionID{}, fmt.Errorf("failed to get package hash: %w", err) - } - - key, err := cache.NewHash("action ID") - if err != nil { - return cache.ActionID{}, fmt.Errorf("failed to make a hash: %w", err) - } - fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath) - fmt.Fprintf(key, "pkghash %s\n", hash) - - return key.Sum(), nil -} - -// packageHash computes a package's hash. 
The hash is based on all Go -// files that make up the package, as well as the hashes of imported -// packages. -func (c *Cache) packageHash(pkg *packages.Package, mode HashMode) (string, error) { - type hashResults map[HashMode]string - hashResI, ok := c.pkgHashes.Load(pkg) - if ok { - hashRes := hashResI.(hashResults) - if _, ok := hashRes[mode]; !ok { - return "", fmt.Errorf("no mode %d in hash result", mode) - } - return hashRes[mode], nil - } - - hashRes := hashResults{} - - key, err := cache.NewHash("package hash") - if err != nil { - return "", fmt.Errorf("failed to make a hash: %w", err) - } - - fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath) - for _, f := range pkg.CompiledGoFiles { - c.ioSem <- struct{}{} - h, fErr := cache.FileHash(f) - <-c.ioSem - if fErr != nil { - return "", fmt.Errorf("failed to calculate file %s hash: %w", f, fErr) - } - fmt.Fprintf(key, "file %s %x\n", f, h) - } - curSum := key.Sum() - hashRes[HashModeNeedOnlySelf] = hex.EncodeToString(curSum[:]) - - imps := make([]*packages.Package, 0, len(pkg.Imports)) - for _, imp := range pkg.Imports { - imps = append(imps, imp) - } - sort.Slice(imps, func(i, j int) bool { - return imps[i].PkgPath < imps[j].PkgPath - }) - - calcDepsHash := func(depMode HashMode) error { - for _, dep := range imps { - if dep.PkgPath == "unsafe" { - continue - } - - depHash, depErr := c.packageHash(dep, depMode) - if depErr != nil { - return fmt.Errorf("failed to calculate hash for dependency %s with mode %d: %w", dep.Name, depMode, depErr) - } - - fmt.Fprintf(key, "import %s %s\n", dep.PkgPath, depHash) - } - return nil - } - - if err := calcDepsHash(HashModeNeedOnlySelf); err != nil { - return "", err - } - - curSum = key.Sum() - hashRes[HashModeNeedDirectDeps] = hex.EncodeToString(curSum[:]) - - if err := calcDepsHash(HashModeNeedAllDeps); err != nil { - return "", err - } - curSum = key.Sum() - hashRes[HashModeNeedAllDeps] = hex.EncodeToString(curSum[:]) - - if _, ok := hashRes[mode]; !ok { - return "", fmt.Errorf("invalid mode %d", mode) - } - - c.pkgHashes.Store(pkg, hashRes) - return hashRes[mode], nil -} diff --git a/vendor/github.com/golangci/golangci-lint/internal/renameio/readme.md b/vendor/github.com/golangci/golangci-lint/internal/renameio/readme.md deleted file mode 100644 index 36ec6ed499..0000000000 --- a/vendor/github.com/golangci/golangci-lint/internal/renameio/readme.md +++ /dev/null @@ -1,10 +0,0 @@ -# renameio - -Extracted from go/src/cmd/go/internal/renameio/ -I don't know what version of Go this package was pulled from. - -Adapted for golangci-lint: -- https://github.com/golangci/golangci-lint/pull/699 -- https://github.com/golangci/golangci-lint/pull/808 -- https://github.com/golangci/golangci-lint/pull/1063 -- https://github.com/golangci/golangci-lint/pull/3204 diff --git a/vendor/github.com/golangci/golangci-lint/internal/renameio/renameio.go b/vendor/github.com/golangci/golangci-lint/internal/renameio/renameio.go deleted file mode 100644 index 2f88f4f7cc..0000000000 --- a/vendor/github.com/golangci/golangci-lint/internal/renameio/renameio.go +++ /dev/null @@ -1,93 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package renameio writes files atomically by renaming temporary files. 
-package renameio - -import ( - "bytes" - "io" - "math/rand" - "os" - "path/filepath" - "strconv" - - "github.com/golangci/golangci-lint/internal/robustio" -) - -const patternSuffix = ".tmp" - -// Pattern returns a glob pattern that matches the unrenamed temporary files -// created when writing to filename. -func Pattern(filename string) string { - return filepath.Join(filepath.Dir(filename), filepath.Base(filename)+patternSuffix) -} - -// WriteFile is like os.WriteFile, but first writes data to an arbitrary -// file in the same directory as filename, then renames it atomically to the -// final name. -// -// That ensures that the final location, if it exists, is always a complete file. -func WriteFile(filename string, data []byte, perm os.FileMode) (err error) { - return WriteToFile(filename, bytes.NewReader(data), perm) -} - -// WriteToFile is a variant of WriteFile that accepts the data as an io.Reader -// instead of a slice. -func WriteToFile(filename string, data io.Reader, perm os.FileMode) (err error) { - f, err := tempFile(filepath.Dir(filename), filepath.Base(filename), perm) - if err != nil { - return err - } - defer func() { - // Only call os.Remove on f.Name() if we failed to rename it: otherwise, - // some other process may have created a new file with the same name after - // that. - if err != nil { - f.Close() - os.Remove(f.Name()) - } - }() - - if _, err := io.Copy(f, data); err != nil { - return err - } - // Sync the file before renaming it: otherwise, after a crash the reader may - // observe a 0-length file instead of the actual contents. - // See https://golang.org/issue/22397#issuecomment-380831736. - if err := f.Sync(); err != nil { - return err - } - if err := f.Close(); err != nil { - return err - } - - return robustio.Rename(f.Name(), filename) -} - -// tempFile creates a new temporary file with given permission bits. -func tempFile(dir, prefix string, perm os.FileMode) (f *os.File, err error) { - for i := 0; i < 10000; i++ { - name := filepath.Join(dir, prefix+strconv.Itoa(rand.Intn(1000000000))+patternSuffix) - f, err = os.OpenFile(name, os.O_RDWR|os.O_CREATE|os.O_EXCL, perm) - if os.IsExist(err) { - continue - } - break - } - return -} - -// ReadFile is like os.ReadFile, but on Windows retries spurious errors that -// may occur if the file is concurrently replaced. -// -// Errors are classified heuristically and retries are bounded, so even this -// function may occasionally return a spurious error on Windows. 
-// If so, the error will likely wrap one of: -// - syscall.ERROR_ACCESS_DENIED -// - syscall.ERROR_FILE_NOT_FOUND -// - internal/syscall/windows.ERROR_SHARING_VIOLATION -func ReadFile(filename string) ([]byte, error) { - return robustio.ReadFile(filename) -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/cache.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/cache.go index cc6c0eacd5..4f2c812dce 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/commands/cache.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/cache.go @@ -62,6 +62,7 @@ func (*cacheCommand) executeClean(_ *cobra.Command, _ []string) error { func (*cacheCommand) executeStatus(_ *cobra.Command, _ []string) { cacheDir := cache.DefaultDir() + _, _ = fmt.Fprintf(logutils.StdOut, "Dir: %s\n", cacheDir) cacheSizeBytes, err := dirSizeBytes(cacheDir) diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go index f289bfdd79..ff7c5e467b 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go @@ -28,7 +28,6 @@ import ( "gopkg.in/yaml.v3" "github.com/golangci/golangci-lint/internal/cache" - "github.com/golangci/golangci-lint/internal/pkgcache" "github.com/golangci/golangci-lint/pkg/config" "github.com/golangci/golangci-lint/pkg/exitcodes" "github.com/golangci/golangci-lint/pkg/fsutils" @@ -209,7 +208,7 @@ func (c *runCommand) preRunE(_ *cobra.Command, args []string) error { sw := timeutils.NewStopwatch("pkgcache", c.log.Child(logutils.DebugKeyStopwatch)) - pkgCache, err := pkgcache.NewCache(sw, c.log.Child(logutils.DebugKeyPkgCache)) + pkgCache, err := cache.NewCache(sw, c.log.Child(logutils.DebugKeyPkgCache)) if err != nil { return fmt.Errorf("failed to build packages cache: %w", err) } @@ -640,7 +639,7 @@ func initHashSalt(version string, cfg *config.Config) error { b := bytes.NewBuffer(binSalt) b.Write(configSalt) - cache.SetSalt(b.Bytes()) + cache.SetSalt(b) return nil } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/config.go b/vendor/github.com/golangci/golangci-lint/pkg/config/config.go index 93b331bec4..b863b329f9 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/config/config.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/config/config.go @@ -75,10 +75,9 @@ func IsGoGreaterThanOrEqual(current, limit string) bool { } func detectGoVersion() string { - file, _ := gomoddirectives.GetModuleFile() - - if file != nil && file.Go != nil && file.Go.Version != "" { - return file.Go.Version + goVersion := detectGoVersionFromGoMod() + if goVersion != "" { + return goVersion } v := os.Getenv("GOVERSION") @@ -88,3 +87,26 @@ func detectGoVersion() string { return "1.17" } + +// detectGoVersionFromGoMod tries to get Go version from go.mod. +// It returns `toolchain` version if present, +// else it returns `go` version if present, +// else it returns empty. +func detectGoVersionFromGoMod() string { + file, _ := gomoddirectives.GetModuleFile() + if file == nil { + return "" + } + + // The toolchain exists only if 'toolchain' version > 'go' version. + // If 'toolchain' version <= 'go' version, `go mod tidy` will remove 'toolchain' version from go.mod. 
+ if file.Toolchain != nil && file.Toolchain.Name != "" { + return strings.TrimPrefix(file.Toolchain.Name, "go") + } + + if file.Go != nil && file.Go.Version != "" { + return file.Go.Version + } + + return "" +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go b/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go index 109de42431..b182d1e0f1 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go @@ -47,6 +47,9 @@ var defaultLintersSettings = LintersSettings{ Sections: []string{"standard", "default"}, SkipGenerated: true, }, + GoChecksumType: GoChecksumTypeSettings{ + DefaultSignifiesExhaustive: true, + }, Gocognit: GocognitSettings{ MinComplexity: 30, }, @@ -216,6 +219,7 @@ type LintersSettings struct { Gci GciSettings GinkgoLinter GinkgoLinterSettings Gocognit GocognitSettings + GoChecksumType GoChecksumTypeSettings Goconst GoConstSettings Gocritic GoCriticSettings Gocyclo GoCycloSettings @@ -225,7 +229,6 @@ type LintersSettings struct { Gofumpt GofumptSettings Goheader GoHeaderSettings Goimports GoImportsSettings - Gomnd GoMndSettings GoModDirectives GoModDirectivesSettings Gomodguard GoModGuardSettings Gosec GoSecSettings @@ -233,6 +236,7 @@ type LintersSettings struct { Gosmopolitan GosmopolitanSettings Govet GovetSettings Grouper GrouperSettings + Iface IfaceSettings ImportAs ImportAsSettings Inamedparam INamedParamSettings InterfaceBloat InterfaceBloatSettings @@ -483,6 +487,11 @@ type GinkgoLinterSettings struct { ForceExpectTo bool `mapstructure:"force-expect-to"` ValidateAsyncIntervals bool `mapstructure:"validate-async-intervals"` ForbidSpecPollution bool `mapstructure:"forbid-spec-pollution"` + ForceSucceedForFuncs bool `mapstructure:"force-succeed"` +} + +type GoChecksumTypeSettings struct { + DefaultSignifiesExhaustive bool `mapstructure:"default-signifies-exhaustive"` } type GocognitSettings struct { @@ -560,14 +569,6 @@ type GoImportsSettings struct { LocalPrefixes string `mapstructure:"local-prefixes"` } -// Deprecated: use MndSettings. -type GoMndSettings struct { - MndSettings `mapstructure:",squash"` - - // Deprecated: use root level settings instead. - Settings map[string]map[string]any -} - type GoModDirectivesSettings struct { ReplaceAllowList []string `mapstructure:"replace-allow-list"` ReplaceLocal bool `mapstructure:"replace-local"` @@ -648,6 +649,11 @@ type GrouperSettings struct { VarRequireGrouping bool `mapstructure:"var-require-grouping"` } +type IfaceSettings struct { + Enable []string `mapstructure:"enable"` + Settings map[string]map[string]any `mapstructure:"settings"` +} + type ImportAsSettings struct { Alias []ImportAsAlias NoUnaliased bool `mapstructure:"no-unaliased"` @@ -725,7 +731,8 @@ type NestifSettings struct { } type NilNilSettings struct { - CheckedTypes []string `mapstructure:"checked-types"` + DetectOpposite bool `mapstructure:"detect-opposite"` + CheckedTypes []string `mapstructure:"checked-types"` } type NlreturnSettings struct { diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/loader.go b/vendor/github.com/golangci/golangci-lint/pkg/config/loader.go index efdbfce1f1..efeed3ca4d 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/config/loader.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/config/loader.go @@ -302,17 +302,6 @@ func (l *Loader) handleGoVersion() { l.cfg.LintersSettings.Gocritic.Go = trimmedGoVersion - // staticcheck related linters. 
- if l.cfg.LintersSettings.Staticcheck.GoVersion == "" { - l.cfg.LintersSettings.Staticcheck.GoVersion = trimmedGoVersion - } - if l.cfg.LintersSettings.Gosimple.GoVersion == "" { - l.cfg.LintersSettings.Gosimple.GoVersion = trimmedGoVersion - } - if l.cfg.LintersSettings.Stylecheck.GoVersion == "" { - l.cfg.LintersSettings.Stylecheck.GoVersion = trimmedGoVersion - } - os.Setenv("GOSECGOVERSION", l.cfg.Run.Go) } @@ -413,12 +402,6 @@ func (l *Loader) handleLinterOptionDeprecations() { l.log.Warnf("The configuration option `linters.godot.check-all` is deprecated, please use `linters.godot.scope: all`.") } - // Deprecated since v1.44.0. - if len(l.cfg.LintersSettings.Gomnd.Settings) > 0 { - l.log.Warnf("The configuration option `linters.gomnd.settings` is deprecated. Please use the options " + - "`linters.gomnd.checks`,`linters.gomnd.ignored-numbers`,`linters.gomnd.ignored-files`,`linters.gomnd.ignored-functions`.") - } - // Deprecated since v1.47.0 if l.cfg.LintersSettings.Gofumpt.LangVersion != "" { l.log.Warnf("The configuration option `linters.gofumpt.lang-version` is deprecated, please use global `run.go`.") diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner.go index c1274ec09a..ac03c71ecc 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner.go @@ -1,8 +1,3 @@ -// checker is a partial copy of https://github.com/golang/tools/blob/master/go/analysis/internal/checker -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - // Package goanalysis defines the implementation of the checker commands. 
// The same code drives the multi-analysis driver, the single-analysis // driver that is conventionally provided for convenience along with @@ -21,8 +16,8 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/packages" + "github.com/golangci/golangci-lint/internal/cache" "github.com/golangci/golangci-lint/internal/errorutil" - "github.com/golangci/golangci-lint/internal/pkgcache" "github.com/golangci/golangci-lint/pkg/goanalysis/load" "github.com/golangci/golangci-lint/pkg/logutils" "github.com/golangci/golangci-lint/pkg/timeutils" @@ -52,7 +47,7 @@ type Diagnostic struct { type runner struct { log logutils.Log prefix string // ensure unique analyzer names - pkgCache *pkgcache.Cache + pkgCache *cache.Cache loadGuard *load.Guard loadMode LoadMode passToPkg map[*analysis.Pass]*packages.Package @@ -60,7 +55,7 @@ type runner struct { sw *timeutils.Stopwatch } -func newRunner(prefix string, logger logutils.Log, pkgCache *pkgcache.Cache, loadGuard *load.Guard, +func newRunner(prefix string, logger logutils.Log, pkgCache *cache.Cache, loadGuard *load.Guard, loadMode LoadMode, sw *timeutils.Stopwatch, ) *runner { return &runner{ @@ -84,7 +79,6 @@ func (r *runner) run(analyzers []*analysis.Analyzer, initialPackages []*packages []error, map[*analysis.Pass]*packages.Package, ) { debugf("Analyzing %d packages on load mode %s", len(initialPackages), r.loadMode) - defer r.pkgCache.Trim() roots := r.analyze(initialPackages, analyzers) diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_action.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_action.go index 58ea297ea9..152cab1814 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_action.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_action.go @@ -1,21 +1,10 @@ package goanalysis import ( - "errors" "fmt" - "go/types" - "io" - "reflect" "runtime/debug" - "time" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/packages" - "golang.org/x/tools/go/types/objectpath" "github.com/golangci/golangci-lint/internal/errorutil" - "github.com/golangci/golangci-lint/internal/pkgcache" - "github.com/golangci/golangci-lint/pkg/goanalysis/pkgerrors" ) type actionAllocator struct { @@ -39,54 +28,6 @@ func (actAlloc *actionAllocator) alloc() *action { return act } -// An action represents one unit of analysis work: the application of -// one analysis to one package. Actions form a DAG, both within a -// package (as different analyzers are applied, either in sequence or -// parallel), and across packages (as dependencies are analyzed). 
-type action struct { - a *analysis.Analyzer - pkg *packages.Package - pass *analysis.Pass - deps []*action - objectFacts map[objectFactKey]analysis.Fact - packageFacts map[packageFactKey]analysis.Fact - result any - diagnostics []analysis.Diagnostic - err error - r *runner - analysisDoneCh chan struct{} - loadCachedFactsDone bool - loadCachedFactsOk bool - isroot bool - isInitialPkg bool - needAnalyzeSource bool -} - -func (act *action) String() string { - return fmt.Sprintf("%s@%s", act.a, act.pkg) -} - -func (act *action) loadCachedFacts() bool { - if act.loadCachedFactsDone { // can't be set in parallel - return act.loadCachedFactsOk - } - - res := func() bool { - if act.isInitialPkg { - return true // load cached facts only for non-initial packages - } - - if len(act.a.FactTypes) == 0 { - return true // no need to load facts - } - - return act.loadPersistedFacts() - }() - act.loadCachedFactsDone = true - act.loadCachedFactsOk = res - return res -} - func (act *action) waitUntilDependingAnalyzersWorked() { for _, dep := range act.deps { if dep.pkg == act.pkg { @@ -109,268 +50,8 @@ func (act *action) analyzeSafe() { act.a.Name, act.pkg.Name, act.isInitialPkg, act.needAnalyzeSource, p), debug.Stack()) } }() - act.r.sw.TrackStage(act.a.Name, func() { - act.analyze() - }) -} - -func (act *action) analyze() { - defer close(act.analysisDoneCh) // unblock actions depending on this action - - if !act.needAnalyzeSource { - return - } - - defer func(now time.Time) { - analyzeDebugf("go/analysis: %s: %s: analyzed package %q in %s", act.r.prefix, act.a.Name, act.pkg.Name, time.Since(now)) - }(time.Now()) - - // Report an error if any dependency failures. - var depErrors error - for _, dep := range act.deps { - if dep.err == nil { - continue - } - - depErrors = errors.Join(depErrors, errors.Unwrap(dep.err)) - } - if depErrors != nil { - act.err = fmt.Errorf("failed prerequisites: %w", depErrors) - return - } - - // Plumb the output values of the dependencies - // into the inputs of this action. Also facts. - inputs := make(map[*analysis.Analyzer]any) - startedAt := time.Now() - for _, dep := range act.deps { - if dep.pkg == act.pkg { - // Same package, different analysis (horizontal edge): - // in-memory outputs of prerequisite analyzers - // become inputs to this analysis pass. - inputs[dep.a] = dep.result - } else if dep.a == act.a { // (always true) - // Same analysis, different package (vertical edge): - // serialized facts produced by prerequisite analysis - // become available to this analysis pass. - inheritFacts(act, dep) - } - } - factsDebugf("%s: Inherited facts in %s", act, time.Since(startedAt)) - - // Run the analysis. - pass := &analysis.Pass{ - Analyzer: act.a, - Fset: act.pkg.Fset, - Files: act.pkg.Syntax, - OtherFiles: act.pkg.OtherFiles, - Pkg: act.pkg.Types, - TypesInfo: act.pkg.TypesInfo, - TypesSizes: act.pkg.TypesSizes, - ResultOf: inputs, - Report: func(d analysis.Diagnostic) { act.diagnostics = append(act.diagnostics, d) }, - ImportObjectFact: act.importObjectFact, - ExportObjectFact: act.exportObjectFact, - ImportPackageFact: act.importPackageFact, - ExportPackageFact: act.exportPackageFact, - AllObjectFacts: act.allObjectFacts, - AllPackageFacts: act.allPackageFacts, - } - act.pass = pass - act.r.passToPkgGuard.Lock() - act.r.passToPkg[pass] = act.pkg - act.r.passToPkgGuard.Unlock() - - if act.pkg.IllTyped { - // It looks like there should be !pass.Analyzer.RunDespiteErrors - // but govet's cgocall crashes on it. 
Govet itself contains !pass.Analyzer.RunDespiteErrors condition here, - // but it exits before it if packages.Load have failed. - act.err = fmt.Errorf("analysis skipped: %w", &pkgerrors.IllTypedError{Pkg: act.pkg}) - } else { - startedAt = time.Now() - act.result, act.err = pass.Analyzer.Run(pass) - analyzedIn := time.Since(startedAt) - if analyzedIn > time.Millisecond*10 { - debugf("%s: run analyzer in %s", act, analyzedIn) - } - } - - // disallow calls after Run - pass.ExportObjectFact = nil - pass.ExportPackageFact = nil - - if err := act.persistFactsToCache(); err != nil { - act.r.log.Warnf("Failed to persist facts to cache: %s", err) - } -} - -// importObjectFact implements Pass.ImportObjectFact. -// Given a non-nil pointer ptr of type *T, where *T satisfies Fact, -// importObjectFact copies the fact value to *ptr. -func (act *action) importObjectFact(obj types.Object, ptr analysis.Fact) bool { - if obj == nil { - panic("nil object") - } - key := objectFactKey{obj, act.factType(ptr)} - if v, ok := act.objectFacts[key]; ok { - reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem()) - return true - } - return false -} - -// exportObjectFact implements Pass.ExportObjectFact. -func (act *action) exportObjectFact(obj types.Object, fact analysis.Fact) { - if obj.Pkg() != act.pkg.Types { - act.r.log.Panicf("internal error: in analysis %s of package %s: Fact.Set(%s, %T): can't set facts on objects belonging another package", - act.a, act.pkg, obj, fact) - } - - key := objectFactKey{obj, act.factType(fact)} - act.objectFacts[key] = fact // clobber any existing entry - if isFactsExportDebug { - objstr := types.ObjectString(obj, (*types.Package).Name) - factsExportDebugf("%s: object %s has fact %s\n", - act.pkg.Fset.Position(obj.Pos()), objstr, fact) - } -} - -func (act *action) allObjectFacts() []analysis.ObjectFact { - out := make([]analysis.ObjectFact, 0, len(act.objectFacts)) - for key, fact := range act.objectFacts { - out = append(out, analysis.ObjectFact{ - Object: key.obj, - Fact: fact, - }) - } - return out -} - -// importPackageFact implements Pass.ImportPackageFact. -// Given a non-nil pointer ptr of type *T, where *T satisfies Fact, -// fact copies the fact value to *ptr. -func (act *action) importPackageFact(pkg *types.Package, ptr analysis.Fact) bool { - if pkg == nil { - panic("nil package") - } - key := packageFactKey{pkg, act.factType(ptr)} - if v, ok := act.packageFacts[key]; ok { - reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem()) - return true - } - return false -} - -// exportPackageFact implements Pass.ExportPackageFact. 
-func (act *action) exportPackageFact(fact analysis.Fact) { - key := packageFactKey{act.pass.Pkg, act.factType(fact)} - act.packageFacts[key] = fact // clobber any existing entry - factsDebugf("%s: package %s has fact %s\n", - act.pkg.Fset.Position(act.pass.Files[0].Pos()), act.pass.Pkg.Path(), fact) -} - -func (act *action) allPackageFacts() []analysis.PackageFact { - out := make([]analysis.PackageFact, 0, len(act.packageFacts)) - for key, fact := range act.packageFacts { - out = append(out, analysis.PackageFact{ - Package: key.pkg, - Fact: fact, - }) - } - return out -} - -func (act *action) factType(fact analysis.Fact) reflect.Type { - t := reflect.TypeOf(fact) - if t.Kind() != reflect.Ptr { - act.r.log.Fatalf("invalid Fact type: got %T, want pointer", t) - } - return t -} - -func (act *action) persistFactsToCache() error { - analyzer := act.a - if len(analyzer.FactTypes) == 0 { - return nil - } - - // Merge new facts into the package and persist them. - var facts []Fact - for key, fact := range act.packageFacts { - if key.pkg != act.pkg.Types { - // The fact is from inherited facts from another package - continue - } - facts = append(facts, Fact{ - Path: "", - Fact: fact, - }) - } - for key, fact := range act.objectFacts { - obj := key.obj - if obj.Pkg() != act.pkg.Types { - // The fact is from inherited facts from another package - continue - } - - path, err := objectpath.For(obj) - if err != nil { - // The object is not globally addressable - continue - } - - facts = append(facts, Fact{ - Path: string(path), - Fact: fact, - }) - } - - factsCacheDebugf("Caching %d facts for package %q and analyzer %s", len(facts), act.pkg.Name, act.a.Name) - - key := fmt.Sprintf("%s/facts", analyzer.Name) - return act.r.pkgCache.Put(act.pkg, pkgcache.HashModeNeedAllDeps, key, facts) -} - -func (act *action) loadPersistedFacts() bool { - var facts []Fact - key := fmt.Sprintf("%s/facts", act.a.Name) - if err := act.r.pkgCache.Get(act.pkg, pkgcache.HashModeNeedAllDeps, key, &facts); err != nil { - if !errors.Is(err, pkgcache.ErrMissing) && !errors.Is(err, io.EOF) { - act.r.log.Warnf("Failed to get persisted facts: %s", err) - } - - factsCacheDebugf("No cached facts for package %q and analyzer %s", act.pkg.Name, act.a.Name) - return false - } - - factsCacheDebugf("Loaded %d cached facts for package %q and analyzer %s", len(facts), act.pkg.Name, act.a.Name) - - for _, f := range facts { - if f.Path == "" { // this is a package fact - key := packageFactKey{act.pkg.Types, act.factType(f.Fact)} - act.packageFacts[key] = f.Fact - continue - } - obj, err := objectpath.Object(act.pkg.Types, objectpath.Path(f.Path)) - if err != nil { - // Be lenient about these errors. For example, when - // analyzing io/ioutil from source, we may get a fact - // for methods on the devNull type, and objectpath - // will happily create a path for them. However, when - // we later load io/ioutil from export data, the path - // no longer resolves. - // - // If an exported type embeds the unexported type, - // then (part of) the unexported type will become part - // of the type information and our path will resolve - // again. 
- continue - } - factKey := objectFactKey{obj, act.factType(f.Fact)} - act.objectFacts[factKey] = f.Fact - } - return true + act.r.sw.TrackStage(act.a.Name, act.analyze) } func (act *action) markDepsForAnalyzingSource() { diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_action_cache.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_action_cache.go new file mode 100644 index 0000000000..fbc2f82fa9 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_action_cache.go @@ -0,0 +1,127 @@ +package goanalysis + +import ( + "errors" + "fmt" + "io" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/types/objectpath" + + "github.com/golangci/golangci-lint/internal/cache" +) + +type Fact struct { + Path string // non-empty only for object facts + Fact analysis.Fact +} + +func (act *action) loadCachedFacts() bool { + if act.loadCachedFactsDone { // can't be set in parallel + return act.loadCachedFactsOk + } + + res := func() bool { + if act.isInitialPkg { + return true // load cached facts only for non-initial packages + } + + if len(act.a.FactTypes) == 0 { + return true // no need to load facts + } + + return act.loadPersistedFacts() + }() + act.loadCachedFactsDone = true + act.loadCachedFactsOk = res + return res +} + +func (act *action) persistFactsToCache() error { + analyzer := act.a + if len(analyzer.FactTypes) == 0 { + return nil + } + + // Merge new facts into the package and persist them. + var facts []Fact + for key, fact := range act.packageFacts { + if key.pkg != act.pkg.Types { + // The fact is from inherited facts from another package + continue + } + facts = append(facts, Fact{ + Path: "", + Fact: fact, + }) + } + for key, fact := range act.objectFacts { + obj := key.obj + if obj.Pkg() != act.pkg.Types { + // The fact is from inherited facts from another package + continue + } + + path, err := objectpath.For(obj) + if err != nil { + // The object is not globally addressable + continue + } + + facts = append(facts, Fact{ + Path: string(path), + Fact: fact, + }) + } + + factsCacheDebugf("Caching %d facts for package %q and analyzer %s", len(facts), act.pkg.Name, act.a.Name) + + return act.r.pkgCache.Put(act.pkg, cache.HashModeNeedAllDeps, factCacheKey(analyzer), facts) +} + +func (act *action) loadPersistedFacts() bool { + var facts []Fact + + err := act.r.pkgCache.Get(act.pkg, cache.HashModeNeedAllDeps, factCacheKey(act.a), &facts) + if err != nil { + if !errors.Is(err, cache.ErrMissing) && !errors.Is(err, io.EOF) { + act.r.log.Warnf("Failed to get persisted facts: %s", err) + } + + factsCacheDebugf("No cached facts for package %q and analyzer %s", act.pkg.Name, act.a.Name) + return false + } + + factsCacheDebugf("Loaded %d cached facts for package %q and analyzer %s", len(facts), act.pkg.Name, act.a.Name) + + for _, f := range facts { + if f.Path == "" { // this is a package fact + key := packageFactKey{act.pkg.Types, act.factType(f.Fact)} + act.packageFacts[key] = f.Fact + continue + } + obj, err := objectpath.Object(act.pkg.Types, objectpath.Path(f.Path)) + if err != nil { + // Be lenient about these errors. For example, when + // analyzing io/ioutil from source, we may get a fact + // for methods on the devNull type, and objectpath + // will happily create a path for them. However, when + // we later load io/ioutil from export data, the path + // no longer resolves. 
+ // + // If an exported type embeds the unexported type, + // then (part of) the unexported type will become part + // of the type information and our path will resolve + // again. + continue + } + factKey := objectFactKey{obj, act.factType(f.Fact)} + act.objectFacts[factKey] = f.Fact + } + + return true +} + +func factCacheKey(a *analysis.Analyzer) string { + return fmt.Sprintf("%s/facts", a.Name) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_base.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_base.go new file mode 100644 index 0000000000..d868f8f5da --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_base.go @@ -0,0 +1,370 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. +// +// Partial copy of https://github.com/golang/tools/blob/dba5486c2a1d03519930812112b23ed2c45c04fc/go/analysis/internal/checker/checker.go + +package goanalysis + +import ( + "bytes" + "encoding/gob" + "errors" + "fmt" + "go/types" + "reflect" + "time" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/packages" + + "github.com/golangci/golangci-lint/pkg/goanalysis/pkgerrors" +) + +// NOTE(ldez) altered: custom fields; remove 'once' and 'duration'. +// An action represents one unit of analysis work: the application of +// one analysis to one package. Actions form a DAG, both within a +// package (as different analyzers are applied, either in sequence or +// parallel), and across packages (as dependencies are analyzed). +type action struct { + a *analysis.Analyzer + pkg *packages.Package + pass *analysis.Pass + isroot bool + deps []*action + objectFacts map[objectFactKey]analysis.Fact + packageFacts map[packageFactKey]analysis.Fact + result any + diagnostics []analysis.Diagnostic + err error + + // NOTE(ldez) custom fields. + r *runner + analysisDoneCh chan struct{} + loadCachedFactsDone bool + loadCachedFactsOk bool + isInitialPkg bool + needAnalyzeSource bool +} + +// NOTE(ldez) no alteration. +type objectFactKey struct { + obj types.Object + typ reflect.Type +} + +// NOTE(ldez) no alteration. +type packageFactKey struct { + pkg *types.Package + typ reflect.Type +} + +// NOTE(ldez) no alteration. +func (act *action) String() string { + return fmt.Sprintf("%s@%s", act.a, act.pkg) +} + +// NOTE(ldez) altered version of `func (act *action) execOnce()`. +func (act *action) analyze() { + defer close(act.analysisDoneCh) // unblock actions depending on this action + + if !act.needAnalyzeSource { + return + } + + defer func(now time.Time) { + analyzeDebugf("go/analysis: %s: %s: analyzed package %q in %s", act.r.prefix, act.a.Name, act.pkg.Name, time.Since(now)) + }(time.Now()) + + // Report an error if any dependency failures. + var depErrors error + for _, dep := range act.deps { + if dep.err != nil { + depErrors = errors.Join(depErrors, errors.Unwrap(dep.err)) + } + } + + if depErrors != nil { + act.err = fmt.Errorf("failed prerequisites: %w", depErrors) + return + } + + // Plumb the output values of the dependencies + // into the inputs of this action. Also facts. 
+ inputs := make(map[*analysis.Analyzer]any) + act.objectFacts = make(map[objectFactKey]analysis.Fact) + act.packageFacts = make(map[packageFactKey]analysis.Fact) + startedAt := time.Now() + + for _, dep := range act.deps { + if dep.pkg == act.pkg { + // Same package, different analysis (horizontal edge): + // in-memory outputs of prerequisite analyzers + // become inputs to this analysis pass. + inputs[dep.a] = dep.result + } else if dep.a == act.a { // (always true) + // Same analysis, different package (vertical edge): + // serialized facts produced by prerequisite analysis + // become available to this analysis pass. + inheritFacts(act, dep) + } + } + + factsDebugf("%s: Inherited facts in %s", act, time.Since(startedAt)) + + module := &analysis.Module{} // possibly empty (non nil) in go/analysis drivers. + if mod := act.pkg.Module; mod != nil { + module.Path = mod.Path + module.Version = mod.Version + module.GoVersion = mod.GoVersion + } + + // Run the analysis. + pass := &analysis.Pass{ + Analyzer: act.a, + Fset: act.pkg.Fset, + Files: act.pkg.Syntax, + OtherFiles: act.pkg.OtherFiles, + IgnoredFiles: act.pkg.IgnoredFiles, + Pkg: act.pkg.Types, + TypesInfo: act.pkg.TypesInfo, + TypesSizes: act.pkg.TypesSizes, + TypeErrors: act.pkg.TypeErrors, + Module: module, + + ResultOf: inputs, + Report: func(d analysis.Diagnostic) { act.diagnostics = append(act.diagnostics, d) }, + ImportObjectFact: act.importObjectFact, + ExportObjectFact: act.exportObjectFact, + ImportPackageFact: act.importPackageFact, + ExportPackageFact: act.exportPackageFact, + AllObjectFacts: act.allObjectFacts, + AllPackageFacts: act.allPackageFacts, + } + + act.pass = pass + act.r.passToPkgGuard.Lock() + act.r.passToPkg[pass] = act.pkg + act.r.passToPkgGuard.Unlock() + + if act.pkg.IllTyped { + // It looks like there should be !pass.Analyzer.RunDespiteErrors + // but govet's cgocall crashes on it. Govet itself contains !pass.Analyzer.RunDespiteErrors condition here, + // but it exits before it if packages.Load have failed. + act.err = fmt.Errorf("analysis skipped: %w", &pkgerrors.IllTypedError{Pkg: act.pkg}) + } else { + startedAt = time.Now() + + act.result, act.err = pass.Analyzer.Run(pass) + + analyzedIn := time.Since(startedAt) + if analyzedIn > time.Millisecond*10 { + debugf("%s: run analyzer in %s", act, analyzedIn) + } + } + + // disallow calls after Run + pass.ExportObjectFact = nil + pass.ExportPackageFact = nil + + err := act.persistFactsToCache() + if err != nil { + act.r.log.Warnf("Failed to persist facts to cache: %s", err) + } +} + +// NOTE(ldez) altered: logger; serialize. +// inheritFacts populates act.facts with +// those it obtains from its dependency, dep. +func inheritFacts(act, dep *action) { + const serialize = false + + for key, fact := range dep.objectFacts { + // Filter out facts related to objects + // that are irrelevant downstream + // (equivalently: not in the compiler export data). + if !exportedFrom(key.obj, dep.pkg.Types) { + factsInheritDebugf("%v: discarding %T fact from %s for %s: %s", act, fact, dep, key.obj, fact) + continue + } + + // Optionally serialize/deserialize fact + // to verify that it works across address spaces. 
+ if serialize { + encodedFact, err := codeFact(fact) + if err != nil { + act.r.log.Panicf("internal error: encoding of %T fact failed in %v: %v", fact, act, err) + } + fact = encodedFact + } + + factsInheritDebugf("%v: inherited %T fact for %s: %s", act, fact, key.obj, fact) + + act.objectFacts[key] = fact + } + + for key, fact := range dep.packageFacts { + // TODO: filter out facts that belong to + // packages not mentioned in the export data + // to prevent side channels. + + // Optionally serialize/deserialize fact + // to verify that it works across address spaces + // and is deterministic. + if serialize { + encodedFact, err := codeFact(fact) + if err != nil { + act.r.log.Panicf("internal error: encoding of %T fact failed in %v", fact, act) + } + fact = encodedFact + } + + factsInheritDebugf("%v: inherited %T fact for %s: %s", act, fact, key.pkg.Path(), fact) + + act.packageFacts[key] = fact + } +} + +// NOTE(ldez) no alteration. +// codeFact encodes then decodes a fact, +// just to exercise that logic. +func codeFact(fact analysis.Fact) (analysis.Fact, error) { + // We encode facts one at a time. + // A real modular driver would emit all facts + // into one encoder to improve gob efficiency. + var buf bytes.Buffer + if err := gob.NewEncoder(&buf).Encode(fact); err != nil { + return nil, err + } + + // Encode it twice and assert that we get the same bits. + // This helps detect nondeterministic Gob encoding (e.g. of maps). + var buf2 bytes.Buffer + if err := gob.NewEncoder(&buf2).Encode(fact); err != nil { + return nil, err + } + if !bytes.Equal(buf.Bytes(), buf2.Bytes()) { + return nil, fmt.Errorf("encoding of %T fact is nondeterministic", fact) + } + + newFact := reflect.New(reflect.TypeOf(fact).Elem()).Interface().(analysis.Fact) + if err := gob.NewDecoder(&buf).Decode(newFact); err != nil { + return nil, err + } + return newFact, nil +} + +// NOTE(ldez) no alteration. +// exportedFrom reports whether obj may be visible to a package that imports pkg. +// This includes not just the exported members of pkg, but also unexported +// constants, types, fields, and methods, perhaps belonging to other packages, +// that find there way into the API. +// This is an over-approximation of the more accurate approach used by +// gc export data, which walks the type graph, but it's much simpler. +// +// TODO(adonovan): do more accurate filtering by walking the type graph. +func exportedFrom(obj types.Object, pkg *types.Package) bool { + switch obj := obj.(type) { + case *types.Func: + return obj.Exported() && obj.Pkg() == pkg || + obj.Type().(*types.Signature).Recv() != nil + case *types.Var: + if obj.IsField() { + return true + } + // we can't filter more aggressively than this because we need + // to consider function parameters exported, but have no way + // of telling apart function parameters from local variables. + return obj.Pkg() == pkg + case *types.TypeName, *types.Const: + return true + } + return false // Nil, Builtin, Label, or PkgName +} + +// NOTE(ldez) altered: logger; `act.factType` +// importObjectFact implements Pass.ImportObjectFact. +// Given a non-nil pointer ptr of type *T, where *T satisfies Fact, +// importObjectFact copies the fact value to *ptr. 
+func (act *action) importObjectFact(obj types.Object, ptr analysis.Fact) bool { + if obj == nil { + panic("nil object") + } + key := objectFactKey{obj, act.factType(ptr)} + if v, ok := act.objectFacts[key]; ok { + reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem()) + return true + } + return false +} + +// NOTE(ldez) altered: removes code related to `act.pass.ExportPackageFact`; logger; `act.factType`. +// exportObjectFact implements Pass.ExportObjectFact. +func (act *action) exportObjectFact(obj types.Object, fact analysis.Fact) { + if obj.Pkg() != act.pkg.Types { + act.r.log.Panicf("internal error: in analysis %s of package %s: Fact.Set(%s, %T): can't set facts on objects belonging another package", + act.a, act.pkg, obj, fact) + } + + key := objectFactKey{obj, act.factType(fact)} + act.objectFacts[key] = fact // clobber any existing entry + if isFactsExportDebug { + objstr := types.ObjectString(obj, (*types.Package).Name) + + factsExportDebugf("%s: object %s has fact %s\n", + act.pkg.Fset.Position(obj.Pos()), objstr, fact) + } +} + +// NOTE(ldez) no alteration. +func (act *action) allObjectFacts() []analysis.ObjectFact { + facts := make([]analysis.ObjectFact, 0, len(act.objectFacts)) + for k := range act.objectFacts { + facts = append(facts, analysis.ObjectFact{Object: k.obj, Fact: act.objectFacts[k]}) + } + return facts +} + +// NOTE(ldez) altered: `act.factType` +// importPackageFact implements Pass.ImportPackageFact. +// Given a non-nil pointer ptr of type *T, where *T satisfies Fact, +// fact copies the fact value to *ptr. +func (act *action) importPackageFact(pkg *types.Package, ptr analysis.Fact) bool { + if pkg == nil { + panic("nil package") + } + key := packageFactKey{pkg, act.factType(ptr)} + if v, ok := act.packageFacts[key]; ok { + reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem()) + return true + } + return false +} + +// NOTE(ldez) altered: removes code related to `act.pass.ExportPackageFact`; logger; `act.factType`. +// exportPackageFact implements Pass.ExportPackageFact. +func (act *action) exportPackageFact(fact analysis.Fact) { + key := packageFactKey{act.pass.Pkg, act.factType(fact)} + act.packageFacts[key] = fact // clobber any existing entry + + factsDebugf("%s: package %s has fact %s\n", + act.pkg.Fset.Position(act.pass.Files[0].Pos()), act.pass.Pkg.Path(), fact) +} + +// NOTE(ldez) altered: add receiver to handle logs. +func (act *action) factType(fact analysis.Fact) reflect.Type { + t := reflect.TypeOf(fact) + if t.Kind() != reflect.Ptr { + act.r.log.Fatalf("invalid Fact type: got %T, want pointer", fact) + } + return t +} + +// NOTE(ldez) no alteration. 
+func (act *action) allPackageFacts() []analysis.PackageFact { + facts := make([]analysis.PackageFact, 0, len(act.packageFacts)) + for k := range act.packageFacts { + facts = append(facts, analysis.PackageFact{Package: k.pkg, Fact: act.packageFacts[k]}) + } + return facts +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_facts.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_facts.go deleted file mode 100644 index 1d0fb974e7..0000000000 --- a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_facts.go +++ /dev/null @@ -1,125 +0,0 @@ -package goanalysis - -import ( - "bytes" - "encoding/gob" - "fmt" - "go/types" - "reflect" - - "golang.org/x/tools/go/analysis" -) - -type objectFactKey struct { - obj types.Object - typ reflect.Type -} - -type packageFactKey struct { - pkg *types.Package - typ reflect.Type -} - -type Fact struct { - Path string // non-empty only for object facts - Fact analysis.Fact -} - -// inheritFacts populates act.facts with -// those it obtains from its dependency, dep. -func inheritFacts(act, dep *action) { - serialize := false - - for key, fact := range dep.objectFacts { - // Filter out facts related to objects - // that are irrelevant downstream - // (equivalently: not in the compiler export data). - if !exportedFrom(key.obj, dep.pkg.Types) { - factsInheritDebugf("%v: discarding %T fact from %s for %s: %s", act, fact, dep, key.obj, fact) - continue - } - - // Optionally serialize/deserialize fact - // to verify that it works across address spaces. - if serialize { - var err error - fact, err = codeFact(fact) - if err != nil { - act.r.log.Panicf("internal error: encoding of %T fact failed in %v", fact, act) - } - } - - factsInheritDebugf("%v: inherited %T fact for %s: %s", act, fact, key.obj, fact) - act.objectFacts[key] = fact - } - - for key, fact := range dep.packageFacts { - // TODO: filter out facts that belong to - // packages not mentioned in the export data - // to prevent side channels. - - // Optionally serialize/deserialize fact - // to verify that it works across address spaces - // and is deterministic. - if serialize { - var err error - fact, err = codeFact(fact) - if err != nil { - act.r.log.Panicf("internal error: encoding of %T fact failed in %v", fact, act) - } - } - - factsInheritDebugf("%v: inherited %T fact for %s: %s", act, fact, key.pkg.Path(), fact) - act.packageFacts[key] = fact - } -} - -// codeFact encodes then decodes a fact, -// just to exercise that logic. -func codeFact(fact analysis.Fact) (analysis.Fact, error) { - // We encode facts one at a time. - // A real modular driver would emit all facts - // into one encoder to improve gob efficiency. - var buf bytes.Buffer - if err := gob.NewEncoder(&buf).Encode(fact); err != nil { - return nil, err - } - - // Encode it twice and assert that we get the same bits. - // This helps detect nondeterministic Gob encoding (e.g. of maps). - var buf2 bytes.Buffer - if err := gob.NewEncoder(&buf2).Encode(fact); err != nil { - return nil, err - } - if !bytes.Equal(buf.Bytes(), buf2.Bytes()) { - return nil, fmt.Errorf("encoding of %T fact is nondeterministic", fact) - } - - newFact := reflect.New(reflect.TypeOf(fact).Elem()).Interface().(analysis.Fact) - if err := gob.NewDecoder(&buf).Decode(newFact); err != nil { - return nil, err - } - return newFact, nil -} - -// exportedFrom reports whether obj may be visible to a package that imports pkg. 
-// This includes not just the exported members of pkg, but also unexported -// constants, types, fields, and methods, perhaps belonging to other packages, -// that find there way into the API. -// This is an over-approximation of the more accurate approach used by -// gc export data, which walks the type graph, but it's much simpler. -// -// TODO(adonovan): do more accurate filtering by walking the type graph. -func exportedFrom(obj types.Object, pkg *types.Package) bool { - switch obj := obj.(type) { - case *types.Func: - return obj.Exported() && obj.Pkg() == pkg || - obj.Type().(*types.Signature).Recv() != nil - case *types.Var: - return obj.Exported() && obj.Pkg() == pkg || - obj.IsField() - case *types.TypeName, *types.Const: - return true - } - return false // Nil, Builtin, Label, or PkgName -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_loadingpackage.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_loadingpackage.go index 8abe2b6c1c..44d6769586 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_loadingpackage.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_loadingpackage.go @@ -4,11 +4,13 @@ import ( "errors" "fmt" "go/ast" + "go/build" "go/parser" "go/scanner" "go/types" "os" "reflect" + "strings" "sync" "sync/atomic" @@ -152,10 +154,15 @@ func (lp *loadingPackage) loadFromSource(loadMode LoadMode) error { return imp.Types, nil } - // TODO(ldez) temporary workaround - rv, err := goutil.CleanRuntimeVersion() - if err != nil { - return err + var goVersion string + if pkg.Module != nil && pkg.Module.GoVersion != "" { + goVersion = "go" + strings.TrimPrefix(pkg.Module.GoVersion, "go") + } else { + var err error + goVersion, err = goutil.CleanRuntimeVersion() + if err != nil { + return err + } } tc := &types.Config{ @@ -163,7 +170,8 @@ func (lp *loadingPackage) loadFromSource(loadMode LoadMode) error { Error: func(err error) { pkg.Errors = append(pkg.Errors, lp.convertError(err)...) 
}, - GoVersion: rv, // TODO(ldez) temporary workaround + GoVersion: goVersion, + Sizes: types.SizesFor(build.Default.Compiler, build.Default.GOARCH), } _ = types.NewChecker(tc, pkg.Fset, pkg.Types, pkg.TypesInfo).Files(pkg.Syntax) diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runners.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runners.go index 79e52f52a3..a9aee03a2b 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runners.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runners.go @@ -2,17 +2,10 @@ package goanalysis import ( "fmt" - "runtime" - "sort" - "strings" - "sync" - "sync/atomic" - "time" "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/packages" - "github.com/golangci/golangci-lint/internal/pkgcache" "github.com/golangci/golangci-lint/pkg/goanalysis/pkgerrors" "github.com/golangci/golangci-lint/pkg/lint/linter" "github.com/golangci/golangci-lint/pkg/logutils" @@ -119,156 +112,3 @@ func buildIssues(diags []Diagnostic, linterNameBuilder func(diag *Diagnostic) st } return issues } - -func getIssuesCacheKey(analyzers []*analysis.Analyzer) string { - return "lint/result:" + analyzersHashID(analyzers) -} - -func saveIssuesToCache(allPkgs []*packages.Package, pkgsFromCache map[*packages.Package]bool, - issues []result.Issue, lintCtx *linter.Context, analyzers []*analysis.Analyzer, -) { - startedAt := time.Now() - perPkgIssues := map[*packages.Package][]result.Issue{} - for ind := range issues { - i := &issues[ind] - perPkgIssues[i.Pkg] = append(perPkgIssues[i.Pkg], *i) - } - - var savedIssuesCount int64 = 0 - lintResKey := getIssuesCacheKey(analyzers) - - workerCount := runtime.GOMAXPROCS(-1) - var wg sync.WaitGroup - wg.Add(workerCount) - - pkgCh := make(chan *packages.Package, len(allPkgs)) - for i := 0; i < workerCount; i++ { - go func() { - defer wg.Done() - for pkg := range pkgCh { - pkgIssues := perPkgIssues[pkg] - encodedIssues := make([]EncodingIssue, 0, len(pkgIssues)) - for ind := range pkgIssues { - i := &pkgIssues[ind] - encodedIssues = append(encodedIssues, EncodingIssue{ - FromLinter: i.FromLinter, - Text: i.Text, - Severity: i.Severity, - Pos: i.Pos, - LineRange: i.LineRange, - Replacement: i.Replacement, - ExpectNoLint: i.ExpectNoLint, - ExpectedNoLintLinter: i.ExpectedNoLintLinter, - }) - } - - atomic.AddInt64(&savedIssuesCount, int64(len(encodedIssues))) - if err := lintCtx.PkgCache.Put(pkg, pkgcache.HashModeNeedAllDeps, lintResKey, encodedIssues); err != nil { - lintCtx.Log.Infof("Failed to save package %s issues (%d) to cache: %s", pkg, len(pkgIssues), err) - } else { - issuesCacheDebugf("Saved package %s issues (%d) to cache", pkg, len(pkgIssues)) - } - } - }() - } - - for _, pkg := range allPkgs { - if pkgsFromCache[pkg] { - continue - } - - pkgCh <- pkg - } - close(pkgCh) - wg.Wait() - - issuesCacheDebugf("Saved %d issues from %d packages to cache in %s", savedIssuesCount, len(allPkgs), time.Since(startedAt)) -} - -func loadIssuesFromCache(pkgs []*packages.Package, lintCtx *linter.Context, - analyzers []*analysis.Analyzer, -) (issuesFromCache []result.Issue, pkgsFromCache map[*packages.Package]bool) { - startedAt := time.Now() - - lintResKey := getIssuesCacheKey(analyzers) - type cacheRes struct { - issues []result.Issue - loadErr error - } - pkgToCacheRes := make(map[*packages.Package]*cacheRes, len(pkgs)) - for _, pkg := range pkgs { - pkgToCacheRes[pkg] = &cacheRes{} - } - - workerCount := runtime.GOMAXPROCS(-1) - var wg sync.WaitGroup - wg.Add(workerCount) - - pkgCh := make(chan 
*packages.Package, len(pkgs)) - for range workerCount { - go func() { - defer wg.Done() - for pkg := range pkgCh { - var pkgIssues []EncodingIssue - err := lintCtx.PkgCache.Get(pkg, pkgcache.HashModeNeedAllDeps, lintResKey, &pkgIssues) - cacheRes := pkgToCacheRes[pkg] - cacheRes.loadErr = err - if err != nil { - continue - } - if len(pkgIssues) == 0 { - continue - } - - issues := make([]result.Issue, 0, len(pkgIssues)) - for i := range pkgIssues { - issue := &pkgIssues[i] - issues = append(issues, result.Issue{ - FromLinter: issue.FromLinter, - Text: issue.Text, - Severity: issue.Severity, - Pos: issue.Pos, - LineRange: issue.LineRange, - Replacement: issue.Replacement, - Pkg: pkg, - ExpectNoLint: issue.ExpectNoLint, - ExpectedNoLintLinter: issue.ExpectedNoLintLinter, - }) - } - cacheRes.issues = issues - } - }() - } - - for _, pkg := range pkgs { - pkgCh <- pkg - } - close(pkgCh) - wg.Wait() - - loadedIssuesCount := 0 - pkgsFromCache = map[*packages.Package]bool{} - for pkg, cacheRes := range pkgToCacheRes { - if cacheRes.loadErr == nil { - loadedIssuesCount += len(cacheRes.issues) - pkgsFromCache[pkg] = true - issuesFromCache = append(issuesFromCache, cacheRes.issues...) - issuesCacheDebugf("Loaded package %s issues (%d) from cache", pkg, len(cacheRes.issues)) - } else { - issuesCacheDebugf("Didn't load package %s issues from cache: %s", pkg, cacheRes.loadErr) - } - } - issuesCacheDebugf("Loaded %d issues from cache in %s, analyzing %d/%d packages", - loadedIssuesCount, time.Since(startedAt), len(pkgs)-len(pkgsFromCache), len(pkgs)) - return issuesFromCache, pkgsFromCache -} - -func analyzersHashID(analyzers []*analysis.Analyzer) string { - names := make([]string, 0, len(analyzers)) - for _, a := range analyzers { - names = append(names, a.Name) - } - - sort.Strings(names) - return strings.Join(names, ",") -} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runners_cache.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runners_cache.go new file mode 100644 index 0000000000..8c244688b4 --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runners_cache.go @@ -0,0 +1,172 @@ +package goanalysis + +import ( + "runtime" + "sort" + "strings" + "sync" + "sync/atomic" + "time" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/packages" + + "github.com/golangci/golangci-lint/internal/cache" + "github.com/golangci/golangci-lint/pkg/lint/linter" + "github.com/golangci/golangci-lint/pkg/result" +) + +func saveIssuesToCache(allPkgs []*packages.Package, pkgsFromCache map[*packages.Package]bool, + issues []result.Issue, lintCtx *linter.Context, analyzers []*analysis.Analyzer, +) { + startedAt := time.Now() + perPkgIssues := map[*packages.Package][]result.Issue{} + for ind := range issues { + i := &issues[ind] + perPkgIssues[i.Pkg] = append(perPkgIssues[i.Pkg], *i) + } + + var savedIssuesCount int64 = 0 + lintResKey := getIssuesCacheKey(analyzers) + + workerCount := runtime.GOMAXPROCS(-1) + var wg sync.WaitGroup + wg.Add(workerCount) + + pkgCh := make(chan *packages.Package, len(allPkgs)) + for i := 0; i < workerCount; i++ { + go func() { + defer wg.Done() + for pkg := range pkgCh { + pkgIssues := perPkgIssues[pkg] + encodedIssues := make([]EncodingIssue, 0, len(pkgIssues)) + for ind := range pkgIssues { + i := &pkgIssues[ind] + encodedIssues = append(encodedIssues, EncodingIssue{ + FromLinter: i.FromLinter, + Text: i.Text, + Severity: i.Severity, + Pos: i.Pos, + LineRange: i.LineRange, + Replacement: i.Replacement, + ExpectNoLint: 
i.ExpectNoLint, + ExpectedNoLintLinter: i.ExpectedNoLintLinter, + }) + } + + atomic.AddInt64(&savedIssuesCount, int64(len(encodedIssues))) + if err := lintCtx.PkgCache.Put(pkg, cache.HashModeNeedAllDeps, lintResKey, encodedIssues); err != nil { + lintCtx.Log.Infof("Failed to save package %s issues (%d) to cache: %s", pkg, len(pkgIssues), err) + } else { + issuesCacheDebugf("Saved package %s issues (%d) to cache", pkg, len(pkgIssues)) + } + } + }() + } + + for _, pkg := range allPkgs { + if pkgsFromCache[pkg] { + continue + } + + pkgCh <- pkg + } + close(pkgCh) + wg.Wait() + + lintCtx.PkgCache.Close() + + issuesCacheDebugf("Saved %d issues from %d packages to cache in %s", savedIssuesCount, len(allPkgs), time.Since(startedAt)) +} + +func loadIssuesFromCache(pkgs []*packages.Package, lintCtx *linter.Context, + analyzers []*analysis.Analyzer, +) (issuesFromCache []result.Issue, pkgsFromCache map[*packages.Package]bool) { + startedAt := time.Now() + + lintResKey := getIssuesCacheKey(analyzers) + type cacheRes struct { + issues []result.Issue + loadErr error + } + pkgToCacheRes := make(map[*packages.Package]*cacheRes, len(pkgs)) + for _, pkg := range pkgs { + pkgToCacheRes[pkg] = &cacheRes{} + } + + workerCount := runtime.GOMAXPROCS(-1) + var wg sync.WaitGroup + wg.Add(workerCount) + + pkgCh := make(chan *packages.Package, len(pkgs)) + for range workerCount { + go func() { + defer wg.Done() + for pkg := range pkgCh { + var pkgIssues []EncodingIssue + err := lintCtx.PkgCache.Get(pkg, cache.HashModeNeedAllDeps, lintResKey, &pkgIssues) + cacheRes := pkgToCacheRes[pkg] + cacheRes.loadErr = err + if err != nil { + continue + } + if len(pkgIssues) == 0 { + continue + } + + issues := make([]result.Issue, 0, len(pkgIssues)) + for i := range pkgIssues { + issue := &pkgIssues[i] + issues = append(issues, result.Issue{ + FromLinter: issue.FromLinter, + Text: issue.Text, + Severity: issue.Severity, + Pos: issue.Pos, + LineRange: issue.LineRange, + Replacement: issue.Replacement, + Pkg: pkg, + ExpectNoLint: issue.ExpectNoLint, + ExpectedNoLintLinter: issue.ExpectedNoLintLinter, + }) + } + cacheRes.issues = issues + } + }() + } + + for _, pkg := range pkgs { + pkgCh <- pkg + } + close(pkgCh) + wg.Wait() + + loadedIssuesCount := 0 + pkgsFromCache = map[*packages.Package]bool{} + for pkg, cacheRes := range pkgToCacheRes { + if cacheRes.loadErr == nil { + loadedIssuesCount += len(cacheRes.issues) + pkgsFromCache[pkg] = true + issuesFromCache = append(issuesFromCache, cacheRes.issues...) 
+ issuesCacheDebugf("Loaded package %s issues (%d) from cache", pkg, len(cacheRes.issues)) + } else { + issuesCacheDebugf("Didn't load package %s issues from cache: %s", pkg, cacheRes.loadErr) + } + } + issuesCacheDebugf("Loaded %d issues from cache in %s, analyzing %d/%d packages", + loadedIssuesCount, time.Since(startedAt), len(pkgs)-len(pkgsFromCache), len(pkgs)) + return issuesFromCache, pkgsFromCache +} + +func getIssuesCacheKey(analyzers []*analysis.Analyzer) string { + return "lint/result:" + analyzersHashID(analyzers) +} + +func analyzersHashID(analyzers []*analysis.Analyzer) string { + names := make([]string, 0, len(analyzers)) + for _, a := range analyzers { + names = append(names, a.Name) + } + + sort.Strings(names) + return strings.Join(names, ",") +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/cyclop/cyclop.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/cyclop/cyclop.go index eb8c0577a5..13baba5a69 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/cyclop/cyclop.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/cyclop/cyclop.go @@ -30,7 +30,7 @@ func New(settings *config.Cyclop) *goanalysis.Linter { return goanalysis.NewLinter( a.Name, - "checks function and package cyclomatic complexity", + a.Doc, []*analysis.Analyzer{a}, cfg, ).WithLoadMode(goanalysis.LoadModeTypesInfo) diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/ginkgolinter/ginkgolinter.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/ginkgolinter/ginkgolinter.go index 54d2072570..9873c9ba49 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/ginkgolinter/ginkgolinter.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/ginkgolinter/ginkgolinter.go @@ -23,8 +23,9 @@ func New(settings *config.GinkgoLinterSettings) *goanalysis.Linter { SuppressTypeCompare: types.Boolean(settings.SuppressTypeCompareWarning), AllowHaveLen0: types.Boolean(settings.AllowHaveLenZero), ForceExpectTo: types.Boolean(settings.ForceExpectTo), - ValidateAsyncIntervals: types.Boolean(settings.ForbidSpecPollution), - ForbidSpecPollution: types.Boolean(settings.ValidateAsyncIntervals), + ValidateAsyncIntervals: types.Boolean(settings.ValidateAsyncIntervals), + ForbidSpecPollution: types.Boolean(settings.ForbidSpecPollution), + ForceSucceedForFuncs: types.Boolean(settings.ForceSucceedForFuncs), } } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecksumtype/gochecksumtype.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecksumtype/gochecksumtype.go index 446f0e564f..7aab0efebb 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecksumtype/gochecksumtype.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecksumtype/gochecksumtype.go @@ -8,6 +8,7 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/packages" + "github.com/golangci/golangci-lint/pkg/config" "github.com/golangci/golangci-lint/pkg/goanalysis" "github.com/golangci/golangci-lint/pkg/lint/linter" "github.com/golangci/golangci-lint/pkg/result" @@ -15,7 +16,7 @@ import ( const linterName = "gochecksumtype" -func New() *goanalysis.Linter { +func New(settings *config.GoChecksumTypeSettings) *goanalysis.Linter { var mu sync.Mutex var resIssues []goanalysis.Issue @@ -23,7 +24,7 @@ func New() *goanalysis.Linter { Name: linterName, Doc: goanalysis.TheOnlyanalyzerDoc, Run: func(pass *analysis.Pass) (any, error) { - issues, err := runGoCheckSumType(pass) + issues, err := 
runGoCheckSumType(pass, settings) if err != nil { return nil, err } @@ -50,7 +51,7 @@ func New() *goanalysis.Linter { }).WithLoadMode(goanalysis.LoadModeTypesInfo) } -func runGoCheckSumType(pass *analysis.Pass) ([]goanalysis.Issue, error) { +func runGoCheckSumType(pass *analysis.Pass, settings *config.GoChecksumTypeSettings) ([]goanalysis.Issue, error) { var resIssues []goanalysis.Issue pkg := &packages.Package{ @@ -61,7 +62,8 @@ func runGoCheckSumType(pass *analysis.Pass) ([]goanalysis.Issue, error) { } var unknownError error - errors := gochecksumtype.Run([]*packages.Package{pkg}) + errors := gochecksumtype.Run([]*packages.Package{pkg}, + gochecksumtype.Config{DefaultSignifiesExhaustive: settings.DefaultSignifiesExhaustive}) for _, err := range errors { err, ok := err.(gochecksumtype.Error) if !ok { diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic/gocritic.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic/gocritic.go index 68cc338e43..194ea35355 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic/gocritic.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic/gocritic.go @@ -8,6 +8,7 @@ import ( "path/filepath" "reflect" "runtime" + "slices" "sort" "strings" "sync" @@ -16,7 +17,6 @@ import ( gocriticlinter "github.com/go-critic/go-critic/linter" _ "github.com/quasilyte/go-ruleguard/dsl" "golang.org/x/exp/maps" - "golang.org/x/exp/slices" "golang.org/x/tools/go/analysis" "github.com/golangci/golangci-lint/pkg/config" diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader/goheader.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader/goheader.go index 14d517fb30..c6b1aae6be 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader/goheader.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader/goheader.go @@ -51,7 +51,7 @@ func New(settings *config.GoHeaderSettings) *goanalysis.Linter { return goanalysis.NewLinter( linterName, - "Checks is file header matches to pattern", + "Checks if file header matches to pattern", []*analysis.Analyzer{analyzer}, nil, ).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue { diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goprintffuncname/goprintffuncname.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goprintffuncname/goprintffuncname.go index 85154a9b38..c206ffaa3e 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goprintffuncname/goprintffuncname.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goprintffuncname/goprintffuncname.go @@ -1,7 +1,7 @@ package goprintffuncname import ( - "github.com/jirfag/go-printf-func-name/pkg/analyzer" + "github.com/golangci/go-printf-func-name/pkg/analyzer" "golang.org/x/tools/go/analysis" "github.com/golangci/golangci-lint/pkg/goanalysis" diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/iface/iface.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/iface/iface.go new file mode 100644 index 0000000000..31f88160ea --- /dev/null +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/iface/iface.go @@ -0,0 +1,57 @@ +package iface + +import ( + "slices" + + "github.com/uudashr/iface/identical" + "github.com/uudashr/iface/opaque" + "github.com/uudashr/iface/unused" + "golang.org/x/tools/go/analysis" + + "github.com/golangci/golangci-lint/pkg/config" + "github.com/golangci/golangci-lint/pkg/goanalysis" +) + +func New(settings *config.IfaceSettings) 
*goanalysis.Linter { + var conf map[string]map[string]any + if settings != nil { + conf = settings.Settings + } + + return goanalysis.NewLinter( + "iface", + "Detect the incorrect use of interfaces, helping developers avoid interface pollution.", + analyzersFromSettings(settings), + conf, + ).WithLoadMode(goanalysis.LoadModeTypesInfo) +} + +func analyzersFromSettings(settings *config.IfaceSettings) []*analysis.Analyzer { + allAnalyzers := map[string]*analysis.Analyzer{ + "identical": identical.Analyzer, + "unused": unused.Analyzer, + "opaque": opaque.Analyzer, + } + + if settings == nil || len(settings.Enable) == 0 { + // Default enable `identical` analyzer only + return []*analysis.Analyzer{identical.Analyzer} + } + + var analyzers []*analysis.Analyzer + for _, name := range uniqueNames(settings.Enable) { + if _, ok := allAnalyzers[name]; !ok { + // skip unknown analyzer + continue + } + + analyzers = append(analyzers, allAnalyzers[name]) + } + + return analyzers +} + +func uniqueNames(names []string) []string { + slices.Sort(names) + return slices.Compact(names) +} diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/staticcheck_common.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/staticcheck_common.go index 958013d0df..e5a0e33b7d 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/staticcheck_common.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/staticcheck_common.go @@ -9,11 +9,8 @@ import ( scconfig "honnef.co/go/tools/config" "github.com/golangci/golangci-lint/pkg/config" - "github.com/golangci/golangci-lint/pkg/logutils" ) -var debugf = logutils.Debug(logutils.DebugKeyMegacheck) - func SetupStaticCheckAnalyzers(src []*lint.Analyzer, checks []string) []*analysis.Analyzer { var names []string for _, a := range src { @@ -32,14 +29,6 @@ func SetupStaticCheckAnalyzers(src []*lint.Analyzer, checks []string) []*analysi return ret } -func SetAnalyzerGoVersion(a *analysis.Analyzer, goVersion string) { - if v := a.Flags.Lookup("go"); v != nil { - if err := v.Value.Set(goVersion); err != nil { - debugf("Failed to set go version: %s", err) - } - } -} - func StaticCheckConfig(settings *config.StaticCheckSettings) *scconfig.Config { var cfg *scconfig.Config diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/mnd/mnd.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/mnd/mnd.go index 9aa8692ff3..fe64653b91 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/mnd/mnd.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/mnd/mnd.go @@ -12,27 +12,6 @@ func New(settings *config.MndSettings) *goanalysis.Linter { return newMND(mnd.Analyzer, settings, nil) } -func NewGoMND(settings *config.GoMndSettings) *goanalysis.Linter { - // shallow copy because mnd.Analyzer is a global variable. - a := new(analysis.Analyzer) - *a = *mnd.Analyzer - - // Used to force the analyzer name to use the same name as the linter. - // This is required to avoid displaying the analyzer name inside the issue text. - a.Name = "gomnd" - - var linterCfg map[string]map[string]any - - if settings != nil && len(settings.Settings) > 0 { - // Convert deprecated setting. 
- linterCfg = map[string]map[string]any{ - a.Name: settings.Settings["mnd"], - } - } - - return newMND(a, &settings.MndSettings, linterCfg) -} - func newMND(a *analysis.Analyzer, settings *config.MndSettings, linterCfg map[string]map[string]any) *goanalysis.Linter { if len(linterCfg) == 0 && settings != nil { cfg := make(map[string]any) diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nakedret/nakedret.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nakedret/nakedret.go index beabf2cd8f..e69fa5e9f5 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nakedret/nakedret.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nakedret/nakedret.go @@ -14,7 +14,7 @@ func New(settings *config.NakedretSettings) *goanalysis.Linter { maxLines = settings.MaxFuncLines } - a := nakedret.NakedReturnAnalyzer(maxLines) + a := nakedret.NakedReturnAnalyzer(maxLines, false) return goanalysis.NewLinter( a.Name, diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilnil/nilnil.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilnil/nilnil.go index c9237035d3..d8d677d999 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilnil/nilnil.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilnil/nilnil.go @@ -1,8 +1,6 @@ package nilnil import ( - "strings" - "github.com/Antonboom/nilnil/pkg/analyzer" "golang.org/x/tools/go/analysis" @@ -10,13 +8,16 @@ import ( "github.com/golangci/golangci-lint/pkg/goanalysis" ) -func New(cfg *config.NilNilSettings) *goanalysis.Linter { +func New(settings *config.NilNilSettings) *goanalysis.Linter { a := analyzer.New() cfgMap := make(map[string]map[string]any) - if cfg != nil && len(cfg.CheckedTypes) != 0 { + if settings != nil { cfgMap[a.Name] = map[string]any{ - "checked-types": strings.Join(cfg.CheckedTypes, ","), + "detect-opposite": settings.DetectOpposite, + } + if len(settings.CheckedTypes) != 0 { + cfgMap[a.Name]["checked-types"] = settings.CheckedTypes } } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/execinquery/execinquery.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/recvcheck/recvcheck.go similarity index 76% rename from vendor/github.com/golangci/golangci-lint/pkg/golinters/execinquery/execinquery.go rename to vendor/github.com/golangci/golangci-lint/pkg/golinters/recvcheck/recvcheck.go index 3832873c63..8b030f15de 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/execinquery/execinquery.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/recvcheck/recvcheck.go @@ -1,14 +1,14 @@ -package execinquery +package recvcheck import ( - "github.com/lufeee/execinquery" + "github.com/raeperd/recvcheck" "golang.org/x/tools/go/analysis" "github.com/golangci/golangci-lint/pkg/goanalysis" ) func New() *goanalysis.Linter { - a := execinquery.Analyzer + a := recvcheck.Analyzer return goanalysis.NewLinter( a.Name, diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive/revive.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive/revive.go index 90ce15db63..056a258e0a 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive/revive.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive/revive.go @@ -184,8 +184,8 @@ func toIssue(pass *analysis.Pass, object *jsonObject) goanalysis.Issue { // This function mimics the GetConfig function of revive. // This allows to get default values and right types. 
// https://github.com/golangci/golangci-lint/issues/1745 -// https://github.com/mgechev/revive/blob/v1.3.7/config/config.go#L217 -// https://github.com/mgechev/revive/blob/v1.3.7/config/config.go#L169-L174 +// https://github.com/mgechev/revive/blob/v1.5.0/config/config.go#L220 +// https://github.com/mgechev/revive/blob/v1.5.0/config/config.go#L172-L178 func getConfig(cfg *config.ReviveSettings) (*lint.Config, error) { conf := defaultConfig() @@ -284,7 +284,7 @@ func safeTomlSlice(r []any) []any { } // This element is not exported by revive, so we need copy the code. -// Extracted from https://github.com/mgechev/revive/blob/v1.3.9/config/config.go#L15 +// Extracted from https://github.com/mgechev/revive/blob/v1.5.0/config/config.go#L16 var defaultRules = []lint.Rule{ &rule.VarDeclarationsRule{}, &rule.PackageCommentsRule{}, @@ -368,12 +368,14 @@ var allRules = append([]lint.Rule{ &rule.EnforceSliceStyleRule{}, &rule.MaxControlNestingRule{}, &rule.CommentsDensityRule{}, + &rule.FileLengthLimitRule{}, + &rule.FilenameFormatRule{}, }, defaultRules...) const defaultConfidence = 0.8 // This element is not exported by revive, so we need copy the code. -// Extracted from https://github.com/mgechev/revive/blob/v1.1.4/config/config.go#L145 +// Extracted from https://github.com/mgechev/revive/blob/v1.5.0/config/config.go#L183 func normalizeConfig(cfg *lint.Config) { // NOTE(ldez): this custom section for golangci-lint should be kept. // --- @@ -419,7 +421,7 @@ func normalizeConfig(cfg *lint.Config) { } // This element is not exported by revive, so we need copy the code. -// Extracted from https://github.com/mgechev/revive/blob/v1.1.4/config/config.go#L214 +// Extracted from https://github.com/mgechev/revive/blob/v1.5.0/config/config.go#L252 func defaultConfig() *lint.Config { defaultConfig := lint.Config{ Confidence: defaultConfidence, diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tenv/tenv.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tenv/tenv.go index b80a783b65..2fc247fab8 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tenv/tenv.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tenv/tenv.go @@ -25,5 +25,5 @@ func New(settings *config.TenvSettings) *goanalysis.Linter { a.Doc, []*analysis.Analyzer{a}, cfg, - ).WithLoadMode(goanalysis.LoadModeSyntax) + ).WithLoadMode(goanalysis.LoadModeTypesInfo) } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/context.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/context.go index 160620338f..d04a11b81f 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/lint/context.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/context.go @@ -4,7 +4,7 @@ import ( "context" "fmt" - "github.com/golangci/golangci-lint/internal/pkgcache" + "github.com/golangci/golangci-lint/internal/cache" "github.com/golangci/golangci-lint/pkg/config" "github.com/golangci/golangci-lint/pkg/exitcodes" "github.com/golangci/golangci-lint/pkg/fsutils" @@ -19,13 +19,13 @@ type ContextBuilder struct { pkgLoader *PackageLoader fileCache *fsutils.FileCache - pkgCache *pkgcache.Cache + pkgCache *cache.Cache loadGuard *load.Guard } func NewContextBuilder(cfg *config.Config, pkgLoader *PackageLoader, - fileCache *fsutils.FileCache, pkgCache *pkgcache.Cache, loadGuard *load.Guard, + fileCache *fsutils.FileCache, pkgCache *cache.Cache, loadGuard *load.Guard, ) *ContextBuilder { return &ContextBuilder{ cfg: cfg, diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/config.go 
b/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/config.go index 57c51fa75e..6d6d4b17e7 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/config.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/config.go @@ -81,7 +81,7 @@ func (lc *Config) IsSlowLinter() bool { } func (lc *Config) WithLoadFiles() *Config { - lc.LoadMode |= packages.NeedName | packages.NeedFiles | packages.NeedCompiledGoFiles + lc.LoadMode |= packages.NeedName | packages.NeedFiles | packages.NeedCompiledGoFiles | packages.NeedModule return lc } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/context.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/context.go index 5c03630b26..9f29b5c4c8 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/context.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/context.go @@ -5,7 +5,7 @@ import ( "golang.org/x/tools/go/packages" - "github.com/golangci/golangci-lint/internal/pkgcache" + "github.com/golangci/golangci-lint/internal/cache" "github.com/golangci/golangci-lint/pkg/config" "github.com/golangci/golangci-lint/pkg/fsutils" "github.com/golangci/golangci-lint/pkg/goanalysis/load" @@ -24,7 +24,7 @@ type Context struct { FileCache *fsutils.FileCache Log logutils.Log - PkgCache *pkgcache.Cache + PkgCache *cache.Cache LoadGuard *load.Guard } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/builder_linter.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/builder_linter.go index c06cd9a038..d2a2dc3d07 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/builder_linter.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/builder_linter.go @@ -23,7 +23,6 @@ import ( "github.com/golangci/golangci-lint/pkg/golinters/errchkjson" "github.com/golangci/golangci-lint/pkg/golinters/errname" "github.com/golangci/golangci-lint/pkg/golinters/errorlint" - "github.com/golangci/golangci-lint/pkg/golinters/execinquery" "github.com/golangci/golangci-lint/pkg/golinters/exhaustive" "github.com/golangci/golangci-lint/pkg/golinters/exhaustruct" "github.com/golangci/golangci-lint/pkg/golinters/exportloopref" @@ -55,6 +54,7 @@ import ( "github.com/golangci/golangci-lint/pkg/golinters/gosmopolitan" "github.com/golangci/golangci-lint/pkg/golinters/govet" "github.com/golangci/golangci-lint/pkg/golinters/grouper" + "github.com/golangci/golangci-lint/pkg/golinters/iface" "github.com/golangci/golangci-lint/pkg/golinters/importas" "github.com/golangci/golangci-lint/pkg/golinters/inamedparam" "github.com/golangci/golangci-lint/pkg/golinters/ineffassign" @@ -85,6 +85,7 @@ import ( "github.com/golangci/golangci-lint/pkg/golinters/promlinter" "github.com/golangci/golangci-lint/pkg/golinters/protogetter" "github.com/golangci/golangci-lint/pkg/golinters/reassign" + "github.com/golangci/golangci-lint/pkg/golinters/recvcheck" "github.com/golangci/golangci-lint/pkg/golinters/revive" "github.com/golangci/golangci-lint/pkg/golinters/rowserrcheck" "github.com/golangci/golangci-lint/pkg/golinters/sloglint" @@ -134,7 +135,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) { // When a new linter is added the version in `WithSince(...)` must be the next minor version of golangci-lint. return []*linter.Config{ linter.NewConfig(asasalint.New(&cfg.LintersSettings.Asasalint)). - WithSince("1.47.0"). + WithSince("v1.47.0"). WithPresets(linter.PresetBugs). WithLoadForGoAnalysis(). 
WithURL("https://github.com/alingse/asasalint"), @@ -145,7 +146,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) { WithURL("https://github.com/tdakkota/asciicheck"), linter.NewConfig(bidichk.New(&cfg.LintersSettings.BiDiChk)). - WithSince("1.43.0"). + WithSince("v1.43.0"). WithPresets(linter.PresetBugs). WithURL("https://github.com/breml/bidichk"), @@ -162,7 +163,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) { WithURL("https://github.com/lasiar/canonicalHeader"), linter.NewConfig(containedctx.New()). - WithSince("1.44.0"). + WithSince("v1.44.0"). WithLoadForGoAnalysis(). WithPresets(linter.PresetStyle). WithURL("https://github.com/sivchari/containedctx"), @@ -213,7 +214,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) { WithURL("https://github.com/mibk/dupl"), linter.NewConfig(dupword.New(&cfg.LintersSettings.DupWord)). - WithSince("1.50.0"). + WithSince("v1.50.0"). WithPresets(linter.PresetComment). WithURL("https://github.com/Abirdcfly/dupword"), @@ -231,7 +232,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) { WithURL("https://github.com/kisielk/errcheck"), linter.NewConfig(errchkjson.New(&cfg.LintersSettings.ErrChkJSON)). - WithSince("1.44.0"). + WithSince("v1.44.0"). WithPresets(linter.PresetBugs). WithLoadForGoAnalysis(). WithURL("https://github.com/breml/errchkjson"), @@ -248,12 +249,12 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) { WithLoadForGoAnalysis(). WithURL("https://github.com/polyfloyd/go-errorlint"), - linter.NewConfig(execinquery.New()). + linter.NewConfig(linter.NewNoopDeprecated("execinquery", cfg, linter.DeprecationError)). WithSince("v1.46.0"). WithPresets(linter.PresetSQL). WithLoadForGoAnalysis(). WithURL("https://github.com/1uf3/execinquery"). - DeprecatedWarning("The repository of the linter has been archived by the owner.", "v1.58.0", ""), + DeprecatedError("The repository of the linter has been archived by the owner.", "v1.58.0", ""), linter.NewConfig(exhaustive.New(&cfg.LintersSettings.Exhaustive)). WithSince(" v1.28.0"). @@ -297,7 +298,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) { WithURL("https://github.com/gostaticanalysis/forcetypeassert"), linter.NewConfig(fatcontext.New()). - WithSince("1.58.0"). + WithSince("v1.58.0"). WithPresets(linter.PresetPerformance). WithLoadForGoAnalysis(). WithURL("https://github.com/Crocmagnon/fatcontext"), @@ -334,7 +335,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) { WithSince("v1.12.0"). WithPresets(linter.PresetStyle), - linter.NewConfig(gochecksumtype.New()). + linter.NewConfig(gochecksumtype.New(&cfg.LintersSettings.GoChecksumType)). WithSince("v1.55.0"). WithPresets(linter.PresetBugs). WithLoadForGoAnalysis(). @@ -416,11 +417,11 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) { WithPresets(linter.PresetStyle). WithURL("https://github.com/tommy-muehle/go-mnd"), - linter.NewConfig(mnd.NewGoMND(&cfg.LintersSettings.Gomnd)). + linter.NewConfig(linter.NewNoopDeprecated("gomnd", cfg, linter.DeprecationError)). WithSince("v1.22.0"). WithPresets(linter.PresetStyle). WithURL("https://github.com/tommy-muehle/go-mnd"). - DeprecatedWarning("The linter has been renamed.", "v1.58.0", "mnd"), + DeprecatedError("The linter has been renamed.", "v1.58.0", "mnd"), linter.NewConfig(gomoddirectives.New(&cfg.LintersSettings.GoModDirectives)). WithSince("v1.39.0"). 
@@ -435,7 +436,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) { linter.NewConfig(goprintffuncname.New()). WithSince("v1.23.0"). WithPresets(linter.PresetStyle). - WithURL("https://github.com/jirfag/go-printf-func-name"), + WithURL("https://github.com/golangci/go-printf-func-name"), linter.NewConfig(gosec.New(&cfg.LintersSettings.Gosec)). WithSince("v1.0.0"). @@ -477,6 +478,12 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) { WithURL("https://github.com/esimonov/ifshort"). DeprecatedError("The repository of the linter has been deprecated by the owner.", "v1.48.0", ""), + linter.NewConfig(iface.New(&cfg.LintersSettings.Iface)). + WithSince("v1.62.0"). + WithLoadForGoAnalysis(). + WithPresets(linter.PresetStyle). + WithURL("https://github.com/uudashr/iface"), + linter.NewConfig(importas.New(&cfg.LintersSettings.ImportAs)). WithSince("v1.38.0"). WithPresets(linter.PresetStyle). @@ -652,11 +659,17 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) { WithURL("https://github.com/ghostiam/protogetter"), linter.NewConfig(reassign.New(&cfg.LintersSettings.Reassign)). - WithSince("1.49.0"). + WithSince("v1.49.0"). WithPresets(linter.PresetBugs). WithLoadForGoAnalysis(). WithURL("https://github.com/curioswitch/go-reassign"), + linter.NewConfig(recvcheck.New()). + WithSince("v1.62.0"). + WithPresets(linter.PresetBugs). + WithLoadForGoAnalysis(). + WithURL("https://github.com/raeperd/recvcheck"), + linter.NewConfig(revive.New(&cfg.LintersSettings.Revive)). WithSince("v1.37.0"). WithPresets(linter.PresetStyle, linter.PresetMetaLinter). @@ -699,7 +712,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) { WithLoadForGoAnalysis(). WithPresets(linter.PresetBugs, linter.PresetMetaLinter). WithAlternativeNames(megacheckName). - WithURL("https://staticcheck.io/"), + WithURL("https://staticcheck.dev/"), linter.NewConfig(linter.NewNoopDeprecated("structcheck", cfg, linter.DeprecationError)). WithSince("v1.0.0"). @@ -838,6 +851,6 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) { WithSince("v1.26.0"). WithPresets(linter.PresetStyle). WithAutoFix(). - WithURL("https://github.com/golangci/golangci-lint/blob/master/pkg/golinters/nolintlint/README.md"), + WithURL("https://github.com/golangci/golangci-lint/tree/master/pkg/golinters/nolintlint/internal"), }, nil } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go index c3b983ff6a..2c47c7166e 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go @@ -115,17 +115,17 @@ func (r *Runner) Run(ctx context.Context, linters []*linter.Config) ([]result.Is ) for _, lc := range linters { - sw.TrackStage(lc.Name(), func() { - linterIssues, err := r.runLinterSafe(ctx, r.lintCtx, lc) - if err != nil { - lintErrors = errors.Join(lintErrors, fmt.Errorf("can't run linter %s", lc.Linter.Name()), err) - r.Log.Warnf("Can't run linter %s: %v", lc.Linter.Name(), err) + linterIssues, err := timeutils.TrackStage(sw, lc.Name(), func() ([]result.Issue, error) { + return r.runLinterSafe(ctx, r.lintCtx, lc) + }) + if err != nil { + lintErrors = errors.Join(lintErrors, fmt.Errorf("can't run linter %s", lc.Linter.Name()), err) + r.Log.Warnf("Can't run linter %s: %v", lc.Linter.Name(), err) - return - } + continue + } - issues = append(issues, linterIssues...) 
- }) + issues = append(issues, linterIssues...) } return r.processLintResults(issues), lintErrors @@ -188,9 +188,7 @@ func (r *Runner) processLintResults(inIssues []result.Issue) []result.Issue { // finalize processors: logging, clearing, no heavy work here for _, p := range r.Processors { - sw.TrackStage(p.Name(), func() { - p.Finish() - }) + sw.TrackStage(p.Name(), p.Finish) } if issuesBefore != issuesAfter { @@ -216,10 +214,8 @@ func (r *Runner) printPerProcessorStat(stat map[string]processorStat) { func (r *Runner) processIssues(issues []result.Issue, sw *timeutils.Stopwatch, statPerProcessor map[string]processorStat) []result.Issue { for _, p := range r.Processors { - var newIssues []result.Issue - var err error - sw.TrackStage(p.Name(), func() { - newIssues, err = p.Process(issues) + newIssues, err := timeutils.TrackStage(sw, p.Name(), func() ([]result.Issue, error) { + return p.Process(issues) }) if err != nil { diff --git a/vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go b/vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go index e4bb98109d..3c27e2557a 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go @@ -60,11 +60,10 @@ const ( ) const ( - DebugKeyGoCritic = "gocritic" // Debugs `go-critic` linter. - DebugKeyGovet = "govet" // Debugs `govet` linter. - DebugKeyMegacheck = "megacheck" // Debugs `staticcheck` related linters. - DebugKeyNolint = "nolint" // Debugs a filter excluding issues by `//nolint` comments. - DebugKeyRevive = "revive" // Debugs `revive` linter. + DebugKeyGoCritic = "gocritic" // Debugs `go-critic` linter. + DebugKeyGovet = "govet" // Debugs `govet` linter. + DebugKeyNolint = "nolint" // Debugs a filter excluding issues by `//nolint` comments. + DebugKeyRevive = "revive" // Debugs `revive` linter. ) func getEnabledDebugs() map[string]bool { diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go index 50d6dcff3b..b65339682c 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go @@ -12,9 +12,10 @@ const defaultCodeClimateSeverity = "critical" // CodeClimateIssue is a subset of the Code Climate spec. // https://github.com/codeclimate/platform/blob/master/spec/analyzers/SPEC.md#data-types // It is just enough to support GitLab CI Code Quality. 
-// https://docs.gitlab.com/ee/user/project/merge_requests/code_quality.html +// https://docs.gitlab.com/ee/ci/testing/code_quality.html#implement-a-custom-tool type CodeClimateIssue struct { Description string `json:"description"` + CheckName string `json:"check_name"` Severity string `json:"severity,omitempty"` Fingerprint string `json:"fingerprint"` Location struct { @@ -35,10 +36,13 @@ func NewCodeClimate(w io.Writer) *CodeClimate { func (p CodeClimate) Print(issues []result.Issue) error { codeClimateIssues := make([]CodeClimateIssue, 0, len(issues)) + for i := range issues { issue := &issues[i] + codeClimateIssue := CodeClimateIssue{} codeClimateIssue.Description = issue.Description() + codeClimateIssue.CheckName = issue.FromLinter codeClimateIssue.Location.Path = issue.Pos.Filename codeClimateIssue.Location.Lines.Begin = issue.Pos.Line codeClimateIssue.Fingerprint = issue.Fingerprint() @@ -51,9 +55,5 @@ func (p CodeClimate) Print(issues []result.Issue) error { codeClimateIssues = append(codeClimateIssues, codeClimateIssue) } - err := json.NewEncoder(p.w).Encode(codeClimateIssues) - if err != nil { - return err - } - return nil + return json.NewEncoder(p.w).Encode(codeClimateIssues) } diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/fixer.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/fixer.go index 4915dc479a..764af5a923 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/fixer.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/fixer.go @@ -8,7 +8,7 @@ import ( "sort" "strings" - "github.com/golangci/golangci-lint/internal/robustio" + "github.com/golangci/golangci-lint/internal/go/robustio" "github.com/golangci/golangci-lint/pkg/config" "github.com/golangci/golangci-lint/pkg/fsutils" "github.com/golangci/golangci-lint/pkg/logutils" @@ -56,9 +56,8 @@ func (p Fixer) Process(issues []result.Issue) ([]result.Issue, error) { } for file, issuesToFix := range issuesToFixPerFile { - var err error - p.sw.TrackStage("all", func() { - err = p.fixIssuesInFile(file, issuesToFix) + err := p.sw.TrackStageErr("all", func() error { + return p.fixIssuesInFile(file, issuesToFix) }) if err != nil { p.log.Errorf("Failed to fix issues in file %s: %s", file, err) diff --git a/vendor/github.com/golangci/golangci-lint/pkg/timeutils/stopwatch.go b/vendor/github.com/golangci/golangci-lint/pkg/timeutils/stopwatch.go index d944dea2ea..95b16de9fc 100644 --- a/vendor/github.com/golangci/golangci-lint/pkg/timeutils/stopwatch.go +++ b/vendor/github.com/golangci/golangci-lint/pkg/timeutils/stopwatch.go @@ -114,3 +114,25 @@ func (s *Stopwatch) TrackStage(name string, f func()) { s.stages[name] += time.Since(startedAt) s.mu.Unlock() } + +func (s *Stopwatch) TrackStageErr(name string, f func() error) error { + startedAt := time.Now() + err := f() + + s.mu.Lock() + s.stages[name] += time.Since(startedAt) + s.mu.Unlock() + + return err +} + +func TrackStage[T any](s *Stopwatch, name string, f func() (T, error)) (T, error) { + var result T + var err error + + s.TrackStage(name, func() { + result, err = f() + }) + + return result, err +} diff --git a/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker.go b/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker.go index dff391797d..98f28e9a6b 100644 --- a/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker.go +++ b/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker.go @@ -84,7 +84,8 @@ func walkThroughEmbeddedInterfaces(sel *types.Selection) 
([]types.Type, bool) { } func getTypeAtFieldIndex(startingAt types.Type, fieldIndex int) types.Type { - t := maybeUnname(maybeDereference(startingAt)) + t := maybeDereference(maybeUnalias(startingAt)) + t = maybeUnname(maybeUnalias(t)) s, ok := t.(*types.Struct) if !ok { panic(fmt.Sprintf("cannot get Field of a type that is not a struct, got a %T", t)) diff --git a/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker_121.go b/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker_121.go new file mode 100644 index 0000000000..f2df6849bb --- /dev/null +++ b/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker_121.go @@ -0,0 +1,10 @@ +//go:build !go1.22 +// +build !go1.22 + +package errcheck + +import "go/types" + +func maybeUnalias(t types.Type) types.Type { + return t +} diff --git a/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker_122.go b/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker_122.go new file mode 100644 index 0000000000..cbff3cd434 --- /dev/null +++ b/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker_122.go @@ -0,0 +1,10 @@ +//go:build go1.22 +// +build go1.22 + +package errcheck + +import "go/types" + +func maybeUnalias(t types.Type) types.Type { + return types.Unalias(t) +} diff --git a/vendor/github.com/kisielk/errcheck/errcheck/errcheck.go b/vendor/github.com/kisielk/errcheck/errcheck/errcheck.go index d61d348f77..a7a2a30bf7 100644 --- a/vendor/github.com/kisielk/errcheck/errcheck/errcheck.go +++ b/vendor/github.com/kisielk/errcheck/errcheck/errcheck.go @@ -80,7 +80,7 @@ func (r *Result) Append(other Result) { r.UncheckedErrors = append(r.UncheckedErrors, other.UncheckedErrors...) } -// Returns the unique errors that have been accumulated. Duplicates may occur +// Unique returns the unique errors that have been accumulated. Duplicates may occur // when a file containing an unchecked error belongs to > 1 package. // // The method receiver remains unmodified after the call to Unique. @@ -338,7 +338,7 @@ func (v *visitor) selectorName(call *ast.CallExpr) string { // then just that function's fullName is returned. // // Otherwise, we walk through all the potentially embedded interfaces of the receiver -// the collect a list of type-qualified function names that we will check. +// to collect a list of type-qualified function names that we will check. func (v *visitor) namesForExcludeCheck(call *ast.CallExpr) []string { sel, fn, ok := v.selectorAndFunc(call) if !ok { @@ -351,7 +351,7 @@ func (v *visitor) namesForExcludeCheck(call *ast.CallExpr) []string { } // This will be missing for functions without a receiver (like fmt.Printf), - // so just fall back to the the function's fullName in that case. + // so just fall back to the function's fullName in that case. selection, ok := v.typesInfo.Selections[sel] if !ok { return []string{name} @@ -420,9 +420,9 @@ func (v *visitor) ignoreCall(call *ast.CallExpr) bool { // 2. 
x.y.f() var id *ast.Ident switch exp := call.Fun.(type) { - case (*ast.Ident): + case *ast.Ident: id = exp - case (*ast.SelectorExpr): + case *ast.SelectorExpr: id = exp.Sel default: // eg: *ast.SliceExpr, *ast.IndexExpr @@ -586,26 +586,38 @@ func (v *visitor) Visit(node ast.Node) ast.Visitor { for _, name := range vspec.Names { lhs = append(lhs, ast.Expr(name)) } - v.checkAssignment(lhs, vspec.Values) + followed := v.checkAssignment(lhs, vspec.Values) + if !followed { + return nil + } } case *ast.AssignStmt: - v.checkAssignment(stmt.Lhs, stmt.Rhs) + followed := v.checkAssignment(stmt.Lhs, stmt.Rhs) + if !followed { + return nil + } + + case *ast.TypeAssertExpr: + v.checkAssertExpr(stmt) + return nil default: } return v } -func (v *visitor) checkAssignment(lhs, rhs []ast.Expr) { +// checkAssignment checks the assignment statement and returns a boolean value +// indicating whether to continue checking the substructure in AssignStmt or not +func (v *visitor) checkAssignment(lhs, rhs []ast.Expr) (followed bool) { if len(rhs) == 1 { // single value on rhs; check against lhs identifiers if call, ok := rhs[0].(*ast.CallExpr); ok { if !v.blank { - return + return true } if v.ignoreCall(call) { - return + return true } isError := v.errorsByArg(call) for i := 0; i < len(lhs); i++ { @@ -619,11 +631,11 @@ func (v *visitor) checkAssignment(lhs, rhs []ast.Expr) { } } else if assert, ok := rhs[0].(*ast.TypeAssertExpr); ok { if !v.asserts { - return + return false } if assert.Type == nil { // type switch - return + return false } if len(lhs) < 2 { // assertion result not read @@ -632,6 +644,7 @@ func (v *visitor) checkAssignment(lhs, rhs []ast.Expr) { // assertion result ignored v.addErrorAtPosition(id.NamePos, nil) } + return false } } else { // multiple value on rhs; in this case a call can't return @@ -661,6 +674,19 @@ func (v *visitor) checkAssignment(lhs, rhs []ast.Expr) { } } } + + return true +} + +func (v *visitor) checkAssertExpr(expr *ast.TypeAssertExpr) { + if !v.asserts { + return + } + if expr.Type == nil { + // type switch + return + } + v.addErrorAtPosition(expr.Pos(), nil) } func isErrorType(t types.Type) bool { diff --git a/vendor/github.com/kisielk/errcheck/errcheck/excludes.go b/vendor/github.com/kisielk/errcheck/errcheck/excludes.go index a783b5a763..450b798e4e 100644 --- a/vendor/github.com/kisielk/errcheck/errcheck/excludes.go +++ b/vendor/github.com/kisielk/errcheck/errcheck/excludes.go @@ -47,6 +47,11 @@ var DefaultExcludedSymbols = []string{ // hash "(hash.Hash).Write", + + // hash/maphash + "(*hash/maphash.Hash).Write", + "(*hash/maphash.Hash).WriteByte", + "(*hash/maphash.Hash).WriteString", } // ReadExcludes reads an excludes file, a newline delimited file that lists diff --git a/vendor/github.com/lasiar/canonicalheader/.golangci.yaml b/vendor/github.com/lasiar/canonicalheader/.golangci.yaml index 5652c8d6cc..997ec0cb01 100644 --- a/vendor/github.com/lasiar/canonicalheader/.golangci.yaml +++ b/vendor/github.com/lasiar/canonicalheader/.golangci.yaml @@ -40,6 +40,9 @@ linters: fast: false enable: + # Globals and init() are no ok, because this linter use on golangci lint. + - gochecknoglobals + - gochecknoinits # Check for pass []any as any in variadic func(...any). # Rare case but saved me from debugging a few times. - asasalint @@ -58,6 +61,12 @@ linters: # Check whether the function uses a non-inherited context. - contextcheck + # after go 1.22 don't need copy var at for range. + - copyloopvar + + # Find duplicate words, rare. 
+ - dupword + # Check for two durations multiplied together. - durationcheck @@ -73,6 +82,10 @@ linters: # Checks for pointers to enclosing loop variables. - exportloopref + + # Imports order. + - gci + # As you already know I'm a co-author. It would be strange to not use # one of my warmly loved projects. - gocritic @@ -104,9 +117,15 @@ linters: # Last week I caught a bug with it. - ineffassign + # range over int, work after go 1.22 + - intrange + # Fix all the misspells, amazing thing. - misspell + # Reports wrong mirror patterns of bytes/strings usage. + - mirror + # Finds naked/bare returns and requires change them. - nakedret @@ -121,6 +140,9 @@ linters: # Better not to have //nolint: at all ;) - nolintlint + # aiming at usages of fmt.Sprintf which have faster alternatives. + - perfsprint + # Finds slices that could potentially be pre-allocated. # Small performance win + cleaner code. - prealloc @@ -144,6 +166,9 @@ linters: - rowserrcheck - sqlclosecheck + # Ensure consistent code style when using log/slog. + - sloglint + # I have found that it's not the same as staticcheck binary :\ - staticcheck @@ -156,6 +181,7 @@ linters: # Test-related checks. All of them are good. - tenv - testableexamples + - testifylint - thelper - tparallel @@ -185,9 +211,6 @@ linters: # (c) Bryan C. Mills / https://github.com/bcmills - cyclop - # Abandoned, replaced by `unused`. - - deadcode - # Check declaration order of types, consts, vars and funcs. # I like it but I don't use it. - decorder @@ -202,9 +225,6 @@ linters: # Tool for code clone detection. - dupl - # Find duplicate words, rare. - - dupword - # I'm fine to check the error from json.Marshal ¯\_(ツ)_/¯ - errchkjson @@ -213,7 +233,6 @@ linters: # Forces to handle more cases. Cool but noisy. - exhaustive - - exhaustivestruct # Deprecated, replaced by check below. - exhaustruct # Forbids some identifiers. I don't have a case for it. @@ -225,19 +244,12 @@ linters: # I might have long but a simple function. - funlen - # Imports order. I do this manually ¯\_(ツ)_/¯ - - gci - # I'm not a fan of ginkgo and gomega packages. - ginkgolinter # Checks that compiler directive comments (//go:) are valid. Rare. - gocheckcompilerdirectives - # Globals and init() are ok. - - gochecknoglobals - - gochecknoinits - # Same as `cyclop` linter (see above) - gocognit - goconst @@ -247,16 +259,13 @@ linters: - godox # Check the error handling expressions. Too noisy. - - goerr113 + - err113 # I don't use file headers. - goheader - # 1st Go linter, deprecated :( use `revive`. - - golint - # Reports magic consts. Might be noisy but still good. - - gomnd + - mnd # Allowed/blocked packages to import. I prefer to do it manually. - gomodguard @@ -267,9 +276,6 @@ linters: # Groupt declarations, I prefer manually. - grouper - # Deprecated. - - ifshort - # Checks imports aliases, rare. - importas @@ -291,9 +297,6 @@ linters: # Slice declarations with non-zero initial length. Not my case. - makezero - # Deprecated. Use govet `fieldalignment`. - - maligned - # Enforce tags in un/marshaled structs. Cool but not my case. - musttag @@ -306,9 +309,6 @@ linters: # Reports all named returns, not that bad. - nonamedreturns - # Deprecated. Replaced by `revive`. - - nosnakecase - # Finds misuse of Sprintf with host:port in a URL. Cool but rare. - nosprintfhostport @@ -335,6 +335,12 @@ linters: - wsl linters-settings: + gci: + sections: + - standard + - default + - localmodule + revive: # Maximum number of open files at the same time. 
# See https://github.com/mgechev/revive#command-line-flags @@ -378,7 +384,6 @@ linters-settings: - name: banned-characters severity: warning disabled: false - arguments: ["Ω", "Σ", "σ", "7"] # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#bare-return - name: bare-return severity: warning @@ -404,9 +409,6 @@ linters-settings: - name: comment-spacings severity: warning disabled: false - arguments: - - mypragma - - otherpragma # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#confusing-naming - name: confusing-naming severity: warning @@ -444,8 +446,6 @@ linters-settings: - name: defer severity: warning disabled: false - arguments: - - ["call-chain", "loop"] # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#dot-imports - name: dot-imports severity: warning @@ -470,8 +470,6 @@ linters-settings: - name: enforce-map-style severity: warning disabled: false - arguments: - - "make" # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#error-naming - name: error-naming severity: warning @@ -530,8 +528,6 @@ linters-settings: - name: indent-error-flow severity: warning disabled: false - arguments: - - "preserveScope" # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#import-alias-naming - name: import-alias-naming severity: warning @@ -542,9 +538,6 @@ linters-settings: - name: imports-blacklist severity: warning disabled: false - arguments: - - "crypto/md5" - - "crypto/sha1" # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#import-shadowing - name: import-shadowing severity: warning @@ -632,8 +625,6 @@ linters-settings: - name: superfluous-else severity: warning disabled: false - arguments: - - "preserveScope" # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#time-equal - name: time-equal severity: warning @@ -646,10 +637,6 @@ linters-settings: - name: var-naming severity: warning disabled: false - arguments: - - ["ID"] # AllowList - - ["VM"] # DenyList - - - upperCaseConst: true # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#var-declaration - name: var-declaration severity: warning @@ -670,9 +657,6 @@ linters-settings: - name: unhandled-error severity: warning disabled: false - arguments: - - "fmt.Printf" - - "myFunction" # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#unnecessary-stmt - name: unnecessary-stmt severity: warning @@ -691,8 +675,6 @@ linters-settings: - name: unused-receiver severity: warning disabled: false - arguments: - - allowRegex: "^_" # https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#useless-break - name: useless-break severity: warning diff --git a/vendor/github.com/lasiar/canonicalheader/analyzer.go b/vendor/github.com/lasiar/canonicalheader/analyzer.go index d3fb529ebd..258ebdfd4d 100644 --- a/vendor/github.com/lasiar/canonicalheader/analyzer.go +++ b/vendor/github.com/lasiar/canonicalheader/analyzer.go @@ -18,6 +18,7 @@ const ( name = "Header" ) +//nolint:gochecknoglobals // struct is not big, can be skip. 
var Analyzer = &analysis.Analyzer{ Name: "canonicalheader", Doc: "canonicalheader checks whether net/http.Header uses canonical header", diff --git a/vendor/github.com/lufeee/execinquery/.gitignore b/vendor/github.com/lufeee/execinquery/.gitignore deleted file mode 100644 index 00e1abc31f..0000000000 --- a/vendor/github.com/lufeee/execinquery/.gitignore +++ /dev/null @@ -1 +0,0 @@ -execinquery diff --git a/vendor/github.com/lufeee/execinquery/README.md b/vendor/github.com/lufeee/execinquery/README.md deleted file mode 100644 index 38fa7c8b96..0000000000 --- a/vendor/github.com/lufeee/execinquery/README.md +++ /dev/null @@ -1,76 +0,0 @@ -# execinquery - a simple query string checker in Query function -[![Go Matrix](https://github.com/lufeee/execinquery/actions/workflows/go-cross.yml/badge.svg?branch=main)](https://github.com/lufeee/execinquery/actions/workflows/go-cross.yml) -[![Go lint](https://github.com/lufeee/execinquery/actions/workflows/lint.yml/badge.svg?branch=main)](https://github.com/lufeee/execinquery/actions/workflows/lint.yml) -[![MIT License](http://img.shields.io/badge/license-MIT-blue.svg?style=flat)](LICENSE) -## About - -execinquery is a linter about query string checker in Query function which reads your Go src files and -warnings it finds. - -## Installation - -```sh -go install github.com/lufeee/execinquery/cmd/execinquery -``` - -## Usage -```go -package main - -import ( - "database/sql" - "log" -) - -func main() { - db, err := sql.Open("mysql", "test:test@tcp(test:3306)/test") - if err != nil { - log.Fatal("Database Connect Error: ", err) - } - defer db.Close() - - test := "a" - _, err = db.Query("Update * FROM hoge where id = ?", test) - if err != nil { - log.Fatal("Query Error: ", err) - } - -} -``` - -```console -go vet -vettool=$(which execinquery) ./... - -# command-line-arguments -./a.go:16:11: Use Exec instead of Query to execute `UPDATE` query -``` - -## CI - -### CircleCI - -```yaml -- run: - name: install execinquery - command: go install github.com/lufeee/execinquery - -- run: - name: run execinquery - command: go vet -vettool=`which execinquery` ./... -``` - -### GitHub Actions - -```yaml -- name: install execinquery - run: go install github.com/lufeee/execinquery - -- name: run execinquery - run: go vet -vettool=`which execinquery` ./... -``` - -### License - -MIT license. - -
diff --git a/vendor/github.com/lufeee/execinquery/execinquery.go b/vendor/github.com/lufeee/execinquery/execinquery.go deleted file mode 100644 index c37dc17010..0000000000 --- a/vendor/github.com/lufeee/execinquery/execinquery.go +++ /dev/null @@ -1,135 +0,0 @@ -package execinquery - -import ( - "go/ast" - "regexp" - "strings" - - "golang.org/x/tools/go/analysis" - "golang.org/x/tools/go/analysis/passes/inspect" - "golang.org/x/tools/go/ast/inspector" -) - -const doc = "execinquery is a linter about query string checker in Query function which reads your Go src files and warning it finds" - -// Analyzer is checking database/sql pkg Query's function -var Analyzer = &analysis.Analyzer{ - Name: "execinquery", - Doc: doc, - Run: newLinter().run, - Requires: []*analysis.Analyzer{ - inspect.Analyzer, - }, -} - -type linter struct { - commentExp *regexp.Regexp - multilineCommentExp *regexp.Regexp -} - -func newLinter() *linter { - return &linter{ - commentExp: regexp.MustCompile(`--[^\n]*\n`), - multilineCommentExp: regexp.MustCompile(`(?s)/\*.*?\*/`), - } -} - -func (l linter) run(pass *analysis.Pass) (interface{}, error) { - result := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) - - nodeFilter := []ast.Node{ - (*ast.CallExpr)(nil), - } - - result.Preorder(nodeFilter, func(n ast.Node) { - switch n := n.(type) { - case *ast.CallExpr: - selector, ok := n.Fun.(*ast.SelectorExpr) - if !ok { - return - } - - if pass.TypesInfo == nil || pass.TypesInfo.Uses[selector.Sel] == nil || pass.TypesInfo.Uses[selector.Sel].Pkg() == nil { - return - } - - if "database/sql" != pass.TypesInfo.Uses[selector.Sel].Pkg().Path() { - return - } - - if !strings.Contains(selector.Sel.Name, "Query") { - return - } - - replacement := "Exec" - var i int // the index of the query argument - if strings.Contains(selector.Sel.Name, "Context") { - replacement += "Context" - i = 1 - } - - if len(n.Args) <= i { - return - } - - query := l.getQueryString(n.Args[i]) - if query == "" { - return - } - - query = strings.TrimSpace(l.cleanValue(query)) - parts := strings.SplitN(query, " ", 2) - cmd := strings.ToUpper(parts[0]) - - if strings.HasPrefix(cmd, "SELECT") { - return - } - - pass.Reportf(n.Fun.Pos(), "Use %s instead of %s to execute `%s` query", replacement, selector.Sel.Name, cmd) - } - }) - - return nil, nil -} - -func (l linter) cleanValue(s string) string { - v := strings.NewReplacer(`"`, "", "`", "").Replace(s) - - v = l.multilineCommentExp.ReplaceAllString(v, "") - - return l.commentExp.ReplaceAllString(v, "") -} - -func (l linter) getQueryString(exp interface{}) string { - switch e := exp.(type) { - case *ast.AssignStmt: - var v string - for _, stmt := range e.Rhs { - v += l.cleanValue(l.getQueryString(stmt)) - } - return v - - case *ast.BasicLit: - return e.Value - - case *ast.ValueSpec: - var v string - for _, value := range e.Values { - v += l.cleanValue(l.getQueryString(value)) - } - return v - - case *ast.Ident: - if e.Obj == nil { - return "" - } - return l.getQueryString(e.Obj.Decl) - - case *ast.BinaryExpr: - v := l.cleanValue(l.getQueryString(e.X)) - v += l.cleanValue(l.getQueryString(e.Y)) - return v - } - - return "" -} diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_table.go b/vendor/github.com/mattn/go-runewidth/runewidth_table.go index e5d890c266..ad025ad529 100644 --- a/vendor/github.com/mattn/go-runewidth/runewidth_table.go +++ b/vendor/github.com/mattn/go-runewidth/runewidth_table.go @@ -4,20 +4,21 @@ package runewidth var combining = table{ {0x0300, 0x036F}, {0x0483, 0x0489}, 
{0x07EB, 0x07F3}, - {0x0C00, 0x0C00}, {0x0C04, 0x0C04}, {0x0D00, 0x0D01}, - {0x135D, 0x135F}, {0x1A7F, 0x1A7F}, {0x1AB0, 0x1AC0}, - {0x1B6B, 0x1B73}, {0x1DC0, 0x1DF9}, {0x1DFB, 0x1DFF}, + {0x0C00, 0x0C00}, {0x0C04, 0x0C04}, {0x0CF3, 0x0CF3}, + {0x0D00, 0x0D01}, {0x135D, 0x135F}, {0x1A7F, 0x1A7F}, + {0x1AB0, 0x1ACE}, {0x1B6B, 0x1B73}, {0x1DC0, 0x1DFF}, {0x20D0, 0x20F0}, {0x2CEF, 0x2CF1}, {0x2DE0, 0x2DFF}, {0x3099, 0x309A}, {0xA66F, 0xA672}, {0xA674, 0xA67D}, {0xA69E, 0xA69F}, {0xA6F0, 0xA6F1}, {0xA8E0, 0xA8F1}, {0xFE20, 0xFE2F}, {0x101FD, 0x101FD}, {0x10376, 0x1037A}, - {0x10EAB, 0x10EAC}, {0x10F46, 0x10F50}, {0x11300, 0x11301}, - {0x1133B, 0x1133C}, {0x11366, 0x1136C}, {0x11370, 0x11374}, - {0x16AF0, 0x16AF4}, {0x1D165, 0x1D169}, {0x1D16D, 0x1D172}, + {0x10EAB, 0x10EAC}, {0x10F46, 0x10F50}, {0x10F82, 0x10F85}, + {0x11300, 0x11301}, {0x1133B, 0x1133C}, {0x11366, 0x1136C}, + {0x11370, 0x11374}, {0x16AF0, 0x16AF4}, {0x1CF00, 0x1CF2D}, + {0x1CF30, 0x1CF46}, {0x1D165, 0x1D169}, {0x1D16D, 0x1D172}, {0x1D17B, 0x1D182}, {0x1D185, 0x1D18B}, {0x1D1AA, 0x1D1AD}, {0x1D242, 0x1D244}, {0x1E000, 0x1E006}, {0x1E008, 0x1E018}, {0x1E01B, 0x1E021}, {0x1E023, 0x1E024}, {0x1E026, 0x1E02A}, - {0x1E8D0, 0x1E8D6}, + {0x1E08F, 0x1E08F}, {0x1E8D0, 0x1E8D6}, } var doublewidth = table{ @@ -33,33 +34,34 @@ var doublewidth = table{ {0x2753, 0x2755}, {0x2757, 0x2757}, {0x2795, 0x2797}, {0x27B0, 0x27B0}, {0x27BF, 0x27BF}, {0x2B1B, 0x2B1C}, {0x2B50, 0x2B50}, {0x2B55, 0x2B55}, {0x2E80, 0x2E99}, - {0x2E9B, 0x2EF3}, {0x2F00, 0x2FD5}, {0x2FF0, 0x2FFB}, - {0x3000, 0x303E}, {0x3041, 0x3096}, {0x3099, 0x30FF}, - {0x3105, 0x312F}, {0x3131, 0x318E}, {0x3190, 0x31E3}, - {0x31F0, 0x321E}, {0x3220, 0x3247}, {0x3250, 0x4DBF}, - {0x4E00, 0xA48C}, {0xA490, 0xA4C6}, {0xA960, 0xA97C}, - {0xAC00, 0xD7A3}, {0xF900, 0xFAFF}, {0xFE10, 0xFE19}, - {0xFE30, 0xFE52}, {0xFE54, 0xFE66}, {0xFE68, 0xFE6B}, - {0xFF01, 0xFF60}, {0xFFE0, 0xFFE6}, {0x16FE0, 0x16FE4}, - {0x16FF0, 0x16FF1}, {0x17000, 0x187F7}, {0x18800, 0x18CD5}, - {0x18D00, 0x18D08}, {0x1B000, 0x1B11E}, {0x1B150, 0x1B152}, - {0x1B164, 0x1B167}, {0x1B170, 0x1B2FB}, {0x1F004, 0x1F004}, - {0x1F0CF, 0x1F0CF}, {0x1F18E, 0x1F18E}, {0x1F191, 0x1F19A}, - {0x1F200, 0x1F202}, {0x1F210, 0x1F23B}, {0x1F240, 0x1F248}, - {0x1F250, 0x1F251}, {0x1F260, 0x1F265}, {0x1F300, 0x1F320}, - {0x1F32D, 0x1F335}, {0x1F337, 0x1F37C}, {0x1F37E, 0x1F393}, - {0x1F3A0, 0x1F3CA}, {0x1F3CF, 0x1F3D3}, {0x1F3E0, 0x1F3F0}, - {0x1F3F4, 0x1F3F4}, {0x1F3F8, 0x1F43E}, {0x1F440, 0x1F440}, - {0x1F442, 0x1F4FC}, {0x1F4FF, 0x1F53D}, {0x1F54B, 0x1F54E}, - {0x1F550, 0x1F567}, {0x1F57A, 0x1F57A}, {0x1F595, 0x1F596}, - {0x1F5A4, 0x1F5A4}, {0x1F5FB, 0x1F64F}, {0x1F680, 0x1F6C5}, - {0x1F6CC, 0x1F6CC}, {0x1F6D0, 0x1F6D2}, {0x1F6D5, 0x1F6D7}, - {0x1F6EB, 0x1F6EC}, {0x1F6F4, 0x1F6FC}, {0x1F7E0, 0x1F7EB}, - {0x1F90C, 0x1F93A}, {0x1F93C, 0x1F945}, {0x1F947, 0x1F978}, - {0x1F97A, 0x1F9CB}, {0x1F9CD, 0x1F9FF}, {0x1FA70, 0x1FA74}, - {0x1FA78, 0x1FA7A}, {0x1FA80, 0x1FA86}, {0x1FA90, 0x1FAA8}, - {0x1FAB0, 0x1FAB6}, {0x1FAC0, 0x1FAC2}, {0x1FAD0, 0x1FAD6}, - {0x20000, 0x2FFFD}, {0x30000, 0x3FFFD}, + {0x2E9B, 0x2EF3}, {0x2F00, 0x2FD5}, {0x2FF0, 0x303E}, + {0x3041, 0x3096}, {0x3099, 0x30FF}, {0x3105, 0x312F}, + {0x3131, 0x318E}, {0x3190, 0x31E3}, {0x31EF, 0x321E}, + {0x3220, 0x3247}, {0x3250, 0x4DBF}, {0x4E00, 0xA48C}, + {0xA490, 0xA4C6}, {0xA960, 0xA97C}, {0xAC00, 0xD7A3}, + {0xF900, 0xFAFF}, {0xFE10, 0xFE19}, {0xFE30, 0xFE52}, + {0xFE54, 0xFE66}, {0xFE68, 0xFE6B}, {0xFF01, 0xFF60}, + {0xFFE0, 0xFFE6}, {0x16FE0, 0x16FE4}, {0x16FF0, 0x16FF1}, + 
{0x17000, 0x187F7}, {0x18800, 0x18CD5}, {0x18D00, 0x18D08}, + {0x1AFF0, 0x1AFF3}, {0x1AFF5, 0x1AFFB}, {0x1AFFD, 0x1AFFE}, + {0x1B000, 0x1B122}, {0x1B132, 0x1B132}, {0x1B150, 0x1B152}, + {0x1B155, 0x1B155}, {0x1B164, 0x1B167}, {0x1B170, 0x1B2FB}, + {0x1F004, 0x1F004}, {0x1F0CF, 0x1F0CF}, {0x1F18E, 0x1F18E}, + {0x1F191, 0x1F19A}, {0x1F200, 0x1F202}, {0x1F210, 0x1F23B}, + {0x1F240, 0x1F248}, {0x1F250, 0x1F251}, {0x1F260, 0x1F265}, + {0x1F300, 0x1F320}, {0x1F32D, 0x1F335}, {0x1F337, 0x1F37C}, + {0x1F37E, 0x1F393}, {0x1F3A0, 0x1F3CA}, {0x1F3CF, 0x1F3D3}, + {0x1F3E0, 0x1F3F0}, {0x1F3F4, 0x1F3F4}, {0x1F3F8, 0x1F43E}, + {0x1F440, 0x1F440}, {0x1F442, 0x1F4FC}, {0x1F4FF, 0x1F53D}, + {0x1F54B, 0x1F54E}, {0x1F550, 0x1F567}, {0x1F57A, 0x1F57A}, + {0x1F595, 0x1F596}, {0x1F5A4, 0x1F5A4}, {0x1F5FB, 0x1F64F}, + {0x1F680, 0x1F6C5}, {0x1F6CC, 0x1F6CC}, {0x1F6D0, 0x1F6D2}, + {0x1F6D5, 0x1F6D7}, {0x1F6DC, 0x1F6DF}, {0x1F6EB, 0x1F6EC}, + {0x1F6F4, 0x1F6FC}, {0x1F7E0, 0x1F7EB}, {0x1F7F0, 0x1F7F0}, + {0x1F90C, 0x1F93A}, {0x1F93C, 0x1F945}, {0x1F947, 0x1F9FF}, + {0x1FA70, 0x1FA7C}, {0x1FA80, 0x1FA88}, {0x1FA90, 0x1FABD}, + {0x1FABF, 0x1FAC5}, {0x1FACE, 0x1FADB}, {0x1FAE0, 0x1FAE8}, + {0x1FAF0, 0x1FAF8}, {0x20000, 0x2FFFD}, {0x30000, 0x3FFFD}, } var ambiguous = table{ @@ -154,43 +156,43 @@ var neutral = table{ {0x0402, 0x040F}, {0x0450, 0x0450}, {0x0452, 0x052F}, {0x0531, 0x0556}, {0x0559, 0x058A}, {0x058D, 0x058F}, {0x0591, 0x05C7}, {0x05D0, 0x05EA}, {0x05EF, 0x05F4}, - {0x0600, 0x061C}, {0x061E, 0x070D}, {0x070F, 0x074A}, - {0x074D, 0x07B1}, {0x07C0, 0x07FA}, {0x07FD, 0x082D}, - {0x0830, 0x083E}, {0x0840, 0x085B}, {0x085E, 0x085E}, - {0x0860, 0x086A}, {0x08A0, 0x08B4}, {0x08B6, 0x08C7}, - {0x08D3, 0x0983}, {0x0985, 0x098C}, {0x098F, 0x0990}, - {0x0993, 0x09A8}, {0x09AA, 0x09B0}, {0x09B2, 0x09B2}, - {0x09B6, 0x09B9}, {0x09BC, 0x09C4}, {0x09C7, 0x09C8}, - {0x09CB, 0x09CE}, {0x09D7, 0x09D7}, {0x09DC, 0x09DD}, - {0x09DF, 0x09E3}, {0x09E6, 0x09FE}, {0x0A01, 0x0A03}, - {0x0A05, 0x0A0A}, {0x0A0F, 0x0A10}, {0x0A13, 0x0A28}, - {0x0A2A, 0x0A30}, {0x0A32, 0x0A33}, {0x0A35, 0x0A36}, - {0x0A38, 0x0A39}, {0x0A3C, 0x0A3C}, {0x0A3E, 0x0A42}, - {0x0A47, 0x0A48}, {0x0A4B, 0x0A4D}, {0x0A51, 0x0A51}, - {0x0A59, 0x0A5C}, {0x0A5E, 0x0A5E}, {0x0A66, 0x0A76}, - {0x0A81, 0x0A83}, {0x0A85, 0x0A8D}, {0x0A8F, 0x0A91}, - {0x0A93, 0x0AA8}, {0x0AAA, 0x0AB0}, {0x0AB2, 0x0AB3}, - {0x0AB5, 0x0AB9}, {0x0ABC, 0x0AC5}, {0x0AC7, 0x0AC9}, - {0x0ACB, 0x0ACD}, {0x0AD0, 0x0AD0}, {0x0AE0, 0x0AE3}, - {0x0AE6, 0x0AF1}, {0x0AF9, 0x0AFF}, {0x0B01, 0x0B03}, - {0x0B05, 0x0B0C}, {0x0B0F, 0x0B10}, {0x0B13, 0x0B28}, - {0x0B2A, 0x0B30}, {0x0B32, 0x0B33}, {0x0B35, 0x0B39}, - {0x0B3C, 0x0B44}, {0x0B47, 0x0B48}, {0x0B4B, 0x0B4D}, - {0x0B55, 0x0B57}, {0x0B5C, 0x0B5D}, {0x0B5F, 0x0B63}, - {0x0B66, 0x0B77}, {0x0B82, 0x0B83}, {0x0B85, 0x0B8A}, - {0x0B8E, 0x0B90}, {0x0B92, 0x0B95}, {0x0B99, 0x0B9A}, - {0x0B9C, 0x0B9C}, {0x0B9E, 0x0B9F}, {0x0BA3, 0x0BA4}, - {0x0BA8, 0x0BAA}, {0x0BAE, 0x0BB9}, {0x0BBE, 0x0BC2}, - {0x0BC6, 0x0BC8}, {0x0BCA, 0x0BCD}, {0x0BD0, 0x0BD0}, - {0x0BD7, 0x0BD7}, {0x0BE6, 0x0BFA}, {0x0C00, 0x0C0C}, - {0x0C0E, 0x0C10}, {0x0C12, 0x0C28}, {0x0C2A, 0x0C39}, - {0x0C3D, 0x0C44}, {0x0C46, 0x0C48}, {0x0C4A, 0x0C4D}, - {0x0C55, 0x0C56}, {0x0C58, 0x0C5A}, {0x0C60, 0x0C63}, + {0x0600, 0x070D}, {0x070F, 0x074A}, {0x074D, 0x07B1}, + {0x07C0, 0x07FA}, {0x07FD, 0x082D}, {0x0830, 0x083E}, + {0x0840, 0x085B}, {0x085E, 0x085E}, {0x0860, 0x086A}, + {0x0870, 0x088E}, {0x0890, 0x0891}, {0x0898, 0x0983}, + {0x0985, 0x098C}, {0x098F, 0x0990}, {0x0993, 0x09A8}, + {0x09AA, 
0x09B0}, {0x09B2, 0x09B2}, {0x09B6, 0x09B9}, + {0x09BC, 0x09C4}, {0x09C7, 0x09C8}, {0x09CB, 0x09CE}, + {0x09D7, 0x09D7}, {0x09DC, 0x09DD}, {0x09DF, 0x09E3}, + {0x09E6, 0x09FE}, {0x0A01, 0x0A03}, {0x0A05, 0x0A0A}, + {0x0A0F, 0x0A10}, {0x0A13, 0x0A28}, {0x0A2A, 0x0A30}, + {0x0A32, 0x0A33}, {0x0A35, 0x0A36}, {0x0A38, 0x0A39}, + {0x0A3C, 0x0A3C}, {0x0A3E, 0x0A42}, {0x0A47, 0x0A48}, + {0x0A4B, 0x0A4D}, {0x0A51, 0x0A51}, {0x0A59, 0x0A5C}, + {0x0A5E, 0x0A5E}, {0x0A66, 0x0A76}, {0x0A81, 0x0A83}, + {0x0A85, 0x0A8D}, {0x0A8F, 0x0A91}, {0x0A93, 0x0AA8}, + {0x0AAA, 0x0AB0}, {0x0AB2, 0x0AB3}, {0x0AB5, 0x0AB9}, + {0x0ABC, 0x0AC5}, {0x0AC7, 0x0AC9}, {0x0ACB, 0x0ACD}, + {0x0AD0, 0x0AD0}, {0x0AE0, 0x0AE3}, {0x0AE6, 0x0AF1}, + {0x0AF9, 0x0AFF}, {0x0B01, 0x0B03}, {0x0B05, 0x0B0C}, + {0x0B0F, 0x0B10}, {0x0B13, 0x0B28}, {0x0B2A, 0x0B30}, + {0x0B32, 0x0B33}, {0x0B35, 0x0B39}, {0x0B3C, 0x0B44}, + {0x0B47, 0x0B48}, {0x0B4B, 0x0B4D}, {0x0B55, 0x0B57}, + {0x0B5C, 0x0B5D}, {0x0B5F, 0x0B63}, {0x0B66, 0x0B77}, + {0x0B82, 0x0B83}, {0x0B85, 0x0B8A}, {0x0B8E, 0x0B90}, + {0x0B92, 0x0B95}, {0x0B99, 0x0B9A}, {0x0B9C, 0x0B9C}, + {0x0B9E, 0x0B9F}, {0x0BA3, 0x0BA4}, {0x0BA8, 0x0BAA}, + {0x0BAE, 0x0BB9}, {0x0BBE, 0x0BC2}, {0x0BC6, 0x0BC8}, + {0x0BCA, 0x0BCD}, {0x0BD0, 0x0BD0}, {0x0BD7, 0x0BD7}, + {0x0BE6, 0x0BFA}, {0x0C00, 0x0C0C}, {0x0C0E, 0x0C10}, + {0x0C12, 0x0C28}, {0x0C2A, 0x0C39}, {0x0C3C, 0x0C44}, + {0x0C46, 0x0C48}, {0x0C4A, 0x0C4D}, {0x0C55, 0x0C56}, + {0x0C58, 0x0C5A}, {0x0C5D, 0x0C5D}, {0x0C60, 0x0C63}, {0x0C66, 0x0C6F}, {0x0C77, 0x0C8C}, {0x0C8E, 0x0C90}, {0x0C92, 0x0CA8}, {0x0CAA, 0x0CB3}, {0x0CB5, 0x0CB9}, {0x0CBC, 0x0CC4}, {0x0CC6, 0x0CC8}, {0x0CCA, 0x0CCD}, - {0x0CD5, 0x0CD6}, {0x0CDE, 0x0CDE}, {0x0CE0, 0x0CE3}, - {0x0CE6, 0x0CEF}, {0x0CF1, 0x0CF2}, {0x0D00, 0x0D0C}, + {0x0CD5, 0x0CD6}, {0x0CDD, 0x0CDE}, {0x0CE0, 0x0CE3}, + {0x0CE6, 0x0CEF}, {0x0CF1, 0x0CF3}, {0x0D00, 0x0D0C}, {0x0D0E, 0x0D10}, {0x0D12, 0x0D44}, {0x0D46, 0x0D48}, {0x0D4A, 0x0D4F}, {0x0D54, 0x0D63}, {0x0D66, 0x0D7F}, {0x0D81, 0x0D83}, {0x0D85, 0x0D96}, {0x0D9A, 0x0DB1}, @@ -200,7 +202,7 @@ var neutral = table{ {0x0E01, 0x0E3A}, {0x0E3F, 0x0E5B}, {0x0E81, 0x0E82}, {0x0E84, 0x0E84}, {0x0E86, 0x0E8A}, {0x0E8C, 0x0EA3}, {0x0EA5, 0x0EA5}, {0x0EA7, 0x0EBD}, {0x0EC0, 0x0EC4}, - {0x0EC6, 0x0EC6}, {0x0EC8, 0x0ECD}, {0x0ED0, 0x0ED9}, + {0x0EC6, 0x0EC6}, {0x0EC8, 0x0ECE}, {0x0ED0, 0x0ED9}, {0x0EDC, 0x0EDF}, {0x0F00, 0x0F47}, {0x0F49, 0x0F6C}, {0x0F71, 0x0F97}, {0x0F99, 0x0FBC}, {0x0FBE, 0x0FCC}, {0x0FCE, 0x0FDA}, {0x1000, 0x10C5}, {0x10C7, 0x10C7}, @@ -212,20 +214,19 @@ var neutral = table{ {0x12D8, 0x1310}, {0x1312, 0x1315}, {0x1318, 0x135A}, {0x135D, 0x137C}, {0x1380, 0x1399}, {0x13A0, 0x13F5}, {0x13F8, 0x13FD}, {0x1400, 0x169C}, {0x16A0, 0x16F8}, - {0x1700, 0x170C}, {0x170E, 0x1714}, {0x1720, 0x1736}, - {0x1740, 0x1753}, {0x1760, 0x176C}, {0x176E, 0x1770}, - {0x1772, 0x1773}, {0x1780, 0x17DD}, {0x17E0, 0x17E9}, - {0x17F0, 0x17F9}, {0x1800, 0x180E}, {0x1810, 0x1819}, - {0x1820, 0x1878}, {0x1880, 0x18AA}, {0x18B0, 0x18F5}, - {0x1900, 0x191E}, {0x1920, 0x192B}, {0x1930, 0x193B}, - {0x1940, 0x1940}, {0x1944, 0x196D}, {0x1970, 0x1974}, - {0x1980, 0x19AB}, {0x19B0, 0x19C9}, {0x19D0, 0x19DA}, - {0x19DE, 0x1A1B}, {0x1A1E, 0x1A5E}, {0x1A60, 0x1A7C}, - {0x1A7F, 0x1A89}, {0x1A90, 0x1A99}, {0x1AA0, 0x1AAD}, - {0x1AB0, 0x1AC0}, {0x1B00, 0x1B4B}, {0x1B50, 0x1B7C}, - {0x1B80, 0x1BF3}, {0x1BFC, 0x1C37}, {0x1C3B, 0x1C49}, - {0x1C4D, 0x1C88}, {0x1C90, 0x1CBA}, {0x1CBD, 0x1CC7}, - {0x1CD0, 0x1CFA}, {0x1D00, 0x1DF9}, {0x1DFB, 0x1F15}, + {0x1700, 0x1715}, {0x171F, 0x1736}, 
{0x1740, 0x1753}, + {0x1760, 0x176C}, {0x176E, 0x1770}, {0x1772, 0x1773}, + {0x1780, 0x17DD}, {0x17E0, 0x17E9}, {0x17F0, 0x17F9}, + {0x1800, 0x1819}, {0x1820, 0x1878}, {0x1880, 0x18AA}, + {0x18B0, 0x18F5}, {0x1900, 0x191E}, {0x1920, 0x192B}, + {0x1930, 0x193B}, {0x1940, 0x1940}, {0x1944, 0x196D}, + {0x1970, 0x1974}, {0x1980, 0x19AB}, {0x19B0, 0x19C9}, + {0x19D0, 0x19DA}, {0x19DE, 0x1A1B}, {0x1A1E, 0x1A5E}, + {0x1A60, 0x1A7C}, {0x1A7F, 0x1A89}, {0x1A90, 0x1A99}, + {0x1AA0, 0x1AAD}, {0x1AB0, 0x1ACE}, {0x1B00, 0x1B4C}, + {0x1B50, 0x1B7E}, {0x1B80, 0x1BF3}, {0x1BFC, 0x1C37}, + {0x1C3B, 0x1C49}, {0x1C4D, 0x1C88}, {0x1C90, 0x1CBA}, + {0x1CBD, 0x1CC7}, {0x1CD0, 0x1CFA}, {0x1D00, 0x1F15}, {0x1F18, 0x1F1D}, {0x1F20, 0x1F45}, {0x1F48, 0x1F4D}, {0x1F50, 0x1F57}, {0x1F59, 0x1F59}, {0x1F5B, 0x1F5B}, {0x1F5D, 0x1F5D}, {0x1F5F, 0x1F7D}, {0x1F80, 0x1FB4}, @@ -237,7 +238,7 @@ var neutral = table{ {0x2036, 0x203A}, {0x203C, 0x203D}, {0x203F, 0x2064}, {0x2066, 0x2071}, {0x2075, 0x207E}, {0x2080, 0x2080}, {0x2085, 0x208E}, {0x2090, 0x209C}, {0x20A0, 0x20A8}, - {0x20AA, 0x20AB}, {0x20AD, 0x20BF}, {0x20D0, 0x20F0}, + {0x20AA, 0x20AB}, {0x20AD, 0x20C0}, {0x20D0, 0x20F0}, {0x2100, 0x2102}, {0x2104, 0x2104}, {0x2106, 0x2108}, {0x210A, 0x2112}, {0x2114, 0x2115}, {0x2117, 0x2120}, {0x2123, 0x2125}, {0x2127, 0x212A}, {0x212C, 0x2152}, @@ -275,15 +276,15 @@ var neutral = table{ {0x2780, 0x2794}, {0x2798, 0x27AF}, {0x27B1, 0x27BE}, {0x27C0, 0x27E5}, {0x27EE, 0x2984}, {0x2987, 0x2B1A}, {0x2B1D, 0x2B4F}, {0x2B51, 0x2B54}, {0x2B5A, 0x2B73}, - {0x2B76, 0x2B95}, {0x2B97, 0x2C2E}, {0x2C30, 0x2C5E}, - {0x2C60, 0x2CF3}, {0x2CF9, 0x2D25}, {0x2D27, 0x2D27}, - {0x2D2D, 0x2D2D}, {0x2D30, 0x2D67}, {0x2D6F, 0x2D70}, - {0x2D7F, 0x2D96}, {0x2DA0, 0x2DA6}, {0x2DA8, 0x2DAE}, - {0x2DB0, 0x2DB6}, {0x2DB8, 0x2DBE}, {0x2DC0, 0x2DC6}, - {0x2DC8, 0x2DCE}, {0x2DD0, 0x2DD6}, {0x2DD8, 0x2DDE}, - {0x2DE0, 0x2E52}, {0x303F, 0x303F}, {0x4DC0, 0x4DFF}, - {0xA4D0, 0xA62B}, {0xA640, 0xA6F7}, {0xA700, 0xA7BF}, - {0xA7C2, 0xA7CA}, {0xA7F5, 0xA82C}, {0xA830, 0xA839}, + {0x2B76, 0x2B95}, {0x2B97, 0x2CF3}, {0x2CF9, 0x2D25}, + {0x2D27, 0x2D27}, {0x2D2D, 0x2D2D}, {0x2D30, 0x2D67}, + {0x2D6F, 0x2D70}, {0x2D7F, 0x2D96}, {0x2DA0, 0x2DA6}, + {0x2DA8, 0x2DAE}, {0x2DB0, 0x2DB6}, {0x2DB8, 0x2DBE}, + {0x2DC0, 0x2DC6}, {0x2DC8, 0x2DCE}, {0x2DD0, 0x2DD6}, + {0x2DD8, 0x2DDE}, {0x2DE0, 0x2E5D}, {0x303F, 0x303F}, + {0x4DC0, 0x4DFF}, {0xA4D0, 0xA62B}, {0xA640, 0xA6F7}, + {0xA700, 0xA7CA}, {0xA7D0, 0xA7D1}, {0xA7D3, 0xA7D3}, + {0xA7D5, 0xA7D9}, {0xA7F2, 0xA82C}, {0xA830, 0xA839}, {0xA840, 0xA877}, {0xA880, 0xA8C5}, {0xA8CE, 0xA8D9}, {0xA8E0, 0xA953}, {0xA95F, 0xA95F}, {0xA980, 0xA9CD}, {0xA9CF, 0xA9D9}, {0xA9DE, 0xA9FE}, {0xAA00, 0xAA36}, @@ -294,8 +295,8 @@ var neutral = table{ {0xD7B0, 0xD7C6}, {0xD7CB, 0xD7FB}, {0xD800, 0xDFFF}, {0xFB00, 0xFB06}, {0xFB13, 0xFB17}, {0xFB1D, 0xFB36}, {0xFB38, 0xFB3C}, {0xFB3E, 0xFB3E}, {0xFB40, 0xFB41}, - {0xFB43, 0xFB44}, {0xFB46, 0xFBC1}, {0xFBD3, 0xFD3F}, - {0xFD50, 0xFD8F}, {0xFD92, 0xFDC7}, {0xFDF0, 0xFDFD}, + {0xFB43, 0xFB44}, {0xFB46, 0xFBC2}, {0xFBD3, 0xFD8F}, + {0xFD92, 0xFDC7}, {0xFDCF, 0xFDCF}, {0xFDF0, 0xFDFF}, {0xFE20, 0xFE2F}, {0xFE70, 0xFE74}, {0xFE76, 0xFEFC}, {0xFEFF, 0xFEFF}, {0xFFF9, 0xFFFC}, {0x10000, 0x1000B}, {0x1000D, 0x10026}, {0x10028, 0x1003A}, {0x1003C, 0x1003D}, @@ -307,44 +308,48 @@ var neutral = table{ {0x10380, 0x1039D}, {0x1039F, 0x103C3}, {0x103C8, 0x103D5}, {0x10400, 0x1049D}, {0x104A0, 0x104A9}, {0x104B0, 0x104D3}, {0x104D8, 0x104FB}, {0x10500, 0x10527}, {0x10530, 0x10563}, - {0x1056F, 0x1056F}, 
{0x10600, 0x10736}, {0x10740, 0x10755}, - {0x10760, 0x10767}, {0x10800, 0x10805}, {0x10808, 0x10808}, - {0x1080A, 0x10835}, {0x10837, 0x10838}, {0x1083C, 0x1083C}, - {0x1083F, 0x10855}, {0x10857, 0x1089E}, {0x108A7, 0x108AF}, - {0x108E0, 0x108F2}, {0x108F4, 0x108F5}, {0x108FB, 0x1091B}, - {0x1091F, 0x10939}, {0x1093F, 0x1093F}, {0x10980, 0x109B7}, - {0x109BC, 0x109CF}, {0x109D2, 0x10A03}, {0x10A05, 0x10A06}, - {0x10A0C, 0x10A13}, {0x10A15, 0x10A17}, {0x10A19, 0x10A35}, - {0x10A38, 0x10A3A}, {0x10A3F, 0x10A48}, {0x10A50, 0x10A58}, - {0x10A60, 0x10A9F}, {0x10AC0, 0x10AE6}, {0x10AEB, 0x10AF6}, - {0x10B00, 0x10B35}, {0x10B39, 0x10B55}, {0x10B58, 0x10B72}, - {0x10B78, 0x10B91}, {0x10B99, 0x10B9C}, {0x10BA9, 0x10BAF}, - {0x10C00, 0x10C48}, {0x10C80, 0x10CB2}, {0x10CC0, 0x10CF2}, - {0x10CFA, 0x10D27}, {0x10D30, 0x10D39}, {0x10E60, 0x10E7E}, - {0x10E80, 0x10EA9}, {0x10EAB, 0x10EAD}, {0x10EB0, 0x10EB1}, - {0x10F00, 0x10F27}, {0x10F30, 0x10F59}, {0x10FB0, 0x10FCB}, - {0x10FE0, 0x10FF6}, {0x11000, 0x1104D}, {0x11052, 0x1106F}, - {0x1107F, 0x110C1}, {0x110CD, 0x110CD}, {0x110D0, 0x110E8}, - {0x110F0, 0x110F9}, {0x11100, 0x11134}, {0x11136, 0x11147}, - {0x11150, 0x11176}, {0x11180, 0x111DF}, {0x111E1, 0x111F4}, - {0x11200, 0x11211}, {0x11213, 0x1123E}, {0x11280, 0x11286}, - {0x11288, 0x11288}, {0x1128A, 0x1128D}, {0x1128F, 0x1129D}, - {0x1129F, 0x112A9}, {0x112B0, 0x112EA}, {0x112F0, 0x112F9}, - {0x11300, 0x11303}, {0x11305, 0x1130C}, {0x1130F, 0x11310}, - {0x11313, 0x11328}, {0x1132A, 0x11330}, {0x11332, 0x11333}, - {0x11335, 0x11339}, {0x1133B, 0x11344}, {0x11347, 0x11348}, - {0x1134B, 0x1134D}, {0x11350, 0x11350}, {0x11357, 0x11357}, - {0x1135D, 0x11363}, {0x11366, 0x1136C}, {0x11370, 0x11374}, - {0x11400, 0x1145B}, {0x1145D, 0x11461}, {0x11480, 0x114C7}, - {0x114D0, 0x114D9}, {0x11580, 0x115B5}, {0x115B8, 0x115DD}, - {0x11600, 0x11644}, {0x11650, 0x11659}, {0x11660, 0x1166C}, - {0x11680, 0x116B8}, {0x116C0, 0x116C9}, {0x11700, 0x1171A}, - {0x1171D, 0x1172B}, {0x11730, 0x1173F}, {0x11800, 0x1183B}, - {0x118A0, 0x118F2}, {0x118FF, 0x11906}, {0x11909, 0x11909}, - {0x1190C, 0x11913}, {0x11915, 0x11916}, {0x11918, 0x11935}, - {0x11937, 0x11938}, {0x1193B, 0x11946}, {0x11950, 0x11959}, - {0x119A0, 0x119A7}, {0x119AA, 0x119D7}, {0x119DA, 0x119E4}, - {0x11A00, 0x11A47}, {0x11A50, 0x11AA2}, {0x11AC0, 0x11AF8}, + {0x1056F, 0x1057A}, {0x1057C, 0x1058A}, {0x1058C, 0x10592}, + {0x10594, 0x10595}, {0x10597, 0x105A1}, {0x105A3, 0x105B1}, + {0x105B3, 0x105B9}, {0x105BB, 0x105BC}, {0x10600, 0x10736}, + {0x10740, 0x10755}, {0x10760, 0x10767}, {0x10780, 0x10785}, + {0x10787, 0x107B0}, {0x107B2, 0x107BA}, {0x10800, 0x10805}, + {0x10808, 0x10808}, {0x1080A, 0x10835}, {0x10837, 0x10838}, + {0x1083C, 0x1083C}, {0x1083F, 0x10855}, {0x10857, 0x1089E}, + {0x108A7, 0x108AF}, {0x108E0, 0x108F2}, {0x108F4, 0x108F5}, + {0x108FB, 0x1091B}, {0x1091F, 0x10939}, {0x1093F, 0x1093F}, + {0x10980, 0x109B7}, {0x109BC, 0x109CF}, {0x109D2, 0x10A03}, + {0x10A05, 0x10A06}, {0x10A0C, 0x10A13}, {0x10A15, 0x10A17}, + {0x10A19, 0x10A35}, {0x10A38, 0x10A3A}, {0x10A3F, 0x10A48}, + {0x10A50, 0x10A58}, {0x10A60, 0x10A9F}, {0x10AC0, 0x10AE6}, + {0x10AEB, 0x10AF6}, {0x10B00, 0x10B35}, {0x10B39, 0x10B55}, + {0x10B58, 0x10B72}, {0x10B78, 0x10B91}, {0x10B99, 0x10B9C}, + {0x10BA9, 0x10BAF}, {0x10C00, 0x10C48}, {0x10C80, 0x10CB2}, + {0x10CC0, 0x10CF2}, {0x10CFA, 0x10D27}, {0x10D30, 0x10D39}, + {0x10E60, 0x10E7E}, {0x10E80, 0x10EA9}, {0x10EAB, 0x10EAD}, + {0x10EB0, 0x10EB1}, {0x10EFD, 0x10F27}, {0x10F30, 0x10F59}, + {0x10F70, 0x10F89}, {0x10FB0, 0x10FCB}, 
{0x10FE0, 0x10FF6}, + {0x11000, 0x1104D}, {0x11052, 0x11075}, {0x1107F, 0x110C2}, + {0x110CD, 0x110CD}, {0x110D0, 0x110E8}, {0x110F0, 0x110F9}, + {0x11100, 0x11134}, {0x11136, 0x11147}, {0x11150, 0x11176}, + {0x11180, 0x111DF}, {0x111E1, 0x111F4}, {0x11200, 0x11211}, + {0x11213, 0x11241}, {0x11280, 0x11286}, {0x11288, 0x11288}, + {0x1128A, 0x1128D}, {0x1128F, 0x1129D}, {0x1129F, 0x112A9}, + {0x112B0, 0x112EA}, {0x112F0, 0x112F9}, {0x11300, 0x11303}, + {0x11305, 0x1130C}, {0x1130F, 0x11310}, {0x11313, 0x11328}, + {0x1132A, 0x11330}, {0x11332, 0x11333}, {0x11335, 0x11339}, + {0x1133B, 0x11344}, {0x11347, 0x11348}, {0x1134B, 0x1134D}, + {0x11350, 0x11350}, {0x11357, 0x11357}, {0x1135D, 0x11363}, + {0x11366, 0x1136C}, {0x11370, 0x11374}, {0x11400, 0x1145B}, + {0x1145D, 0x11461}, {0x11480, 0x114C7}, {0x114D0, 0x114D9}, + {0x11580, 0x115B5}, {0x115B8, 0x115DD}, {0x11600, 0x11644}, + {0x11650, 0x11659}, {0x11660, 0x1166C}, {0x11680, 0x116B9}, + {0x116C0, 0x116C9}, {0x11700, 0x1171A}, {0x1171D, 0x1172B}, + {0x11730, 0x11746}, {0x11800, 0x1183B}, {0x118A0, 0x118F2}, + {0x118FF, 0x11906}, {0x11909, 0x11909}, {0x1190C, 0x11913}, + {0x11915, 0x11916}, {0x11918, 0x11935}, {0x11937, 0x11938}, + {0x1193B, 0x11946}, {0x11950, 0x11959}, {0x119A0, 0x119A7}, + {0x119AA, 0x119D7}, {0x119DA, 0x119E4}, {0x11A00, 0x11A47}, + {0x11A50, 0x11AA2}, {0x11AB0, 0x11AF8}, {0x11B00, 0x11B09}, {0x11C00, 0x11C08}, {0x11C0A, 0x11C36}, {0x11C38, 0x11C45}, {0x11C50, 0x11C6C}, {0x11C70, 0x11C8F}, {0x11C92, 0x11CA7}, {0x11CA9, 0x11CB6}, {0x11D00, 0x11D06}, {0x11D08, 0x11D09}, @@ -352,30 +357,36 @@ var neutral = table{ {0x11D3F, 0x11D47}, {0x11D50, 0x11D59}, {0x11D60, 0x11D65}, {0x11D67, 0x11D68}, {0x11D6A, 0x11D8E}, {0x11D90, 0x11D91}, {0x11D93, 0x11D98}, {0x11DA0, 0x11DA9}, {0x11EE0, 0x11EF8}, + {0x11F00, 0x11F10}, {0x11F12, 0x11F3A}, {0x11F3E, 0x11F59}, {0x11FB0, 0x11FB0}, {0x11FC0, 0x11FF1}, {0x11FFF, 0x12399}, {0x12400, 0x1246E}, {0x12470, 0x12474}, {0x12480, 0x12543}, - {0x13000, 0x1342E}, {0x13430, 0x13438}, {0x14400, 0x14646}, + {0x12F90, 0x12FF2}, {0x13000, 0x13455}, {0x14400, 0x14646}, {0x16800, 0x16A38}, {0x16A40, 0x16A5E}, {0x16A60, 0x16A69}, - {0x16A6E, 0x16A6F}, {0x16AD0, 0x16AED}, {0x16AF0, 0x16AF5}, - {0x16B00, 0x16B45}, {0x16B50, 0x16B59}, {0x16B5B, 0x16B61}, - {0x16B63, 0x16B77}, {0x16B7D, 0x16B8F}, {0x16E40, 0x16E9A}, - {0x16F00, 0x16F4A}, {0x16F4F, 0x16F87}, {0x16F8F, 0x16F9F}, - {0x1BC00, 0x1BC6A}, {0x1BC70, 0x1BC7C}, {0x1BC80, 0x1BC88}, - {0x1BC90, 0x1BC99}, {0x1BC9C, 0x1BCA3}, {0x1D000, 0x1D0F5}, - {0x1D100, 0x1D126}, {0x1D129, 0x1D1E8}, {0x1D200, 0x1D245}, - {0x1D2E0, 0x1D2F3}, {0x1D300, 0x1D356}, {0x1D360, 0x1D378}, - {0x1D400, 0x1D454}, {0x1D456, 0x1D49C}, {0x1D49E, 0x1D49F}, - {0x1D4A2, 0x1D4A2}, {0x1D4A5, 0x1D4A6}, {0x1D4A9, 0x1D4AC}, - {0x1D4AE, 0x1D4B9}, {0x1D4BB, 0x1D4BB}, {0x1D4BD, 0x1D4C3}, - {0x1D4C5, 0x1D505}, {0x1D507, 0x1D50A}, {0x1D50D, 0x1D514}, - {0x1D516, 0x1D51C}, {0x1D51E, 0x1D539}, {0x1D53B, 0x1D53E}, - {0x1D540, 0x1D544}, {0x1D546, 0x1D546}, {0x1D54A, 0x1D550}, - {0x1D552, 0x1D6A5}, {0x1D6A8, 0x1D7CB}, {0x1D7CE, 0x1DA8B}, - {0x1DA9B, 0x1DA9F}, {0x1DAA1, 0x1DAAF}, {0x1E000, 0x1E006}, - {0x1E008, 0x1E018}, {0x1E01B, 0x1E021}, {0x1E023, 0x1E024}, - {0x1E026, 0x1E02A}, {0x1E100, 0x1E12C}, {0x1E130, 0x1E13D}, - {0x1E140, 0x1E149}, {0x1E14E, 0x1E14F}, {0x1E2C0, 0x1E2F9}, - {0x1E2FF, 0x1E2FF}, {0x1E800, 0x1E8C4}, {0x1E8C7, 0x1E8D6}, + {0x16A6E, 0x16ABE}, {0x16AC0, 0x16AC9}, {0x16AD0, 0x16AED}, + {0x16AF0, 0x16AF5}, {0x16B00, 0x16B45}, {0x16B50, 0x16B59}, + {0x16B5B, 0x16B61}, {0x16B63, 
0x16B77}, {0x16B7D, 0x16B8F}, + {0x16E40, 0x16E9A}, {0x16F00, 0x16F4A}, {0x16F4F, 0x16F87}, + {0x16F8F, 0x16F9F}, {0x1BC00, 0x1BC6A}, {0x1BC70, 0x1BC7C}, + {0x1BC80, 0x1BC88}, {0x1BC90, 0x1BC99}, {0x1BC9C, 0x1BCA3}, + {0x1CF00, 0x1CF2D}, {0x1CF30, 0x1CF46}, {0x1CF50, 0x1CFC3}, + {0x1D000, 0x1D0F5}, {0x1D100, 0x1D126}, {0x1D129, 0x1D1EA}, + {0x1D200, 0x1D245}, {0x1D2C0, 0x1D2D3}, {0x1D2E0, 0x1D2F3}, + {0x1D300, 0x1D356}, {0x1D360, 0x1D378}, {0x1D400, 0x1D454}, + {0x1D456, 0x1D49C}, {0x1D49E, 0x1D49F}, {0x1D4A2, 0x1D4A2}, + {0x1D4A5, 0x1D4A6}, {0x1D4A9, 0x1D4AC}, {0x1D4AE, 0x1D4B9}, + {0x1D4BB, 0x1D4BB}, {0x1D4BD, 0x1D4C3}, {0x1D4C5, 0x1D505}, + {0x1D507, 0x1D50A}, {0x1D50D, 0x1D514}, {0x1D516, 0x1D51C}, + {0x1D51E, 0x1D539}, {0x1D53B, 0x1D53E}, {0x1D540, 0x1D544}, + {0x1D546, 0x1D546}, {0x1D54A, 0x1D550}, {0x1D552, 0x1D6A5}, + {0x1D6A8, 0x1D7CB}, {0x1D7CE, 0x1DA8B}, {0x1DA9B, 0x1DA9F}, + {0x1DAA1, 0x1DAAF}, {0x1DF00, 0x1DF1E}, {0x1DF25, 0x1DF2A}, + {0x1E000, 0x1E006}, {0x1E008, 0x1E018}, {0x1E01B, 0x1E021}, + {0x1E023, 0x1E024}, {0x1E026, 0x1E02A}, {0x1E030, 0x1E06D}, + {0x1E08F, 0x1E08F}, {0x1E100, 0x1E12C}, {0x1E130, 0x1E13D}, + {0x1E140, 0x1E149}, {0x1E14E, 0x1E14F}, {0x1E290, 0x1E2AE}, + {0x1E2C0, 0x1E2F9}, {0x1E2FF, 0x1E2FF}, {0x1E4D0, 0x1E4F9}, + {0x1E7E0, 0x1E7E6}, {0x1E7E8, 0x1E7EB}, {0x1E7ED, 0x1E7EE}, + {0x1E7F0, 0x1E7FE}, {0x1E800, 0x1E8C4}, {0x1E8C7, 0x1E8D6}, {0x1E900, 0x1E94B}, {0x1E950, 0x1E959}, {0x1E95E, 0x1E95F}, {0x1EC71, 0x1ECB4}, {0x1ED01, 0x1ED3D}, {0x1EE00, 0x1EE03}, {0x1EE05, 0x1EE1F}, {0x1EE21, 0x1EE22}, {0x1EE24, 0x1EE24}, @@ -400,8 +411,8 @@ var neutral = table{ {0x1F54F, 0x1F54F}, {0x1F568, 0x1F579}, {0x1F57B, 0x1F594}, {0x1F597, 0x1F5A3}, {0x1F5A5, 0x1F5FA}, {0x1F650, 0x1F67F}, {0x1F6C6, 0x1F6CB}, {0x1F6CD, 0x1F6CF}, {0x1F6D3, 0x1F6D4}, - {0x1F6E0, 0x1F6EA}, {0x1F6F0, 0x1F6F3}, {0x1F700, 0x1F773}, - {0x1F780, 0x1F7D8}, {0x1F800, 0x1F80B}, {0x1F810, 0x1F847}, + {0x1F6E0, 0x1F6EA}, {0x1F6F0, 0x1F6F3}, {0x1F700, 0x1F776}, + {0x1F77B, 0x1F7D9}, {0x1F800, 0x1F80B}, {0x1F810, 0x1F847}, {0x1F850, 0x1F859}, {0x1F860, 0x1F887}, {0x1F890, 0x1F8AD}, {0x1F8B0, 0x1F8B1}, {0x1F900, 0x1F90B}, {0x1F93B, 0x1F93B}, {0x1F946, 0x1F946}, {0x1FA00, 0x1FA53}, {0x1FA60, 0x1FA6D}, diff --git a/vendor/github.com/mgechev/revive/config/config.go b/vendor/github.com/mgechev/revive/config/config.go index fed1d1913a..16559f5ec0 100644 --- a/vendor/github.com/mgechev/revive/config/config.go +++ b/vendor/github.com/mgechev/revive/config/config.go @@ -96,6 +96,8 @@ var allRules = append([]lint.Rule{ &rule.EnforceSliceStyleRule{}, &rule.MaxControlNestingRule{}, &rule.CommentsDensityRule{}, + &rule.FileLengthLimitRule{}, + &rule.FilenameFormatRule{}, }, defaultRules...) 
var allFormatters = []lint.Formatter{ diff --git a/vendor/github.com/mgechev/revive/lint/file.go b/vendor/github.com/mgechev/revive/lint/file.go index 23255304c5..e34f8b7f4b 100644 --- a/vendor/github.com/mgechev/revive/lint/file.go +++ b/vendor/github.com/mgechev/revive/lint/file.go @@ -188,7 +188,7 @@ func (f *File) disabledIntervals(rules []Rule, mustSpecifyDisableReason bool, fa enabledDisabledRulesMap[name] = existing } - handleRules := func(filename, modifier string, isEnabled bool, line int, ruleNames []string) []DisabledInterval { + handleRules := func(_, modifier string, isEnabled bool, line int, ruleNames []string) []DisabledInterval { var result []DisabledInterval for _, name := range ruleNames { if modifier == "line" { diff --git a/vendor/github.com/mgechev/revive/lint/linter.go b/vendor/github.com/mgechev/revive/lint/linter.go index 3c97f306f0..56036e83d8 100644 --- a/vendor/github.com/mgechev/revive/lint/linter.go +++ b/vendor/github.com/mgechev/revive/lint/linter.go @@ -3,11 +3,9 @@ package lint import ( "bufio" "bytes" - "encoding/json" "fmt" "go/token" "os" - "os/exec" "path/filepath" "regexp" "strconv" @@ -15,6 +13,7 @@ import ( "sync" goversion "github.com/hashicorp/go-version" + "golang.org/x/mod/modfile" ) // ReadFile defines an abstraction for reading files. @@ -55,8 +54,9 @@ func (l Linter) readFile(path string) (result []byte, err error) { } var ( - genHdr = []byte("// Code generated ") - genFtr = []byte(" DO NOT EDIT.") + genHdr = []byte("// Code generated ") + genFtr = []byte(" DO NOT EDIT.") + defaultGoVersion = goversion.Must(goversion.NewVersion("1.0")) ) // Lint lints a set of files with the specified rule. @@ -93,7 +93,9 @@ func (l *Linter) Lint(packages [][]string, ruleSet []Rule, config Config) (<-cha d, v, err := detectGoMod(dir) if err != nil { - return nil, err + // No luck finding the go.mod file thus set the default Go version + v = defaultGoVersion + d = dir } perModVersions[d] = v perPkgVersions[n] = v @@ -156,37 +158,42 @@ func (l *Linter) lintPackage(filenames []string, gover *goversion.Version, ruleS } func detectGoMod(dir string) (rootDir string, ver *goversion.Version, err error) { - // https://github.com/golang/go/issues/44753#issuecomment-790089020 - cmd := exec.Command("go", "list", "-m", "-json") - cmd.Dir = dir + modFileName, err := retrieveModFile(dir) + if err != nil { + return "", nil, fmt.Errorf("%q doesn't seem to be part of a Go module", dir) + } - out, err := cmd.Output() + mod, err := os.ReadFile(modFileName) if err != nil { - return "", nil, fmt.Errorf("command go list: %w", err) + return "", nil, fmt.Errorf("failed to read %q, got %v", modFileName, err) } - // NOTE: A package may be part of a go workspace. In this case `go list -m` - // lists all modules in the workspace, so we need to go through them all. - d := json.NewDecoder(bytes.NewBuffer(out)) - for d.More() { - var v struct { - GoMod string `json:"GoMod"` - GoVersion string `json:"GoVersion"` - Dir string `json:"Dir"` - } - if err = d.Decode(&v); err != nil { - return "", nil, err - } - if v.GoMod == "" { - return "", nil, fmt.Errorf("not part of a module: %q", dir) + modAst, err := modfile.ParseLax(modFileName, mod, nil) + if err != nil { + return "", nil, fmt.Errorf("failed to parse %q, got %v", modFileName, err) + } + + ver, err = goversion.NewVersion(modAst.Go.Version) + return filepath.Dir(modFileName), ver, err +} + +func retrieveModFile(dir string) (string, error) { + const lookingForFile = "go.mod" + for { + if dir == "." 
|| dir == "/" { + return "", fmt.Errorf("did not found %q file", lookingForFile) } - if v.Dir != "" && strings.HasPrefix(dir, v.Dir) { - rootDir = v.Dir - ver, err = goversion.NewVersion(strings.TrimPrefix(v.GoVersion, "go")) - return rootDir, ver, err + + lookingForFilePath := filepath.Join(dir, lookingForFile) + info, err := os.Stat(lookingForFilePath) + if err != nil || info.IsDir() { + // lets check the parent dir + dir = filepath.Dir(dir) + continue } + + return lookingForFilePath, nil } - return "", nil, fmt.Errorf("not part of a module: %q", dir) } // isGenerated reports whether the source file is generated code diff --git a/vendor/github.com/mgechev/revive/lint/utils.go b/vendor/github.com/mgechev/revive/lint/name.go similarity index 100% rename from vendor/github.com/mgechev/revive/lint/utils.go rename to vendor/github.com/mgechev/revive/lint/name.go diff --git a/vendor/github.com/mgechev/revive/lint/package.go b/vendor/github.com/mgechev/revive/lint/package.go index b4a0a72c79..2ab035f169 100644 --- a/vendor/github.com/mgechev/revive/lint/package.go +++ b/vendor/github.com/mgechev/revive/lint/package.go @@ -33,6 +33,7 @@ var ( falseValue = 2 notSet = 3 + go121 = goversion.Must(goversion.NewVersion("1.21")) go122 = goversion.Must(goversion.NewVersion("1.22")) ) @@ -165,17 +166,17 @@ func (p *Package) scanSortable() { // bitfield for which methods exist on each type. const ( - Len = 1 << iota - Less - Swap + bfLen = 1 << iota + bfLess + bfSwap ) - nmap := map[string]int{"Len": Len, "Less": Less, "Swap": Swap} + nmap := map[string]int{"Len": bfLen, "Less": bfLess, "Swap": bfSwap} has := make(map[string]int) for _, f := range p.files { ast.Walk(&walker{nmap, has}, f.AST) } for typ, ms := range has { - if ms == Len|Less|Swap { + if ms == bfLen|bfLess|bfSwap { p.sortable[typ] = true } } @@ -194,6 +195,11 @@ func (p *Package) lint(rules []Rule, config Config, failures chan Failure) { wg.Wait() } +// IsAtLeastGo121 returns true if the Go version for this package is 1.21 or higher, false otherwise +func (p *Package) IsAtLeastGo121() bool { + return p.goVersion.GreaterThanOrEqual(go121) +} + // IsAtLeastGo122 returns true if the Go version for this package is 1.22 or higher, false otherwise func (p *Package) IsAtLeastGo122() bool { return p.goVersion.GreaterThanOrEqual(go122) diff --git a/vendor/github.com/mgechev/revive/rule/add-constant.go b/vendor/github.com/mgechev/revive/rule/add-constant.go index 73dfa932cd..233f1d8489 100644 --- a/vendor/github.com/mgechev/revive/rule/add-constant.go +++ b/vendor/github.com/mgechev/revive/rule/add-constant.go @@ -160,12 +160,15 @@ func (w *lintAddConstantRule) isIgnoredFunc(fName string) bool { } func (w *lintAddConstantRule) checkStrLit(n *ast.BasicLit) { + const ignoreMarker = -1 + if w.allowList[kindSTRING][n.Value] { return } count := w.strLits[n.Value] - if count >= 0 { + mustCheck := count > ignoreMarker + if mustCheck { w.strLits[n.Value] = count + 1 if w.strLits[n.Value] > w.strLitLimit { w.onFailure(lint.Failure{ diff --git a/vendor/github.com/mgechev/revive/rule/argument-limit.go b/vendor/github.com/mgechev/revive/rule/argument-limit.go index 8120288fd5..b6ce0e81a7 100644 --- a/vendor/github.com/mgechev/revive/rule/argument-limit.go +++ b/vendor/github.com/mgechev/revive/rule/argument-limit.go @@ -10,7 +10,7 @@ import ( // ArgumentsLimitRule lints given else constructs. 
type ArgumentsLimitRule struct { - total int + max int sync.Mutex } @@ -19,18 +19,20 @@ const defaultArgumentsLimit = 8 func (r *ArgumentsLimitRule) configure(arguments lint.Arguments) { r.Lock() defer r.Unlock() - if r.total == 0 { - if len(arguments) < 1 { - r.total = defaultArgumentsLimit - return - } + if r.max != 0 { + return + } - total, ok := arguments[0].(int64) // Alt. non panicking version - if !ok { - panic(`invalid value passed as argument number to the "argument-limit" rule`) - } - r.total = int(total) + if len(arguments) < 1 { + r.max = defaultArgumentsLimit + return } + + maxArguments, ok := arguments[0].(int64) // Alt. non panicking version + if !ok { + panic(`invalid value passed as argument number to the "argument-limit" rule`) + } + r.max = int(maxArguments) } // Apply applies the rule to given file. @@ -43,7 +45,7 @@ func (r *ArgumentsLimitRule) Apply(file *lint.File, arguments lint.Arguments) [] } walker := lintArgsNum{ - total: r.total, + max: r.max, onFailure: onFailure, } @@ -58,27 +60,30 @@ func (*ArgumentsLimitRule) Name() string { } type lintArgsNum struct { - total int + max int onFailure func(lint.Failure) } func (w lintArgsNum) Visit(n ast.Node) ast.Visitor { node, ok := n.(*ast.FuncDecl) - if ok { - num := 0 - for _, l := range node.Type.Params.List { - for range l.Names { - num++ - } - } - if num > w.total { - w.onFailure(lint.Failure{ - Confidence: 1, - Failure: fmt.Sprintf("maximum number of arguments per function exceeded; max %d but got %d", w.total, num), - Node: node.Type, - }) - return w + if !ok { + return w + } + + num := 0 + for _, l := range node.Type.Params.List { + for range l.Names { + num++ } } - return w + + if num > w.max { + w.onFailure(lint.Failure{ + Confidence: 1, + Failure: fmt.Sprintf("maximum number of arguments per function exceeded; max %d but got %d", w.max, num), + Node: node.Type, + }) + } + + return nil // skip visiting the body of the function } diff --git a/vendor/github.com/mgechev/revive/rule/blank-imports.go b/vendor/github.com/mgechev/revive/rule/blank-imports.go index a3d50b4f7e..0ddb4aad2b 100644 --- a/vendor/github.com/mgechev/revive/rule/blank-imports.go +++ b/vendor/github.com/mgechev/revive/rule/blank-imports.go @@ -22,9 +22,8 @@ func (r *BlankImportsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failu } const ( - message = "a blank import should be only in a main or test package, or have a comment justifying it" - category = "imports" - + message = "a blank import should be only in a main or test package, or have a comment justifying it" + category = "imports" embedImportPath = `"embed"` ) @@ -39,7 +38,8 @@ func (r *BlankImportsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failu continue // Ignore non-blank imports. 
} - if i > 0 { + isNotFirstElement := i > 0 + if isNotFirstElement { prev := file.AST.Imports[i-1] prevPos := file.ToPosition(prev.Pos()) diff --git a/vendor/github.com/mgechev/revive/rule/bool-literal-in-expr.go b/vendor/github.com/mgechev/revive/rule/bool-literal-in-expr.go index d6150339b9..71551e55a0 100644 --- a/vendor/github.com/mgechev/revive/rule/bool-literal-in-expr.go +++ b/vendor/github.com/mgechev/revive/rule/bool-literal-in-expr.go @@ -45,7 +45,6 @@ func (w *lintBoolLiteral) Visit(node ast.Node) ast.Visitor { lexeme, ok := isExprABooleanLit(n.X) if !ok { lexeme, ok = isExprABooleanLit(n.Y) - if !ok { return w } diff --git a/vendor/github.com/mgechev/revive/rule/cognitive-complexity.go b/vendor/github.com/mgechev/revive/rule/cognitive-complexity.go index 1973faef87..83640fd3d1 100644 --- a/vendor/github.com/mgechev/revive/rule/cognitive-complexity.go +++ b/vendor/github.com/mgechev/revive/rule/cognitive-complexity.go @@ -21,19 +21,21 @@ const defaultMaxCognitiveComplexity = 7 func (r *CognitiveComplexityRule) configure(arguments lint.Arguments) { r.Lock() defer r.Unlock() - if r.maxComplexity == 0 { + if r.maxComplexity != 0 { + return // already configured + } - if len(arguments) < 1 { - r.maxComplexity = defaultMaxCognitiveComplexity - return - } + if len(arguments) < 1 { + r.maxComplexity = defaultMaxCognitiveComplexity + return + } - complexity, ok := arguments[0].(int64) - if !ok { - panic(fmt.Sprintf("invalid argument type for cognitive-complexity, expected int64, got %T", arguments[0])) - } - r.maxComplexity = int(complexity) + complexity, ok := arguments[0].(int64) + if !ok { + panic(fmt.Sprintf("invalid argument type for cognitive-complexity, expected int64, got %T", arguments[0])) } + + r.maxComplexity = int(complexity) } // Apply applies the rule to given file. diff --git a/vendor/github.com/mgechev/revive/rule/comment-spacings.go b/vendor/github.com/mgechev/revive/rule/comment-spacings.go index bfb7eaf233..f721513017 100644 --- a/vendor/github.com/mgechev/revive/rule/comment-spacings.go +++ b/vendor/github.com/mgechev/revive/rule/comment-spacings.go @@ -18,16 +18,17 @@ type CommentSpacingsRule struct { func (r *CommentSpacingsRule) configure(arguments lint.Arguments) { r.Lock() defer r.Unlock() + if r.allowList != nil { + return // already configured + } - if r.allowList == nil { - r.allowList = []string{} - for _, arg := range arguments { - allow, ok := arg.(string) // Alt. non panicking version - if !ok { - panic(fmt.Sprintf("invalid argument %v for %s; expected string but got %T", arg, r.Name(), arg)) - } - r.allowList = append(r.allowList, `//`+allow) + r.allowList = []string{} + for _, arg := range arguments { + allow, ok := arg.(string) // Alt. 
non panicking version + if !ok { + panic(fmt.Sprintf("invalid argument %v for %s; expected string but got %T", arg, r.Name(), arg)) } + r.allowList = append(r.allowList, `//`+allow) } } diff --git a/vendor/github.com/mgechev/revive/rule/comments-density.go b/vendor/github.com/mgechev/revive/rule/comments-density.go index 5956fea237..c5298ea07d 100644 --- a/vendor/github.com/mgechev/revive/rule/comments-density.go +++ b/vendor/github.com/mgechev/revive/rule/comments-density.go @@ -53,7 +53,8 @@ func (r *CommentsDensityRule) Apply(file *lint.File, arguments lint.Arguments) [ { Node: file.AST, Confidence: 1, - Failure: fmt.Sprintf("the file has a comment density of %2.f%% (%d comment lines for %d code lines) but expected a minimum of %d%%", density, commentsLines, statementsCount, r.minimumCommentsDensity), + Failure: fmt.Sprintf("the file has a comment density of %2.f%% (%d comment lines for %d code lines) but expected a minimum of %d%%", + density, commentsLines, statementsCount, r.minimumCommentsDensity), }, } } diff --git a/vendor/github.com/mgechev/revive/rule/constant-logical-expr.go b/vendor/github.com/mgechev/revive/rule/constant-logical-expr.go index 36cd641f74..9e34d3d16b 100644 --- a/vendor/github.com/mgechev/revive/rule/constant-logical-expr.go +++ b/vendor/github.com/mgechev/revive/rule/constant-logical-expr.go @@ -41,8 +41,9 @@ func (w *lintConstantLogicalExpr) Visit(node ast.Node) ast.Visitor { return w } - if gofmt(n.X) != gofmt(n.Y) { // check if subexpressions are the same - return w + subExpressionsAreNotEqual := gofmt(n.X) != gofmt(n.Y) + if subExpressionsAreNotEqual { + return w // nothing to say } // Handles cases like: a <= a, a == a, a >= a diff --git a/vendor/github.com/mgechev/revive/rule/cyclomatic.go b/vendor/github.com/mgechev/revive/rule/cyclomatic.go index 9f6d50043d..10413de24c 100644 --- a/vendor/github.com/mgechev/revive/rule/cyclomatic.go +++ b/vendor/github.com/mgechev/revive/rule/cyclomatic.go @@ -22,18 +22,20 @@ const defaultMaxCyclomaticComplexity = 10 func (r *CyclomaticRule) configure(arguments lint.Arguments) { r.Lock() defer r.Unlock() - if r.maxComplexity == 0 { - if len(arguments) < 1 { - r.maxComplexity = defaultMaxCyclomaticComplexity - return - } + if r.maxComplexity != 0 { + return // already configured + } - complexity, ok := arguments[0].(int64) // Alt. non panicking version - if !ok { - panic(fmt.Sprintf("invalid argument for cyclomatic complexity; expected int but got %T", arguments[0])) - } - r.maxComplexity = int(complexity) + if len(arguments) < 1 { + r.maxComplexity = defaultMaxCyclomaticComplexity + return + } + + complexity, ok := arguments[0].(int64) // Alt. non panicking version + if !ok { + panic(fmt.Sprintf("invalid argument for cyclomatic complexity; expected int but got %T", arguments[0])) } + r.maxComplexity = int(complexity) } // Apply applies the rule to given file. 
@@ -70,31 +72,35 @@ type lintCyclomatic struct { func (w lintCyclomatic) Visit(_ ast.Node) ast.Visitor { f := w.file for _, decl := range f.AST.Decls { - if fn, ok := decl.(*ast.FuncDecl); ok { - c := complexity(fn) - if c > w.complexity { - w.onFailure(lint.Failure{ - Confidence: 1, - Category: "maintenance", - Failure: fmt.Sprintf("function %s has cyclomatic complexity %d (> max enabled %d)", - funcName(fn), c, w.complexity), - Node: fn, - }) - } + fn, ok := decl.(*ast.FuncDecl) + if !ok { + continue + } + + c := complexity(fn) + if c > w.complexity { + w.onFailure(lint.Failure{ + Confidence: 1, + Category: "maintenance", + Failure: fmt.Sprintf("function %s has cyclomatic complexity %d (> max enabled %d)", + funcName(fn), c, w.complexity), + Node: fn, + }) } } + return nil } // funcName returns the name representation of a function or method: // "(Type).Name" for methods or simply "Name" for functions. func funcName(fn *ast.FuncDecl) string { - if fn.Recv != nil { - if fn.Recv.NumFields() > 0 { - typ := fn.Recv.List[0].Type - return fmt.Sprintf("(%s).%s", recvString(typ), fn.Name) - } + declarationHasReceiver := fn.Recv != nil && fn.Recv.NumFields() > 0 + if declarationHasReceiver { + typ := fn.Recv.List[0].Type + return fmt.Sprintf("(%s).%s", recvString(typ), fn.Name) } + return fn.Name.Name } diff --git a/vendor/github.com/mgechev/revive/rule/datarace.go b/vendor/github.com/mgechev/revive/rule/datarace.go index 86ec6e1133..21a7a706ec 100644 --- a/vendor/github.com/mgechev/revive/rule/datarace.go +++ b/vendor/github.com/mgechev/revive/rule/datarace.go @@ -80,7 +80,7 @@ func (w lintFunctionForDataRaces) Visit(node ast.Node) ast.Visitor { return nil } - getIds := func(exprs ...ast.Expr) []*ast.Ident { + getIDs := func(exprs ...ast.Expr) []*ast.Ident { r := []*ast.Ident{} for _, expr := range exprs { if id, ok := expr.(*ast.Ident); ok { @@ -90,7 +90,7 @@ func (w lintFunctionForDataRaces) Visit(node ast.Node) ast.Visitor { return r } - ids := getIds(n.Key, n.Value) + ids := getIDs(n.Key, n.Value) for _, id := range ids { w.rangeIDs[id.Obj] = struct{}{} } diff --git a/vendor/github.com/mgechev/revive/rule/deep-exit.go b/vendor/github.com/mgechev/revive/rule/deep-exit.go index 918d4294a9..7b3dd0f822 100644 --- a/vendor/github.com/mgechev/revive/rule/deep-exit.go +++ b/vendor/github.com/mgechev/revive/rule/deep-exit.go @@ -73,9 +73,10 @@ func (w lintDeepExit) Visit(node ast.Node) ast.Visitor { return w } - fn := fc.Sel.Name pkg := id.Name - if w.exitFunctions[pkg] != nil && w.exitFunctions[pkg][fn] { // it's a call to an exit function + fn := fc.Sel.Name + isACallToExitFunction := w.exitFunctions[pkg] != nil && w.exitFunctions[pkg][fn] + if isACallToExitFunction { w.onFailure(lint.Failure{ Confidence: 1, Node: ce, diff --git a/vendor/github.com/mgechev/revive/rule/defer.go b/vendor/github.com/mgechev/revive/rule/defer.go index adc6478aee..3c31d507bf 100644 --- a/vendor/github.com/mgechev/revive/rule/defer.go +++ b/vendor/github.com/mgechev/revive/rule/defer.go @@ -16,10 +16,12 @@ type DeferRule struct { func (r *DeferRule) configure(arguments lint.Arguments) { r.Lock() - if r.allow == nil { - r.allow = r.allowFromArgs(arguments) + defer r.Unlock() + if r.allow != nil { + return // already configured } - r.Unlock() + + r.allow = r.allowFromArgs(arguments) } // Apply applies the rule to given file. @@ -111,7 +113,7 @@ func (w lintDeferRule) Visit(node ast.Node) ast.Visitor { // but it is very likely to be a misunderstanding of defer's behavior around arguments. 
w.newFailure("recover must be called inside a deferred function, this is executing recover immediately", n, 1, "logic", "immediate-recover") } - + return nil // no need to analyze the arguments of the function call case *ast.DeferStmt: if isIdent(n.Call.Fun, "recover") { // defer recover() diff --git a/vendor/github.com/mgechev/revive/rule/dot-imports.go b/vendor/github.com/mgechev/revive/rule/dot-imports.go index 6b877677db..df0b2a7f4f 100644 --- a/vendor/github.com/mgechev/revive/rule/dot-imports.go +++ b/vendor/github.com/mgechev/revive/rule/dot-imports.go @@ -59,17 +59,17 @@ func (r *DotImportsRule) configure(arguments lint.Arguments) { } if allowedPkgArg, ok := args["allowedPackages"]; ok { - if pkgs, ok := allowedPkgArg.([]any); ok { - for _, p := range pkgs { - if pkg, ok := p.(string); ok { - r.allowedPackages.add(pkg) - } else { - panic(fmt.Sprintf("Invalid argument to the dot-imports rule, string expected. Got '%v' (%T)", p, p)) - } - } - } else { + pkgs, ok := allowedPkgArg.([]any) + if !ok { panic(fmt.Sprintf("Invalid argument to the dot-imports rule, []string expected. Got '%v' (%T)", allowedPkgArg, allowedPkgArg)) } + for _, p := range pkgs { + pkg, ok := p.(string) + if !ok { + panic(fmt.Sprintf("Invalid argument to the dot-imports rule, string expected. Got '%v' (%T)", p, p)) + } + r.allowedPackages.add(pkg) + } } } @@ -81,12 +81,13 @@ type lintImports struct { } func (w lintImports) Visit(_ ast.Node) ast.Visitor { - for _, is := range w.fileAst.Imports { - if is.Name != nil && is.Name.Name == "." && !w.allowPackages.isAllowedPackage(is.Path.Value) { + for _, importSpec := range w.fileAst.Imports { + isDotImport := importSpec.Name != nil && importSpec.Name.Name == "." + if isDotImport && !w.allowPackages.isAllowedPackage(importSpec.Path.Value) { w.onFailure(lint.Failure{ Confidence: 1, Failure: "should not use dot imports", - Node: is, + Node: importSpec, Category: "imports", }) } diff --git a/vendor/github.com/mgechev/revive/rule/early-return.go b/vendor/github.com/mgechev/revive/rule/early-return.go index 9c04a1dbe9..62d491f27d 100644 --- a/vendor/github.com/mgechev/revive/rule/early-return.go +++ b/vendor/github.com/mgechev/revive/rule/early-return.go @@ -21,27 +21,27 @@ func (*EarlyReturnRule) Name() string { return "early-return" } -// CheckIfElse evaluates the rule against an ifelse.Chain. -func (*EarlyReturnRule) CheckIfElse(chain ifelse.Chain, args ifelse.Args) (failMsg string) { +// CheckIfElse evaluates the rule against an ifelse.Chain and returns a failure message if applicable. 
+func (*EarlyReturnRule) CheckIfElse(chain ifelse.Chain, args ifelse.Args) string { if !chain.Else.Deviates() { // this rule only applies if the else-block deviates control flow - return + return "" } if chain.HasPriorNonDeviating && !chain.If.IsEmpty() { // if we de-indent this block then a previous branch // might flow into it, affecting program behaviour - return + return "" } if chain.If.Deviates() { // avoid overlapping with superfluous-else - return + return "" } if args.PreserveScope && !chain.AtBlockEnd && (chain.HasInitializer || chain.If.HasDecls) { // avoid increasing variable scope - return + return "" } if chain.If.IsEmpty() { diff --git a/vendor/github.com/mgechev/revive/rule/enforce-map-style.go b/vendor/github.com/mgechev/revive/rule/enforce-map-style.go index 36ac2374c2..c698c40ed8 100644 --- a/vendor/github.com/mgechev/revive/rule/enforce-map-style.go +++ b/vendor/github.com/mgechev/revive/rule/enforce-map-style.go @@ -65,7 +65,6 @@ func (r *EnforceMapStyleRule) configure(arguments lint.Arguments) { var err error r.enforceMapStyle, err = mapStyleFromString(enforceMapStyle) - if err != nil { panic(fmt.Sprintf("Invalid argument to the enforce-map-style rule: %v", err)) } @@ -94,8 +93,8 @@ func (r *EnforceMapStyleRule) Apply(file *lint.File, arguments lint.Arguments) [ return true } - if len(v.Elts) > 0 { - // not an empty map + isEmptyMap := len(v.Elts) > 0 + if isEmptyMap { return true } diff --git a/vendor/github.com/mgechev/revive/rule/enforce-repeated-arg-type-style.go b/vendor/github.com/mgechev/revive/rule/enforce-repeated-arg-type-style.go index 067082b1b0..a435ee186c 100644 --- a/vendor/github.com/mgechev/revive/rule/enforce-repeated-arg-type-style.go +++ b/vendor/github.com/mgechev/revive/rule/enforce-repeated-arg-type-style.go @@ -3,7 +3,6 @@ package rule import ( "fmt" "go/ast" - "go/types" "sync" "github.com/mgechev/revive/lint" @@ -104,13 +103,6 @@ func (r *EnforceRepeatedArgTypeStyleRule) Apply(file *lint.File, arguments lint. var failures []lint.Failure - err := file.Pkg.TypeCheck() - if err != nil { - // the file has other issues - return nil - } - typesInfo := file.Pkg.TypesInfo() - astFile := file.AST ast.Inspect(astFile, func(n ast.Node) bool { switch fn := n.(type) { @@ -134,12 +126,14 @@ func (r *EnforceRepeatedArgTypeStyleRule) Apply(file *lint.File, arguments lint. var prevType ast.Expr if fn.Type.Params != nil { for _, field := range fn.Type.Params.List { - if types.Identical(typesInfo.Types[field.Type].Type, typesInfo.Types[prevType].Type) { + prevTypeStr := gofmt(prevType) + currentTypeStr := gofmt(field.Type) + if currentTypeStr == prevTypeStr { failures = append(failures, lint.Failure{ Confidence: 1, - Node: field, + Node: prevType, Category: "style", - Failure: "repeated argument type can be omitted", + Failure: fmt.Sprintf("repeated argument type %q can be omitted", prevTypeStr), }) } prevType = field.Type @@ -166,12 +160,14 @@ func (r *EnforceRepeatedArgTypeStyleRule) Apply(file *lint.File, arguments lint. 
var prevType ast.Expr if fn.Type.Results != nil { for _, field := range fn.Type.Results.List { - if field.Names != nil && types.Identical(typesInfo.Types[field.Type].Type, typesInfo.Types[prevType].Type) { + prevTypeStr := gofmt(prevType) + currentTypeStr := gofmt(field.Type) + if field.Names != nil && currentTypeStr == prevTypeStr { failures = append(failures, lint.Failure{ Confidence: 1, - Node: field, + Node: prevType, Category: "style", - Failure: "repeated return type can be omitted", + Failure: fmt.Sprintf("repeated return type %q can be omitted", prevTypeStr), }) } prevType = field.Type diff --git a/vendor/github.com/mgechev/revive/rule/enforce-slice-style.go b/vendor/github.com/mgechev/revive/rule/enforce-slice-style.go index 60d8ac0665..14be258935 100644 --- a/vendor/github.com/mgechev/revive/rule/enforce-slice-style.go +++ b/vendor/github.com/mgechev/revive/rule/enforce-slice-style.go @@ -69,7 +69,6 @@ func (r *EnforceSliceStyleRule) configure(arguments lint.Arguments) { var err error r.enforceSliceStyle, err = sliceStyleFromString(enforceSliceStyle) - if err != nil { panic(fmt.Sprintf("Invalid argument to the enforce-slice-style rule: %v", err)) } @@ -101,8 +100,8 @@ func (r *EnforceSliceStyleRule) Apply(file *lint.File, arguments lint.Arguments) return true } - if len(v.Elts) > 0 { - // not an empty slice + isNotEmptySlice := len(v.Elts) > 0 + if isNotEmptySlice { return true } @@ -132,8 +131,8 @@ func (r *EnforceSliceStyleRule) Apply(file *lint.File, arguments lint.Arguments) return true } - if len(v.Args) < 2 { - // skip invalid make declarations + isInvalidMakeDeclaration := len(v.Args) < 2 + if isInvalidMakeDeclaration { return true } @@ -148,8 +147,8 @@ func (r *EnforceSliceStyleRule) Apply(file *lint.File, arguments lint.Arguments) return true } - if arg.Value != "0" { - // skip slice with non-zero size + isSliceSizeNotZero := arg.Value != "0" + if isSliceSizeNotZero { return true } @@ -160,8 +159,8 @@ func (r *EnforceSliceStyleRule) Apply(file *lint.File, arguments lint.Arguments) return true } - if arg.Value != "0" { - // skip non-zero capacity slice + isNonZeroCapacitySlice := arg.Value != "0" + if isNonZeroCapacitySlice { return true } } diff --git a/vendor/github.com/mgechev/revive/rule/exported.go b/vendor/github.com/mgechev/revive/rule/exported.go index b8663c48c6..e3972d40e5 100644 --- a/vendor/github.com/mgechev/revive/rule/exported.go +++ b/vendor/github.com/mgechev/revive/rule/exported.go @@ -13,28 +13,93 @@ import ( "github.com/mgechev/revive/lint" ) +// disabledChecks store ignored warnings types +type disabledChecks struct { + Const bool + Function bool + Method bool + PrivateReceivers bool + PublicInterfaces bool + Stuttering bool + Type bool + Var bool +} + +const checkNamePrivateReceivers = "privateReceivers" +const checkNamePublicInterfaces = "publicInterfaces" +const checkNameStuttering = "stuttering" + +// isDisabled returns true if the given check is disabled, false otherwise +func (dc *disabledChecks) isDisabled(checkName string) bool { + switch checkName { + case "var": + return dc.Var + case "const": + return dc.Const + case "function": + return dc.Function + case "method": + return dc.Method + case checkNamePrivateReceivers: + return dc.PrivateReceivers + case checkNamePublicInterfaces: + return dc.PublicInterfaces + case checkNameStuttering: + return dc.Stuttering + case "type": + return dc.Type + default: + return false + } +} + // ExportedRule lints given else constructs. 
type ExportedRule struct { - configured bool - checkPrivateReceivers bool - disableStutteringCheck bool - stuttersMsg string + configured bool + stuttersMsg string + disabledChecks disabledChecks sync.Mutex } func (r *ExportedRule) configure(arguments lint.Arguments) { r.Lock() - if !r.configured { - var sayRepetitiveInsteadOfStutters bool - r.checkPrivateReceivers, r.disableStutteringCheck, sayRepetitiveInsteadOfStutters = r.getConf(arguments) - r.stuttersMsg = "stutters" - if sayRepetitiveInsteadOfStutters { - r.stuttersMsg = "is repetitive" - } + defer r.Unlock() + if r.configured { + return + } + r.configured = true - r.configured = true + r.disabledChecks = disabledChecks{PrivateReceivers: true, PublicInterfaces: true} + r.stuttersMsg = "stutters" + for _, flag := range arguments { + switch flag := flag.(type) { + case string: + switch flag { + case "checkPrivateReceivers": + r.disabledChecks.PrivateReceivers = false + case "disableStutteringCheck": + r.disabledChecks.Stuttering = true + case "sayRepetitiveInsteadOfStutters": + r.stuttersMsg = "is repetitive" + case "checkPublicInterface": + r.disabledChecks.PublicInterfaces = false + case "disableChecksOnConstants": + r.disabledChecks.Const = true + case "disableChecksOnFunctions": + r.disabledChecks.Function = true + case "disableChecksOnMethods": + r.disabledChecks.Method = true + case "disableChecksOnTypes": + r.disabledChecks.Type = true + case "disableChecksOnVariables": + r.disabledChecks.Var = true + default: + panic(fmt.Sprintf("Unknown configuration flag %s for %s rule", flag, r.Name())) + } + default: + panic(fmt.Sprintf("Invalid argument for the %s rule: expecting a string, got %T", r.Name(), flag)) + } } - r.Unlock() } // Apply applies the rule to given file. @@ -55,9 +120,8 @@ func (r *ExportedRule) Apply(file *lint.File, args lint.Arguments) []lint.Failur failures = append(failures, failure) }, genDeclMissingComments: make(map[*ast.GenDecl]bool), - checkPrivateReceivers: r.checkPrivateReceivers, - disableStutteringCheck: r.disableStutteringCheck, stuttersMsg: r.stuttersMsg, + disabledChecks: r.disabledChecks, } ast.Walk(&walker, fileAst) @@ -70,61 +134,36 @@ func (*ExportedRule) Name() string { return "exported" } -func (r *ExportedRule) getConf(args lint.Arguments) (checkPrivateReceivers, disableStutteringCheck, sayRepetitiveInsteadOfStutters bool) { - // if any, we expect a slice of strings as configuration - if len(args) < 1 { - return - } - for _, flag := range args { - flagStr, ok := flag.(string) - if !ok { - panic(fmt.Sprintf("Invalid argument for the %s rule: expecting a string, got %T", r.Name(), flag)) - } - - switch flagStr { - case "checkPrivateReceivers": - checkPrivateReceivers = true - case "disableStutteringCheck": - disableStutteringCheck = true - case "sayRepetitiveInsteadOfStutters": - sayRepetitiveInsteadOfStutters = true - default: - panic(fmt.Sprintf("Unknown configuration flag %s for %s rule", flagStr, r.Name())) - } - } - - return -} - type lintExported struct { file *lint.File fileAst *ast.File lastGen *ast.GenDecl genDeclMissingComments map[*ast.GenDecl]bool onFailure func(lint.Failure) - checkPrivateReceivers bool - disableStutteringCheck bool stuttersMsg string + disabledChecks disabledChecks } func (w *lintExported) lintFuncDoc(fn *ast.FuncDecl) { if !ast.IsExported(fn.Name.Name) { - // func is unexported - return + return // func is unexported, nothing to do } + kind := "function" name := fn.Name.Name - if fn.Recv != nil && len(fn.Recv.List) > 0 { - // method + isMethod := fn.Recv != nil && 
len(fn.Recv.List) > 0 + if isMethod { kind = "method" recv := typeparams.ReceiverType(fn) - if !w.checkPrivateReceivers && !ast.IsExported(recv) { - // receiver is unexported + + if !ast.IsExported(recv) && w.disabledChecks.PrivateReceivers { return } + if commonMethods[name] { return } + switch name { case "Len", "Less", "Swap": sortables := w.file.Pkg.Sortable() @@ -134,6 +173,11 @@ func (w *lintExported) lintFuncDoc(fn *ast.FuncDecl) { } name = recv + "." + name } + + if w.disabledChecks.isDisabled(kind) { + return + } + if fn.Doc == nil { w.onFailure(lint.Failure{ Node: fn, @@ -143,6 +187,7 @@ func (w *lintExported) lintFuncDoc(fn *ast.FuncDecl) { }) return } + s := normalizeText(fn.Doc.Text()) prefix := fn.Name.Name + " " if !strings.HasPrefix(s, prefix) { @@ -156,7 +201,7 @@ func (w *lintExported) lintFuncDoc(fn *ast.FuncDecl) { } func (w *lintExported) checkStutter(id *ast.Ident, thing string) { - if w.disableStutteringCheck { + if w.disabledChecks.Stuttering { return } @@ -190,9 +235,14 @@ func (w *lintExported) checkStutter(id *ast.Ident, thing string) { } func (w *lintExported) lintTypeDoc(t *ast.TypeSpec, doc *ast.CommentGroup) { + if w.disabledChecks.isDisabled("type") { + return + } + if !ast.IsExported(t.Name.Name) { return } + if doc == nil { w.onFailure(lint.Failure{ Node: t, @@ -214,14 +264,19 @@ func (w *lintExported) lintTypeDoc(t *ast.TypeSpec, doc *ast.CommentGroup) { break } } - if !strings.HasPrefix(s, t.Name.Name+" ") { - w.onFailure(lint.Failure{ - Node: doc, - Confidence: 1, - Category: "comments", - Failure: fmt.Sprintf(`comment on exported type %v should be of the form "%v ..." (with optional leading article)`, t.Name, t.Name), - }) + + // if comment starts with name of type and has some text after - it's ok + expectedPrefix := t.Name.Name + " " + if strings.HasPrefix(s, expectedPrefix) { + return } + + w.onFailure(lint.Failure{ + Node: doc, + Confidence: 1, + Category: "comments", + Failure: fmt.Sprintf(`comment on exported type %v should be of the form "%s..." (with optional leading article)`, t.Name, expectedPrefix), + }) } func (w *lintExported) lintValueSpecDoc(vs *ast.ValueSpec, gd *ast.GenDecl, genDeclMissingComments map[*ast.GenDecl]bool) { @@ -230,6 +285,10 @@ func (w *lintExported) lintValueSpecDoc(vs *ast.ValueSpec, gd *ast.GenDecl, genD kind = "const" } + if w.disabledChecks.isDisabled(kind) { + return + } + if len(vs.Names) > 1 { // Check that none are exported except for the first. for _, n := range vs.Names[1:] { @@ -251,7 +310,7 @@ func (w *lintExported) lintValueSpecDoc(vs *ast.ValueSpec, gd *ast.GenDecl, genD return } - if vs.Doc == nil && vs.Comment == nil && gd.Doc == nil { + if vs.Doc == nil && gd.Doc == nil { if genDeclMissingComments[gd] { return } @@ -301,7 +360,7 @@ func (w *lintExported) lintValueSpecDoc(vs *ast.ValueSpec, gd *ast.GenDecl, genD // // This function is needed because ast.CommentGroup.Text() does not handle //-style and /*-style comments uniformly func normalizeText(t string) string { - return strings.TrimPrefix(t, " ") + return strings.TrimSpace(t) } func (w *lintExported) Visit(n ast.Node) ast.Visitor { @@ -330,7 +389,15 @@ func (w *lintExported) Visit(n ast.Node) ast.Visitor { } w.lintTypeDoc(v, doc) w.checkStutter(v.Name, "type") - // Don't proceed inside types. 
+ + if !w.disabledChecks.PublicInterfaces { + if iface, ok := v.Type.(*ast.InterfaceType); ok { + if ast.IsExported(v.Name.Name) { + w.doCheckPublicInterface(v.Name.Name, iface) + } + } + } + return nil case *ast.ValueSpec: w.lintValueSpecDoc(v, w.lastGen, w.genDeclMissingComments) @@ -338,3 +405,38 @@ func (w *lintExported) Visit(n ast.Node) ast.Visitor { } return w } + +func (w *lintExported) doCheckPublicInterface(typeName string, iface *ast.InterfaceType) { + for _, m := range iface.Methods.List { + w.lintInterfaceMethod(typeName, m) + } +} + +func (w *lintExported) lintInterfaceMethod(typeName string, m *ast.Field) { + if len(m.Names) == 0 { + return + } + if !ast.IsExported(m.Names[0].Name) { + return + } + name := m.Names[0].Name + if m.Doc == nil { + w.onFailure(lint.Failure{ + Node: m, + Confidence: 1, + Category: "comments", + Failure: fmt.Sprintf("public interface method %s.%s should be commented", typeName, name), + }) + return + } + s := normalizeText(m.Doc.Text()) + expectedPrefix := m.Names[0].Name + " " + if !strings.HasPrefix(s, expectedPrefix) { + w.onFailure(lint.Failure{ + Node: m.Doc, + Confidence: 0.8, + Category: "comments", + Failure: fmt.Sprintf(`comment on exported interface method %s.%s should be of the form "%s..."`, typeName, name, expectedPrefix), + }) + } +} diff --git a/vendor/github.com/mgechev/revive/rule/file-header.go b/vendor/github.com/mgechev/revive/rule/file-header.go index a7d69ff2b1..0dcb57746e 100644 --- a/vendor/github.com/mgechev/revive/rule/file-header.go +++ b/vendor/github.com/mgechev/revive/rule/file-header.go @@ -22,16 +22,18 @@ var ( func (r *FileHeaderRule) configure(arguments lint.Arguments) { r.Lock() defer r.Unlock() - if r.header == "" { - if len(arguments) < 1 { - return - } + if r.header != "" { + return // already configured + } - var ok bool - r.header, ok = arguments[0].(string) - if !ok { - panic(fmt.Sprintf("invalid argument for \"file-header\" rule: argument should be a string, got %T", arguments[0])) - } + if len(arguments) < 1 { + return + } + + var ok bool + r.header, ok = arguments[0].(string) + if !ok { + panic(fmt.Sprintf("invalid argument for \"file-header\" rule: argument should be a string, got %T", arguments[0])) } } diff --git a/vendor/github.com/mgechev/revive/rule/file-length-limit.go b/vendor/github.com/mgechev/revive/rule/file-length-limit.go new file mode 100644 index 0000000000..c5a5641f41 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/file-length-limit.go @@ -0,0 +1,138 @@ +package rule + +import ( + "bufio" + "bytes" + "fmt" + "go/ast" + "go/token" + "strings" + "sync" + + "github.com/mgechev/revive/lint" +) + +// FileLengthLimitRule lints the number of lines in a file. +type FileLengthLimitRule struct { + // max is the maximum number of lines allowed in a file. 0 means the rule is disabled. + max int + // skipComments indicates whether to skip comment lines when counting lines. + skipComments bool + // skipBlankLines indicates whether to skip blank lines when counting lines. + skipBlankLines bool + sync.Mutex +} + +// Apply applies the rule to given file. 
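Stepping back from the hunks for a moment (this note is not part of the vendored diff): the reworked `exported` rule above now routes every check through `disabledChecks`, and the new `"checkPublicInterface"` string flag clears `PublicInterfaces` so exported interface methods get comment checks too. Below is a minimal, hypothetical input file, assuming that flag is enabled; the package and identifier names are invented for illustration only.

```go
// Package store is a hypothetical example input for the reworked exported rule.
package store

// Store is an exported interface used only to illustrate the new check.
type Store interface {
	// Get returns the value stored under key. The comment starts with the
	// method name, so lintInterfaceMethod accepts it.
	Get(key string) (string, error)

	Put(key, value string) error // no doc comment: reported as "public interface method Store.Put should be commented"
}
```

A comment that exists but does not start with the method name is reported at confidence 0.8 with the "should be of the form" message added in the hunk above.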
+func (r *FileLengthLimitRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { + r.configure(arguments) + + if r.max <= 0 { + // when max is negative or 0 the rule is disabled + return nil + } + + all := 0 + blank := 0 + scanner := bufio.NewScanner(bytes.NewReader(file.Content())) + for scanner.Scan() { + all++ + if len(bytes.TrimSpace(scanner.Bytes())) == 0 { + blank++ + } + } + + if err := scanner.Err(); err != nil { + panic(err.Error()) + } + + lines := all + if r.skipComments { + lines -= countCommentLines(file.AST.Comments) + } + + if r.skipBlankLines { + lines -= blank + } + + if lines <= r.max { + return nil + } + + return []lint.Failure{ + { + Category: "code-style", + Confidence: 1, + Position: lint.FailurePosition{ + Start: token.Position{ + Filename: file.Name, + Line: all, + }, + }, + Failure: fmt.Sprintf("file length is %d lines, which exceeds the limit of %d", lines, r.max), + }, + } +} + +func (r *FileLengthLimitRule) configure(arguments lint.Arguments) { + r.Lock() + defer r.Unlock() + + if r.max != 0 { + return // already configured + } + + if len(arguments) < 1 { + return // use default + } + + argKV, ok := arguments[0].(map[string]any) + if !ok { + panic(fmt.Sprintf(`invalid argument to the "file-length-limit" rule. Expecting a k,v map, got %T`, arguments[0])) + } + for k, v := range argKV { + switch k { + case "max": + maxLines, ok := v.(int64) + if !ok || maxLines < 0 { + panic(fmt.Sprintf(`invalid configuration value for max lines in "file-length-limit" rule; need positive int64 but got %T`, arguments[0])) + } + r.max = int(maxLines) + case "skipComments": + skipComments, ok := v.(bool) + if !ok { + panic(fmt.Sprintf(`invalid configuration value for skip comments in "file-length-limit" rule; need bool but got %T`, arguments[1])) + } + r.skipComments = skipComments + case "skipBlankLines": + skipBlankLines, ok := v.(bool) + if !ok { + panic(fmt.Sprintf(`invalid configuration value for skip blank lines in "file-length-limit" rule; need bool but got %T`, arguments[2])) + } + r.skipBlankLines = skipBlankLines + } + } +} + +// Name returns the rule name. +func (*FileLengthLimitRule) Name() string { + return "file-length-limit" +} + +func countCommentLines(comments []*ast.CommentGroup) int { + count := 0 + for _, cg := range comments { + for _, comment := range cg.List { + if len(comment.Text) < 2 { + continue + } + switch comment.Text[1] { + case '/': // single-line comment + count++ + case '*': // multi-line comment + count += strings.Count(comment.Text, "\n") + 1 + } + } + } + return count +} diff --git a/vendor/github.com/mgechev/revive/rule/filename-format.go b/vendor/github.com/mgechev/revive/rule/filename-format.go new file mode 100644 index 0000000000..49fdf9c3e8 --- /dev/null +++ b/vendor/github.com/mgechev/revive/rule/filename-format.go @@ -0,0 +1,87 @@ +package rule + +import ( + "fmt" + "path/filepath" + "regexp" + "sync" + "unicode" + + "github.com/mgechev/revive/lint" +) + +// FilenameFormatRule lints source filenames according to a set of regular expressions given as arguments +type FilenameFormatRule struct { + format *regexp.Regexp + sync.Mutex +} + +// Apply applies the rule to the given file. 
+func (r *FilenameFormatRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure { + r.configure(arguments) + + filename := filepath.Base(file.Name) + if r.format.MatchString(filename) { + return nil + } + + failureMsg := fmt.Sprintf("Filename %s is not of the format %s.%s", filename, r.format.String(), r.getMsgForNonASCIIChars(filename)) + return []lint.Failure{{ + Confidence: 1, + Failure: failureMsg, + RuleName: r.Name(), + Node: file.AST.Name, + }} +} + +func (r *FilenameFormatRule) getMsgForNonASCIIChars(str string) string { + result := "" + for _, c := range str { + if c <= unicode.MaxASCII { + continue + } + + result += fmt.Sprintf(" Non ASCII character %c (%U) found.", c, c) + } + + return result +} + +// Name returns the rule name. +func (*FilenameFormatRule) Name() string { + return "filename-format" +} + +var defaultFormat = regexp.MustCompile("^[_A-Za-z0-9][_A-Za-z0-9-]*.go$") + +func (r *FilenameFormatRule) configure(arguments lint.Arguments) { + r.Lock() + defer r.Unlock() + + if r.format != nil { + return + } + + argsCount := len(arguments) + if argsCount == 0 { + r.format = defaultFormat + return + } + + if argsCount > 1 { + panic(fmt.Sprintf("rule %q expects only one argument, got %d %v", r.Name(), argsCount, arguments)) + } + + arg := arguments[0] + str, ok := arg.(string) + if !ok { + panic(fmt.Sprintf("rule %q expects a string argument, got %v of type %T", r.Name(), arg, arg)) + } + + format, err := regexp.Compile(str) + if err != nil { + panic(fmt.Sprintf("rule %q expects a valid regexp argument, got %v for %s", r.Name(), err, arg)) + } + + r.format = format +} diff --git a/vendor/github.com/mgechev/revive/rule/function-length.go b/vendor/github.com/mgechev/revive/rule/function-length.go index fd65884e97..30402313d1 100644 --- a/vendor/github.com/mgechev/revive/rule/function-length.go +++ b/vendor/github.com/mgechev/revive/rule/function-length.go @@ -20,12 +20,14 @@ type FunctionLength struct { func (r *FunctionLength) configure(arguments lint.Arguments) { r.Lock() defer r.Unlock() - if !r.configured { - maxStmt, maxLines := r.parseArguments(arguments) - r.maxStmt = int(maxStmt) - r.maxLines = int(maxLines) - r.configured = true + if r.configured { + return } + + r.configured = true + maxStmt, maxLines := r.parseArguments(arguments) + r.maxStmt = int(maxStmt) + r.maxLines = int(maxLines) } // Apply applies the rule to given file. 
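The update also vendors two brand-new rules just above, `file-length-limit` and `filename-format`. Purely as a sketch (not part of the diff, and assuming `lint.Arguments` is the `[]any`-style slice these `configure` methods index into), the values below mirror the argument shapes they parse; the concrete limit and the snake_case pattern are made-up examples.

```go
package main

import (
	"fmt"

	"github.com/mgechev/revive/lint"
)

func main() {
	// file-length-limit reads one map argument; key names and value types
	// (int64 / bool) match what its configure method type-asserts.
	fileLengthArgs := lint.Arguments{
		map[string]any{
			"max":            int64(1000),
			"skipComments":   true,
			"skipBlankLines": true,
		},
	}

	// filename-format takes a single regular-expression string; with no
	// argument it falls back to the default pattern defined in the new file.
	filenameFormatArgs := lint.Arguments{`^[_a-z][_a-z0-9]*\.go$`}

	fmt.Println(fileLengthArgs, filenameFormatArgs)
}
```

The same key names would typically appear under the rules' arguments in a revive (or golangci-lint revive settings) configuration file.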
@@ -61,8 +63,9 @@ func (*FunctionLength) parseArguments(arguments lint.Arguments) (maxStmt, maxLin return defaultFuncStmtsLimit, defaultFuncLinesLimit } - if len(arguments) != 2 { - panic(fmt.Sprintf(`invalid configuration for "function-length" rule, expected 2 arguments but got %d`, len(arguments))) + const minArguments = 2 + if len(arguments) != minArguments { + panic(fmt.Sprintf(`invalid configuration for "function-length" rule, expected %d arguments but got %d`, minArguments, len(arguments))) } maxStmt, maxStmtOk := arguments[0].(int64) @@ -98,7 +101,8 @@ func (w lintFuncLength) Visit(n ast.Node) ast.Visitor { } body := node.Body - if body == nil || len(node.Body.List) == 0 { + emptyBody := body == nil || len(node.Body.List) == 0 + if emptyBody { return nil } diff --git a/vendor/github.com/mgechev/revive/rule/function-result-limit.go b/vendor/github.com/mgechev/revive/rule/function-result-limit.go index 6a0748011d..23474b5ee4 100644 --- a/vendor/github.com/mgechev/revive/rule/function-result-limit.go +++ b/vendor/github.com/mgechev/revive/rule/function-result-limit.go @@ -19,20 +19,24 @@ const defaultResultsLimit = 3 func (r *FunctionResultsLimitRule) configure(arguments lint.Arguments) { r.Lock() defer r.Unlock() - if r.max == 0 { - if len(arguments) < 1 { - r.max = defaultResultsLimit - return - } - max, ok := arguments[0].(int64) // Alt. non panicking version - if !ok { - panic(fmt.Sprintf(`invalid value passed as return results number to the "function-result-limit" rule; need int64 but got %T`, arguments[0])) - } - if max < 0 { - panic(`the value passed as return results number to the "function-result-limit" rule cannot be negative`) - } - r.max = int(max) + if r.max != 0 { + return // already configured } + + if len(arguments) < 1 { + r.max = defaultResultsLimit + return + } + + maxResults, ok := arguments[0].(int64) // Alt. non panicking version + if !ok { + panic(fmt.Sprintf(`invalid value passed as return results number to the "function-result-limit" rule; need int64 but got %T`, arguments[0])) + } + if maxResults < 0 { + panic(`the value passed as return results number to the "function-result-limit" rule cannot be negative`) + } + + r.max = int(maxResults) } // Apply applies the rule to given file. 
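Between the two hunks here, a quick illustration (not part of the diff) of what `function-result-limit` enforces: with no arguments it uses `defaultResultsLimit` of 3, and the hunk that follows also makes the walker return `nil` so it no longer descends into a function body once the declaration has been checked. Hypothetical input:

```go
package example

// Four results exceed the default limit of 3, so this declaration is reported
// as "maximum number of return results per function exceeded; max 3 but got 4".
func lookup(key string) (value string, found bool, attempts int, err error) {
	return "", false, 0, nil
}

// Three results stay within the default limit and are not reported.
func fetch(key string) (string, bool, error) {
	return "", false, nil
}
```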
@@ -67,7 +71,8 @@ func (w lintFunctionResultsNum) Visit(n ast.Node) ast.Visitor { node, ok := n.(*ast.FuncDecl) if ok { num := 0 - if node.Type.Results != nil { + hasResults := node.Type.Results != nil + if hasResults { num = node.Type.Results.NumFields() } if num > w.max { @@ -76,8 +81,10 @@ func (w lintFunctionResultsNum) Visit(n ast.Node) ast.Visitor { Failure: fmt.Sprintf("maximum number of return results per function exceeded; max %d but got %d", w.max, num), Node: node.Type, }) - return w } + + return nil // skip visiting function's body } + return w } diff --git a/vendor/github.com/mgechev/revive/rule/get-return.go b/vendor/github.com/mgechev/revive/rule/get-return.go index 600a40fac2..06323a0879 100644 --- a/vendor/github.com/mgechev/revive/rule/get-return.go +++ b/vendor/github.com/mgechev/revive/rule/get-return.go @@ -33,15 +33,25 @@ type lintReturnRule struct { onFailure func(lint.Failure) } +const getterPrefix = "GET" + +var lenGetterPrefix = len(getterPrefix) + func isGetter(name string) bool { - if strings.HasPrefix(strings.ToUpper(name), "GET") { - if len(name) > 3 { - c := name[3] - return !(c >= 'a' && c <= 'z') - } + nameHasGetterPrefix := strings.HasPrefix(strings.ToUpper(name), getterPrefix) + if !nameHasGetterPrefix { + return false } - return false + isJustGet := len(name) == lenGetterPrefix + if isJustGet { + return false + } + + c := name[lenGetterPrefix] + lowerCaseAfterGetterPrefix := c >= 'a' && c <= 'z' + + return !lowerCaseAfterGetterPrefix } func hasResults(rs *ast.FieldList) bool { diff --git a/vendor/github.com/mgechev/revive/rule/identical-branches.go b/vendor/github.com/mgechev/revive/rule/identical-branches.go index 9222c8a9c5..c6008925fb 100644 --- a/vendor/github.com/mgechev/revive/rule/identical-branches.go +++ b/vendor/github.com/mgechev/revive/rule/identical-branches.go @@ -39,9 +39,11 @@ func (w *lintIdenticalBranches) Visit(node ast.Node) ast.Visitor { return w } - if n.Else == nil { + noElseBranch := n.Else == nil + if noElseBranch { return w } + branches := []*ast.BlockStmt{n.Body} elseBranch, ok := n.Else.(*ast.BlockStmt) @@ -59,14 +61,15 @@ func (w *lintIdenticalBranches) Visit(node ast.Node) ast.Visitor { func (lintIdenticalBranches) identicalBranches(branches []*ast.BlockStmt) bool { if len(branches) < 2 { - return false + return false // only one branch to compare thus we return } - ref := gofmt(branches[0]) - refSize := len(branches[0].List) + referenceBranch := gofmt(branches[0]) + referenceBranchSize := len(branches[0].List) for i := 1; i < len(branches); i++ { - currentSize := len(branches[i].List) - if currentSize != refSize || gofmt(branches[i]) != ref { + currentBranch := branches[i] + currentBranchSize := len(currentBranch.List) + if currentBranchSize != referenceBranchSize || gofmt(currentBranch) != referenceBranch { return false } } diff --git a/vendor/github.com/mgechev/revive/rule/import-alias-naming.go b/vendor/github.com/mgechev/revive/rule/import-alias-naming.go index a6d096c8b2..48d22566a1 100644 --- a/vendor/github.com/mgechev/revive/rule/import-alias-naming.go +++ b/vendor/github.com/mgechev/revive/rule/import-alias-naming.go @@ -68,7 +68,7 @@ func (r *ImportAliasNamingRule) Apply(file *lint.File, arguments lint.Arguments) } alias := is.Name - if alias == nil || alias.Name == "_" || alias.Name == "." { // "_" and "." are special types of import aiases and should be processed by another linter rule + if alias == nil || alias.Name == "_" || alias.Name == "." { // "_" and "." 
are special types of import aliases and should be processed by another linter rule continue } diff --git a/vendor/github.com/mgechev/revive/rule/indent-error-flow.go b/vendor/github.com/mgechev/revive/rule/indent-error-flow.go index 294ceef842..ebc1e793aa 100644 --- a/vendor/github.com/mgechev/revive/rule/indent-error-flow.go +++ b/vendor/github.com/mgechev/revive/rule/indent-error-flow.go @@ -18,27 +18,27 @@ func (*IndentErrorFlowRule) Name() string { return "indent-error-flow" } -// CheckIfElse evaluates the rule against an ifelse.Chain. -func (*IndentErrorFlowRule) CheckIfElse(chain ifelse.Chain, args ifelse.Args) (failMsg string) { +// CheckIfElse evaluates the rule against an ifelse.Chain and returns a failure message if applicable. +func (*IndentErrorFlowRule) CheckIfElse(chain ifelse.Chain, args ifelse.Args) string { if !chain.If.Deviates() { // this rule only applies if the if-block deviates control flow - return + return "" } if chain.HasPriorNonDeviating { // if we de-indent the "else" block then a previous branch // might flow into it, affecting program behaviour - return + return "" } if !chain.If.Returns() { // avoid overlapping with superfluous-else - return + return "" } if args.PreserveScope && !chain.AtBlockEnd && (chain.HasInitializer || chain.Else.HasDecls) { // avoid increasing variable scope - return + return "" } return "if block ends with a return statement, so drop this else and outdent its block" diff --git a/vendor/github.com/mgechev/revive/rule/line-length-limit.go b/vendor/github.com/mgechev/revive/rule/line-length-limit.go index 1a414f6914..a154b7aece 100644 --- a/vendor/github.com/mgechev/revive/rule/line-length-limit.go +++ b/vendor/github.com/mgechev/revive/rule/line-length-limit.go @@ -23,19 +23,21 @@ const defaultLineLengthLimit = 80 func (r *LineLengthLimitRule) configure(arguments lint.Arguments) { r.Lock() defer r.Unlock() - if r.max == 0 { - if len(arguments) < 1 { - r.max = defaultLineLengthLimit - return - } + if r.max != 0 { + return // already configured + } - max, ok := arguments[0].(int64) // Alt. non panicking version - if !ok || max < 0 { - panic(`invalid value passed as argument number to the "line-length-limit" rule`) - } + if len(arguments) < 1 { + r.max = defaultLineLengthLimit + return + } - r.max = int(max) + maxLength, ok := arguments[0].(int64) // Alt. non panicking version + if !ok || maxLength < 0 { + panic(`invalid value passed as argument number to the "line-length-limit" rule`) } + + r.max = int(maxLength) } // Apply applies the rule to given file. diff --git a/vendor/github.com/mgechev/revive/rule/max-control-nesting.go b/vendor/github.com/mgechev/revive/rule/max-control-nesting.go index c4eb361937..5dbb1eefa4 100644 --- a/vendor/github.com/mgechev/revive/rule/max-control-nesting.go +++ b/vendor/github.com/mgechev/revive/rule/max-control-nesting.go @@ -110,7 +110,7 @@ func (r *MaxControlNestingRule) configure(arguments lint.Arguments) { r.Lock() defer r.Unlock() if !(r.max < 1) { - return // max already set + return // max already configured } if len(arguments) < 1 { @@ -120,9 +120,9 @@ func (r *MaxControlNestingRule) configure(arguments lint.Arguments) { checkNumberOfArguments(1, arguments, r.Name()) - max, ok := arguments[0].(int64) // Alt. non panicking version + maxNesting, ok := arguments[0].(int64) // Alt. 
non panicking version if !ok { panic(`invalid value passed as argument number to the "max-control-nesting" rule`) } - r.max = max + r.max = maxNesting } diff --git a/vendor/github.com/mgechev/revive/rule/max-public-structs.go b/vendor/github.com/mgechev/revive/rule/max-public-structs.go index 25be3e676f..70840e734e 100644 --- a/vendor/github.com/mgechev/revive/rule/max-public-structs.go +++ b/vendor/github.com/mgechev/revive/rule/max-public-structs.go @@ -19,20 +19,22 @@ const defaultMaxPublicStructs = 5 func (r *MaxPublicStructsRule) configure(arguments lint.Arguments) { r.Lock() defer r.Unlock() - if r.max < 1 { - if len(arguments) < 1 { - r.max = defaultMaxPublicStructs - return - } + if r.max == 0 { + return // already configured + } - checkNumberOfArguments(1, arguments, r.Name()) + if len(arguments) < 1 { + r.max = defaultMaxPublicStructs + return + } - max, ok := arguments[0].(int64) // Alt. non panicking version - if !ok { - panic(`invalid value passed as argument number to the "max-public-structs" rule`) - } - r.max = max + checkNumberOfArguments(1, arguments, r.Name()) + + maxStructs, ok := arguments[0].(int64) // Alt. non panicking version + if !ok { + panic(`invalid value passed as argument number to the "max-public-structs" rule`) } + r.max = maxStructs } // Apply applies the rule to given file. diff --git a/vendor/github.com/mgechev/revive/rule/receiver-naming.go b/vendor/github.com/mgechev/revive/rule/receiver-naming.go index d79bb9fe8f..afcd99b8fd 100644 --- a/vendor/github.com/mgechev/revive/rule/receiver-naming.go +++ b/vendor/github.com/mgechev/revive/rule/receiver-naming.go @@ -3,16 +3,55 @@ package rule import ( "fmt" "go/ast" + "sync" "github.com/mgechev/revive/internal/typeparams" "github.com/mgechev/revive/lint" ) // ReceiverNamingRule lints given else constructs. -type ReceiverNamingRule struct{} +type ReceiverNamingRule struct { + receiverNameMaxLength int + sync.Mutex +} + +const defaultReceiverNameMaxLength = -1 // thus will not check + +func (r *ReceiverNamingRule) configure(arguments lint.Arguments) { + r.Lock() + defer r.Unlock() + if r.receiverNameMaxLength != 0 { + return + } + + r.receiverNameMaxLength = defaultReceiverNameMaxLength + if len(arguments) < 1 { + return + } + + args, ok := arguments[0].(map[string]any) + if !ok { + panic(fmt.Sprintf("Unable to get arguments for rule %s. Expected object of key-value-pairs.", r.Name())) + } + + for k, v := range args { + switch k { + case "maxLength": + value, ok := v.(int64) + if !ok { + panic(fmt.Sprintf("Invalid value %v for argument %s of rule %s, expected integer value got %T", v, k, r.Name(), v)) + } + r.receiverNameMaxLength = int(value) + default: + panic(fmt.Sprintf("Unknown argument %s for %s rule.", k, r.Name())) + } + } +} // Apply applies the rule to given file. 
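A hypothetical example (not part of the vendored change) of the `maxLength` option that `receiver-naming` now parses from a `map[string]any` argument: with `maxLength` set to 2, an over-long receiver name is reported with the new message, while short receivers pass.

```go
package example

// Assumed configuration, mirroring what configure type-asserts:
//   lint.Arguments{map[string]any{"maxLength": int64(2)}}
type counter struct{ n int }

// Reported as "receiver name myValue is longer than 2 characters".
func (myValue *counter) inc() { myValue.n++ }

// A two-character receiver stays within the configured limit.
func (ct *counter) dec() { ct.n-- }
```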
-func (*ReceiverNamingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure { +func (r *ReceiverNamingRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure { + r.configure(args) + var failures []lint.Failure fileAst := file.AST @@ -20,7 +59,8 @@ func (*ReceiverNamingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failu onFailure: func(failure lint.Failure) { failures = append(failures, failure) }, - typeReceiver: map[string]string{}, + typeReceiver: map[string]string{}, + receiverNameMaxLength: r.receiverNameMaxLength, } ast.Walk(walker, fileAst) @@ -34,8 +74,9 @@ func (*ReceiverNamingRule) Name() string { } type lintReceiverName struct { - onFailure func(lint.Failure) - typeReceiver map[string]string + onFailure func(lint.Failure) + typeReceiver map[string]string + receiverNameMaxLength int } func (w lintReceiverName) Visit(n ast.Node) ast.Visitor { @@ -66,6 +107,17 @@ func (w lintReceiverName) Visit(n ast.Node) ast.Visitor { }) return w } + + if w.receiverNameMaxLength > 0 && len([]rune(name)) > w.receiverNameMaxLength { + w.onFailure(lint.Failure{ + Node: n, + Confidence: 1, + Category: "naming", + Failure: fmt.Sprintf("receiver name %s is longer than %d characters", name, w.receiverNameMaxLength), + }) + return w + } + recv := typeparams.ReceiverType(fn) if prev, ok := w.typeReceiver[recv]; ok && prev != name { w.onFailure(lint.Failure{ diff --git a/vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go b/vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go index b3ff084563..10ea16ae14 100644 --- a/vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go +++ b/vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go @@ -4,6 +4,7 @@ import ( "fmt" "go/ast" "go/token" + "maps" "github.com/mgechev/revive/lint" ) @@ -33,6 +34,12 @@ var builtFunctions = map[string]bool{ "recover": true, } +var builtFunctionsAfterGo121 = map[string]bool{ + "clear": true, + "max": true, + "min": true, +} + var builtInTypes = map[string]bool{ "bool": true, "byte": true, @@ -69,7 +76,17 @@ func (*RedefinesBuiltinIDRule) Apply(file *lint.File, _ lint.Arguments) []lint.F } astFile := file.AST - w := &lintRedefinesBuiltinID{onFailure} + + builtFuncs := maps.Clone(builtFunctions) + if file.Pkg.IsAtLeastGo121() { + maps.Copy(builtFuncs, builtFunctionsAfterGo121) + } + w := &lintRedefinesBuiltinID{ + onFailure: onFailure, + builtInConstAndVars: builtInConstAndVars, + builtFunctions: builtFuncs, + builtInTypes: builtInTypes, + } ast.Walk(w, astFile) return failures @@ -81,7 +98,10 @@ func (*RedefinesBuiltinIDRule) Name() string { } type lintRedefinesBuiltinID struct { - onFailure func(lint.Failure) + onFailure func(lint.Failure) + builtInConstAndVars map[string]bool + builtFunctions map[string]bool + builtInTypes map[string]bool } func (w *lintRedefinesBuiltinID) Visit(node ast.Node) ast.Visitor { @@ -125,6 +145,31 @@ func (w *lintRedefinesBuiltinID) Visit(node ast.Node) ast.Visitor { if ok, bt := w.isBuiltIn(id); ok { w.addFailure(n, fmt.Sprintf("redefinition of the built-in %s %s", bt, id)) } + case *ast.FuncType: + var fields []*ast.Field + if n.TypeParams != nil { + fields = append(fields, n.TypeParams.List...) + } + if n.Params != nil { + fields = append(fields, n.Params.List...) + } + if n.Results != nil { + fields = append(fields, n.Results.List...) 
+ } + for _, field := range fields { + for _, name := range field.Names { + obj := name.Obj + isTypeOrName := obj != nil && (obj.Kind == ast.Var || obj.Kind == ast.Typ) + if !isTypeOrName { + continue + } + + id := obj.Name + if ok, bt := w.isBuiltIn(id); ok { + w.addFailure(name, fmt.Sprintf("redefinition of the built-in %s %s", bt, id)) + } + } + } case *ast.AssignStmt: for _, e := range n.Lhs { id, ok := e.(*ast.Ident) @@ -162,16 +207,16 @@ func (w lintRedefinesBuiltinID) addFailure(node ast.Node, msg string) { }) } -func (lintRedefinesBuiltinID) isBuiltIn(id string) (r bool, builtInKind string) { - if builtFunctions[id] { +func (w *lintRedefinesBuiltinID) isBuiltIn(id string) (r bool, builtInKind string) { + if w.builtFunctions[id] { return true, "function" } - if builtInConstAndVars[id] { + if w.builtInConstAndVars[id] { return true, "constant or variable" } - if builtInTypes[id] { + if w.builtInTypes[id] { return true, "type" } diff --git a/vendor/github.com/mgechev/revive/rule/string-format.go b/vendor/github.com/mgechev/revive/rule/string-format.go index 70edf7387c..ecac3fa7c8 100644 --- a/vendor/github.com/mgechev/revive/rule/string-format.go +++ b/vendor/github.com/mgechev/revive/rule/string-format.go @@ -6,6 +6,7 @@ import ( "go/token" "regexp" "strconv" + "strings" "github.com/mgechev/revive/lint" ) @@ -66,12 +67,14 @@ type lintStringFormatRule struct { type stringFormatSubrule struct { parent *lintStringFormatRule - scope stringFormatSubruleScope + scopes stringFormatSubruleScopes regexp *regexp.Regexp negated bool errorMessage string } +type stringFormatSubruleScopes []*stringFormatSubruleScope + type stringFormatSubruleScope struct { funcName string // Function name the rule is scoped to argument int // (optional) Which argument in calls to the function is checked against the rule (the first argument is checked by default) @@ -90,10 +93,10 @@ var parseStringFormatScope = regexp.MustCompile( func (w *lintStringFormatRule) parseArguments(arguments lint.Arguments) { for i, argument := range arguments { - scope, regex, negated, errorMessage := w.parseArgument(argument, i) + scopes, regex, negated, errorMessage := w.parseArgument(argument, i) w.rules = append(w.rules, stringFormatSubrule{ parent: w, - scope: scope, + scopes: scopes, regexp: regex, negated: negated, errorMessage: errorMessage, @@ -101,7 +104,7 @@ func (w *lintStringFormatRule) parseArguments(arguments lint.Arguments) { } } -func (w lintStringFormatRule) parseArgument(argument any, ruleNum int) (scope stringFormatSubruleScope, regex *regexp.Regexp, negated bool, errorMessage string) { +func (w lintStringFormatRule) parseArgument(argument any, ruleNum int) (scopes stringFormatSubruleScopes, regex *regexp.Regexp, negated bool, errorMessage string) { g, ok := argument.([]any) // Cast to generic slice first if !ok { w.configError("argument is not a slice", ruleNum, 0) @@ -125,26 +128,39 @@ func (w lintStringFormatRule) parseArgument(argument any, ruleNum int) (scope st w.configError("regex is too small (regexes should begin and end with '/')", ruleNum, 1) } - // Parse rule scope - scope = stringFormatSubruleScope{} - matches := parseStringFormatScope.FindStringSubmatch(rule[0]) - if matches == nil { - // The rule's scope didn't match the parsing regex at all, probably a configuration error - w.parseError("unable to parse rule scope", ruleNum, 0) - } else if len(matches) != 4 { - // The rule's scope matched the parsing regex, but an unexpected number of submatches was returned, probably a bug - 
w.parseError(fmt.Sprintf("unexpected number of submatches when parsing scope: %d, expected 4", len(matches)), ruleNum, 0) - } - scope.funcName = matches[1] - if len(matches[2]) > 0 { - var err error - scope.argument, err = strconv.Atoi(matches[2]) - if err != nil { - w.parseError("unable to parse argument number in rule scope", ruleNum, 0) + // Parse rule scopes + rawScopes := strings.Split(rule[0], ",") + + scopes = make([]*stringFormatSubruleScope, 0, len(rawScopes)) + for scopeNum, rawScope := range rawScopes { + rawScope = strings.TrimSpace(rawScope) + + if len(rawScope) == 0 { + w.parseScopeError("empty scope in rule scopes:", ruleNum, 0, scopeNum) } - } - if len(matches[3]) > 0 { - scope.field = matches[3] + + scope := stringFormatSubruleScope{} + matches := parseStringFormatScope.FindStringSubmatch(rawScope) + if matches == nil { + // The rule's scope didn't match the parsing regex at all, probably a configuration error + w.parseScopeError("unable to parse rule scope", ruleNum, 0, scopeNum) + } else if len(matches) != 4 { + // The rule's scope matched the parsing regex, but an unexpected number of submatches was returned, probably a bug + w.parseScopeError(fmt.Sprintf("unexpected number of submatches when parsing scope: %d, expected 4", len(matches)), ruleNum, 0, scopeNum) + } + scope.funcName = matches[1] + if len(matches[2]) > 0 { + var err error + scope.argument, err = strconv.Atoi(matches[2]) + if err != nil { + w.parseScopeError("unable to parse argument number in rule scope", ruleNum, 0, scopeNum) + } + } + if len(matches[3]) > 0 { + scope.field = matches[3] + } + + scopes = append(scopes, &scope) } // Strip / characters from the beginning and end of rule[1] before compiling @@ -162,7 +178,7 @@ func (w lintStringFormatRule) parseArgument(argument any, ruleNum int) (scope st if len(rule) == 3 { errorMessage = rule[2] } - return scope, regex, negated, errorMessage + return scopes, regex, negated, errorMessage } // Report an invalid config, this is specifically the user's fault @@ -175,6 +191,11 @@ func (lintStringFormatRule) parseError(msg string, ruleNum, option int) { panic(fmt.Sprintf("failed to parse configuration for string-format: %s [argument %d, option %d]", msg, ruleNum, option)) } +// Report a general scope config parsing failure, this may be the user's fault, but it isn't known for certain +func (lintStringFormatRule) parseScopeError(msg string, ruleNum, option, scopeNum int) { + panic(fmt.Sprintf("failed to parse configuration for string-format: %s [argument %d, option %d, scope index %d]", msg, ruleNum, option, scopeNum)) +} + // #endregion // #region Node traversal @@ -193,8 +214,10 @@ func (w lintStringFormatRule) Visit(node ast.Node) ast.Visitor { } for _, rule := range w.rules { - if rule.scope.funcName == callName { - rule.Apply(call) + for _, scope := range rule.scopes { + if scope.funcName == callName { + rule.apply(call, scope) + } } } @@ -228,15 +251,15 @@ func (lintStringFormatRule) getCallName(call *ast.CallExpr) (callName string, ok // #region Linting logic -// Apply a single format rule to a call expression (should be done after verifying the that the call expression matches the rule's scope) -func (r *stringFormatSubrule) Apply(call *ast.CallExpr) { - if len(call.Args) <= r.scope.argument { +// apply a single format rule to a call expression (should be done after verifying the that the call expression matches the rule's scope) +func (r *stringFormatSubrule) apply(call *ast.CallExpr, scope *stringFormatSubruleScope) { + if len(call.Args) <= 
scope.argument { return } - arg := call.Args[r.scope.argument] + arg := call.Args[scope.argument] var lit *ast.BasicLit - if len(r.scope.field) > 0 { + if len(scope.field) > 0 { // Try finding the scope's Field, treating arg as a composite literal composite, ok := arg.(*ast.CompositeLit) if !ok { @@ -248,7 +271,7 @@ func (r *stringFormatSubrule) Apply(call *ast.CallExpr) { continue } key, ok := kv.Key.(*ast.Ident) - if !ok || key.Name != r.scope.field { + if !ok || key.Name != scope.field { continue } @@ -268,39 +291,33 @@ func (r *stringFormatSubrule) Apply(call *ast.CallExpr) { } // Unquote the string literal before linting unquoted := lit.Value[1 : len(lit.Value)-1] - r.lintMessage(unquoted, lit) + if r.stringIsOK(unquoted) { + return + } + + r.generateFailure(lit) } -func (r *stringFormatSubrule) lintMessage(s string, node ast.Node) { +func (r *stringFormatSubrule) stringIsOK(s string) bool { + matches := r.regexp.MatchString(s) if r.negated { - if !r.regexp.MatchString(s) { - return - } - // Fail if the string does match the user's regex - var failure string - if len(r.errorMessage) > 0 { - failure = r.errorMessage - } else { - failure = fmt.Sprintf("string literal matches user defined regex /%s/", r.regexp.String()) - } - r.parent.onFailure(lint.Failure{ - Confidence: 1, - Failure: failure, - Node: node, - }) - return + return !matches } - // Fail if the string does NOT match the user's regex - if r.regexp.MatchString(s) { - return - } + return matches +} + +func (r *stringFormatSubrule) generateFailure(node ast.Node) { var failure string - if len(r.errorMessage) > 0 { + switch { + case len(r.errorMessage) > 0: failure = r.errorMessage - } else { + case r.negated: + failure = fmt.Sprintf("string literal matches user defined regex /%s/", r.regexp.String()) + case !r.negated: failure = fmt.Sprintf("string literal doesn't match user defined regex /%s/", r.regexp.String()) } + r.parent.onFailure(lint.Failure{ Confidence: 1, Failure: failure, diff --git a/vendor/github.com/mgechev/revive/rule/struct-tag.go b/vendor/github.com/mgechev/revive/rule/struct-tag.go index f6ee47a731..ec3f0c7cf3 100644 --- a/vendor/github.com/mgechev/revive/rule/struct-tag.go +++ b/vendor/github.com/mgechev/revive/rule/struct-tag.go @@ -20,23 +20,27 @@ type StructTagRule struct { func (r *StructTagRule) configure(arguments lint.Arguments) { r.Lock() defer r.Unlock() - if r.userDefined == nil && len(arguments) > 0 { - checkNumberOfArguments(1, arguments, r.Name()) - r.userDefined = make(map[string][]string, len(arguments)) - for _, arg := range arguments { - item, ok := arg.(string) - if !ok { - panic(fmt.Sprintf("Invalid argument to the %s rule. Expecting a string, got %v (of type %T)", r.Name(), arg, arg)) - } - parts := strings.Split(item, ",") - if len(parts) < 2 { - panic(fmt.Sprintf("Invalid argument to the %s rule. Expecting a string of the form key[,option]+, got %s", r.Name(), item)) - } - key := strings.TrimSpace(parts[0]) - for i := 1; i < len(parts); i++ { - option := strings.TrimSpace(parts[i]) - r.userDefined[key] = append(r.userDefined[key], option) - } + + mustConfigure := r.userDefined == nil && len(arguments) > 0 + if !mustConfigure { + return + } + + checkNumberOfArguments(1, arguments, r.Name()) + r.userDefined = make(map[string][]string, len(arguments)) + for _, arg := range arguments { + item, ok := arg.(string) + if !ok { + panic(fmt.Sprintf("Invalid argument to the %s rule. 
Expecting a string, got %v (of type %T)", r.Name(), arg, arg)) + } + parts := strings.Split(item, ",") + if len(parts) < 2 { + panic(fmt.Sprintf("Invalid argument to the %s rule. Expecting a string of the form key[,option]+, got %s", r.Name(), item)) + } + key := strings.TrimSpace(parts[0]) + for i := 1; i < len(parts); i++ { + option := strings.TrimSpace(parts[i]) + r.userDefined[key] = append(r.userDefined[key], option) } } } @@ -75,11 +79,13 @@ type lintStructTagRule struct { func (w lintStructTagRule) Visit(node ast.Node) ast.Visitor { switch n := node.(type) { case *ast.StructType: - if n.Fields == nil || n.Fields.NumFields() < 1 { + isEmptyStruct := n.Fields == nil || n.Fields.NumFields() < 1 + if isEmptyStruct { return nil // skip empty structs } - w.usedTagNbr = map[int]bool{} // init - w.usedTagName = map[string]bool{} // init + + w.usedTagNbr = map[int]bool{} + w.usedTagName = map[string]bool{} for _, f := range n.Fields.List { if f.Tag != nil { w.checkTaggedField(f) diff --git a/vendor/github.com/mgechev/revive/rule/superfluous-else.go b/vendor/github.com/mgechev/revive/rule/superfluous-else.go index 2aa1b6b2ca..18e8f3bdd9 100644 --- a/vendor/github.com/mgechev/revive/rule/superfluous-else.go +++ b/vendor/github.com/mgechev/revive/rule/superfluous-else.go @@ -2,6 +2,7 @@ package rule import ( "fmt" + "github.com/mgechev/revive/internal/ifelse" "github.com/mgechev/revive/lint" ) @@ -19,27 +20,27 @@ func (*SuperfluousElseRule) Name() string { return "superfluous-else" } -// CheckIfElse evaluates the rule against an ifelse.Chain. -func (*SuperfluousElseRule) CheckIfElse(chain ifelse.Chain, args ifelse.Args) (failMsg string) { +// CheckIfElse evaluates the rule against an ifelse.Chain and returns a failure message if applicable. +func (*SuperfluousElseRule) CheckIfElse(chain ifelse.Chain, args ifelse.Args) string { if !chain.If.Deviates() { // this rule only applies if the if-block deviates control flow - return + return "" } if chain.HasPriorNonDeviating { // if we de-indent the "else" block then a previous branch // might flow into it, affecting program behaviour - return + return "" } if chain.If.Returns() { // avoid overlapping with indent-error-flow - return + return "" } if args.PreserveScope && !chain.AtBlockEnd && (chain.HasInitializer || chain.Else.HasDecls) { // avoid increasing variable scope - return + return "" } return fmt.Sprintf("if block ends with %v, so drop this else and outdent its block", chain.If.LongString()) diff --git a/vendor/github.com/mgechev/revive/rule/time-equal.go b/vendor/github.com/mgechev/revive/rule/time-equal.go index 3b85e18a8e..a4fab88b39 100644 --- a/vendor/github.com/mgechev/revive/rule/time-equal.go +++ b/vendor/github.com/mgechev/revive/rule/time-equal.go @@ -50,26 +50,23 @@ func (l *lintTimeEqual) Visit(node ast.Node) ast.Visitor { return l } - xtyp := l.file.Pkg.TypeOf(expr.X) - ytyp := l.file.Pkg.TypeOf(expr.Y) - - if !isNamedType(xtyp, "time", "Time") || !isNamedType(ytyp, "time", "Time") { + typeOfX := l.file.Pkg.TypeOf(expr.X) + typeOfY := l.file.Pkg.TypeOf(expr.Y) + bothAreOfTimeType := isNamedType(typeOfX, "time", "Time") && isNamedType(typeOfY, "time", "Time") + if !bothAreOfTimeType { return l } - var failure string - switch expr.Op { - case token.EQL: - failure = fmt.Sprintf("use %s.Equal(%s) instead of %q operator", gofmt(expr.X), gofmt(expr.Y), expr.Op) - case token.NEQ: - failure = fmt.Sprintf("use !%s.Equal(%s) instead of %q operator", gofmt(expr.X), gofmt(expr.Y), expr.Op) + negateStr := "" + if token.NEQ == expr.Op { + 
negateStr = "!" } l.onFailure(lint.Failure{ Category: "time", Confidence: 1, Node: node, - Failure: failure, + Failure: fmt.Sprintf("use %s%s.Equal(%s) instead of %q operator", negateStr, gofmt(expr.X), gofmt(expr.Y), expr.Op), }) return l diff --git a/vendor/github.com/mgechev/revive/rule/time-naming.go b/vendor/github.com/mgechev/revive/rule/time-naming.go index cea452e613..5bccf8a7a7 100644 --- a/vendor/github.com/mgechev/revive/rule/time-naming.go +++ b/vendor/github.com/mgechev/revive/rule/time-naming.go @@ -90,6 +90,7 @@ func isNamedType(typ types.Type, importPath, name string) bool { if !ok { return false } - tn := n.Obj() - return tn != nil && tn.Pkg() != nil && tn.Pkg().Path() == importPath && tn.Name() == name + + typeName := n.Obj() + return typeName != nil && typeName.Pkg() != nil && typeName.Pkg().Path() == importPath && typeName.Name() == name } diff --git a/vendor/github.com/mgechev/revive/rule/unchecked-type-assertion.go b/vendor/github.com/mgechev/revive/rule/unchecked-type-assertion.go index df27743cbd..eea344060a 100644 --- a/vendor/github.com/mgechev/revive/rule/unchecked-type-assertion.go +++ b/vendor/github.com/mgechev/revive/rule/unchecked-type-assertion.go @@ -13,7 +13,7 @@ const ( ruleUTAMessageIgnored = "type assertion result ignored" ) -// UncheckedTypeAssertionRule lints missing or ignored `ok`-value in danymic type casts. +// UncheckedTypeAssertionRule lints missing or ignored `ok`-value in dynamic type casts. type UncheckedTypeAssertionRule struct { sync.Mutex acceptIgnoredAssertionResult bool @@ -54,7 +54,7 @@ func (u *UncheckedTypeAssertionRule) Apply(file *lint.File, args lint.Arguments) var failures []lint.Failure - walker := &lintUnchekedTypeAssertion{ + walker := &lintUncheckedTypeAssertion{ onFailure: func(failure lint.Failure) { failures = append(failures, failure) }, @@ -71,7 +71,7 @@ func (*UncheckedTypeAssertionRule) Name() string { return "unchecked-type-assertion" } -type lintUnchekedTypeAssertion struct { +type lintUncheckedTypeAssertion struct { onFailure func(lint.Failure) acceptIgnoredTypeAssertionResult bool } @@ -89,14 +89,14 @@ func isTypeSwitch(e *ast.TypeAssertExpr) bool { return e.Type == nil } -func (w *lintUnchekedTypeAssertion) requireNoTypeAssert(expr ast.Expr) { +func (w *lintUncheckedTypeAssertion) requireNoTypeAssert(expr ast.Expr) { e, ok := expr.(*ast.TypeAssertExpr) if ok && !isTypeSwitch(e) { w.addFailure(e, ruleUTAMessagePanic) } } -func (w *lintUnchekedTypeAssertion) handleIfStmt(n *ast.IfStmt) { +func (w *lintUncheckedTypeAssertion) handleIfStmt(n *ast.IfStmt) { ifCondition, ok := n.Cond.(*ast.BinaryExpr) if ok { w.requireNoTypeAssert(ifCondition.X) @@ -104,7 +104,7 @@ func (w *lintUnchekedTypeAssertion) handleIfStmt(n *ast.IfStmt) { } } -func (w *lintUnchekedTypeAssertion) requireBinaryExpressionWithoutTypeAssertion(expr ast.Expr) { +func (w *lintUncheckedTypeAssertion) requireBinaryExpressionWithoutTypeAssertion(expr ast.Expr) { binaryExpr, ok := expr.(*ast.BinaryExpr) if ok { w.requireNoTypeAssert(binaryExpr.X) @@ -112,19 +112,19 @@ func (w *lintUnchekedTypeAssertion) requireBinaryExpressionWithoutTypeAssertion( } } -func (w *lintUnchekedTypeAssertion) handleCaseClause(n *ast.CaseClause) { +func (w *lintUncheckedTypeAssertion) handleCaseClause(n *ast.CaseClause) { for _, expr := range n.List { w.requireNoTypeAssert(expr) w.requireBinaryExpressionWithoutTypeAssertion(expr) } } -func (w *lintUnchekedTypeAssertion) handleSwitch(n *ast.SwitchStmt) { +func (w *lintUncheckedTypeAssertion) handleSwitch(n *ast.SwitchStmt) { 
w.requireNoTypeAssert(n.Tag) w.requireBinaryExpressionWithoutTypeAssertion(n.Tag) } -func (w *lintUnchekedTypeAssertion) handleAssignment(n *ast.AssignStmt) { +func (w *lintUncheckedTypeAssertion) handleAssignment(n *ast.AssignStmt) { if len(n.Rhs) == 0 { return } @@ -148,21 +148,21 @@ func (w *lintUnchekedTypeAssertion) handleAssignment(n *ast.AssignStmt) { } // handles "return foo(.*bar)" - one of them is enough to fail as golang does not forward the type cast tuples in return statements -func (w *lintUnchekedTypeAssertion) handleReturn(n *ast.ReturnStmt) { +func (w *lintUncheckedTypeAssertion) handleReturn(n *ast.ReturnStmt) { for _, r := range n.Results { w.requireNoTypeAssert(r) } } -func (w *lintUnchekedTypeAssertion) handleRange(n *ast.RangeStmt) { +func (w *lintUncheckedTypeAssertion) handleRange(n *ast.RangeStmt) { w.requireNoTypeAssert(n.X) } -func (w *lintUnchekedTypeAssertion) handleChannelSend(n *ast.SendStmt) { +func (w *lintUncheckedTypeAssertion) handleChannelSend(n *ast.SendStmt) { w.requireNoTypeAssert(n.Value) } -func (w *lintUnchekedTypeAssertion) Visit(node ast.Node) ast.Visitor { +func (w *lintUncheckedTypeAssertion) Visit(node ast.Node) ast.Visitor { switch n := node.(type) { case *ast.RangeStmt: w.handleRange(n) @@ -183,7 +183,7 @@ func (w *lintUnchekedTypeAssertion) Visit(node ast.Node) ast.Visitor { return w } -func (w *lintUnchekedTypeAssertion) addFailure(n *ast.TypeAssertExpr, why string) { +func (w *lintUncheckedTypeAssertion) addFailure(n *ast.TypeAssertExpr, why string) { s := fmt.Sprintf("type cast result is unchecked in %v - %s", gofmt(n), why) w.onFailure(lint.Failure{ Category: "bad practice", diff --git a/vendor/github.com/mgechev/revive/rule/unconditional-recursion.go b/vendor/github.com/mgechev/revive/rule/unconditional-recursion.go index 9ac2648cdd..d806b6757c 100644 --- a/vendor/github.com/mgechev/revive/rule/unconditional-recursion.go +++ b/vendor/github.com/mgechev/revive/rule/unconditional-recursion.go @@ -185,9 +185,10 @@ func (lintUnconditionalRecursionRule) hasControlExit(node ast.Node) bool { return false } - fn := se.Sel.Name - pkg := id.Name - if exitFunctions[pkg] != nil && exitFunctions[pkg][fn] { // it's a call to an exit function + functionName := se.Sel.Name + pkgName := id.Name + isCallToExitFunction := exitFunctions[pkgName] != nil && exitFunctions[pkgName][functionName] + if isCallToExitFunction { return true } } diff --git a/vendor/github.com/mgechev/revive/rule/unhandled-error.go b/vendor/github.com/mgechev/revive/rule/unhandled-error.go index ce6fa38641..95ba56180e 100644 --- a/vendor/github.com/mgechev/revive/rule/unhandled-error.go +++ b/vendor/github.com/mgechev/revive/rule/unhandled-error.go @@ -19,27 +19,30 @@ type UnhandledErrorRule struct { func (r *UnhandledErrorRule) configure(arguments lint.Arguments) { r.Lock() - if r.ignoreList == nil { - for _, arg := range arguments { - argStr, ok := arg.(string) - if !ok { - panic(fmt.Sprintf("Invalid argument to the unhandled-error rule. 
Expecting a string, got %T", arg)) - } + defer r.Unlock() - argStr = strings.Trim(argStr, " ") - if argStr == "" { - panic("Invalid argument to the unhandled-error rule, expected regular expression must not be empty.") - } + if r.ignoreList != nil { + return // already configured + } - exp, err := regexp.Compile(argStr) - if err != nil { - panic(fmt.Sprintf("Invalid argument to the unhandled-error rule: regexp %q does not compile: %v", argStr, err)) - } + for _, arg := range arguments { + argStr, ok := arg.(string) + if !ok { + panic(fmt.Sprintf("Invalid argument to the unhandled-error rule. Expecting a string, got %T", arg)) + } - r.ignoreList = append(r.ignoreList, exp) + argStr = strings.Trim(argStr, " ") + if argStr == "" { + panic("Invalid argument to the unhandled-error rule, expected regular expression must not be empty.") } + + exp, err := regexp.Compile(argStr) + if err != nil { + panic(fmt.Sprintf("Invalid argument to the unhandled-error rule: regexp %q does not compile: %v", argStr, err)) + } + + r.ignoreList = append(r.ignoreList, exp) } - r.Unlock() } // Apply applies the rule to given file. @@ -130,9 +133,9 @@ func (w *lintUnhandledErrors) funcName(call *ast.CallExpr) string { } name := fn.FullName() - name = strings.Replace(name, "(", "", -1) - name = strings.Replace(name, ")", "", -1) - name = strings.Replace(name, "*", "", -1) + name = strings.ReplaceAll(name, "(", "") + name = strings.ReplaceAll(name, ")", "") + name = strings.ReplaceAll(name, "*", "") return name } diff --git a/vendor/github.com/mgechev/revive/rule/use-any.go b/vendor/github.com/mgechev/revive/rule/use-any.go index bdf3c936dd..88160c2fa0 100644 --- a/vendor/github.com/mgechev/revive/rule/use-any.go +++ b/vendor/github.com/mgechev/revive/rule/use-any.go @@ -47,7 +47,7 @@ func (w lintUseAny) Visit(n ast.Node) ast.Visitor { Node: n, Confidence: 1, Category: "naming", - Failure: "since GO 1.18 'interface{}' can be replaced by 'any'", + Failure: "since Go 1.18 'interface{}' can be replaced by 'any'", }) return w diff --git a/vendor/github.com/mgechev/revive/rule/var-declarations.go b/vendor/github.com/mgechev/revive/rule/var-declarations.go index a15ff1eb41..3f9d7068a4 100644 --- a/vendor/github.com/mgechev/revive/rule/var-declarations.go +++ b/vendor/github.com/mgechev/revive/rule/var-declarations.go @@ -46,13 +46,15 @@ type lintVarDeclarations struct { func (w *lintVarDeclarations) Visit(node ast.Node) ast.Visitor { switch v := node.(type) { case *ast.GenDecl: - if v.Tok != token.CONST && v.Tok != token.VAR { + isVarOrConstDeclaration := v.Tok == token.CONST || v.Tok == token.VAR + if !isVarOrConstDeclaration { return nil } w.lastGen = v return w case *ast.ValueSpec: - if w.lastGen.Tok == token.CONST { + isConstDeclaration := w.lastGen.Tok == token.CONST + if isConstDeclaration { return nil } if len(v.Names) > 1 || v.Type == nil || len(v.Values) == 0 { @@ -64,14 +66,14 @@ func (w *lintVarDeclarations) Visit(node ast.Node) ast.Visitor { if isIdent(v.Names[0], "_") { return nil } - // If the RHS is a zero value, suggest dropping it. - zero := false + // If the RHS is a isZero value, suggest dropping it. 
+ isZero := false if lit, ok := rhs.(*ast.BasicLit); ok { - zero = zeroLiteral[lit.Value] + isZero = zeroLiteral[lit.Value] } else if isIdent(rhs, "nil") { - zero = true + isZero = true } - if zero { + if isZero { w.onFailure(lint.Failure{ Confidence: 0.9, Node: rhs, diff --git a/vendor/github.com/mgechev/revive/rule/var-naming.go b/vendor/github.com/mgechev/revive/rule/var-naming.go index e91c22dc21..5a4d0dc24b 100644 --- a/vendor/github.com/mgechev/revive/rule/var-naming.go +++ b/vendor/github.com/mgechev/revive/rule/var-naming.go @@ -19,9 +19,9 @@ var upperCaseConstRE = regexp.MustCompile(`^_?[A-Z][A-Z\d]*(_[A-Z\d]+)*$`) // VarNamingRule lints given else constructs. type VarNamingRule struct { configured bool - allowlist []string - blocklist []string - upperCaseConst bool // if true - allows to use UPPER_SOME_NAMES for constants + allowList []string + blockList []string + allowUpperCaseConst bool // if true - allows to use UPPER_SOME_NAMES for constants skipPackageNameChecks bool sync.Mutex } @@ -35,11 +35,11 @@ func (r *VarNamingRule) configure(arguments lint.Arguments) { r.configured = true if len(arguments) >= 1 { - r.allowlist = getList(arguments[0], "allowlist") + r.allowList = getList(arguments[0], "allowlist") } if len(arguments) >= 2 { - r.blocklist = getList(arguments[1], "blocklist") + r.blockList = getList(arguments[1], "blocklist") } if len(arguments) >= 3 { @@ -56,7 +56,7 @@ func (r *VarNamingRule) configure(arguments lint.Arguments) { if !ok { panic(fmt.Sprintf("Invalid third argument to the var-naming rule. Expecting a %s of type slice, of len==1, with map, but %T", "options", asSlice[0])) } - r.upperCaseConst = fmt.Sprint(args["upperCaseConst"]) == "true" + r.allowUpperCaseConst = fmt.Sprint(args["upperCaseConst"]) == "true" r.skipPackageNameChecks = fmt.Sprint(args["skipPackageNameChecks"]) == "true" } } @@ -79,7 +79,6 @@ func (r *VarNamingRule) applyPackageCheckRules(walker *lintNames) { Category: "naming", }) } - } // Apply applies the rule to given file. @@ -93,12 +92,12 @@ func (r *VarNamingRule) Apply(file *lint.File, arguments lint.Arguments) []lint. walker := lintNames{ file: file, fileAst: fileAst, - allowlist: r.allowlist, - blocklist: r.blocklist, + allowList: r.allowList, + blockList: r.blockList, onFailure: func(failure lint.Failure) { failures = append(failures, failure) }, - upperCaseConst: r.upperCaseConst, + upperCaseConst: r.allowUpperCaseConst, } if !r.skipPackageNameChecks { @@ -151,7 +150,7 @@ func (w *lintNames) check(id *ast.Ident, thing string) { return } - should := lint.Name(id.Name, w.allowlist, w.blocklist) + should := lint.Name(id.Name, w.allowList, w.blockList) if id.Name == should { return } @@ -177,8 +176,8 @@ type lintNames struct { file *lint.File fileAst *ast.File onFailure func(lint.Failure) - allowlist []string - blocklist []string + allowList []string + blockList []string upperCaseConst bool } @@ -265,17 +264,17 @@ func (w *lintNames) Visit(n ast.Node) ast.Visitor { } func getList(arg any, argName string) []string { - temp, ok := arg.([]any) + args, ok := arg.([]any) if !ok { panic(fmt.Sprintf("Invalid argument to the var-naming rule. Expecting a %s of type slice with initialisms, got %T", argName, arg)) } var list []string - for _, v := range temp { - if val, ok := v.(string); ok { - list = append(list, val) - } else { + for _, v := range args { + val, ok := v.(string) + if !ok { panic(fmt.Sprintf("Invalid %s values of the var-naming rule. 
Expecting slice of strings but got element of type %T", val, arg)) } + list = append(list, val) } return list } diff --git a/vendor/github.com/mgechev/revive/rule/waitgroup-by-value.go b/vendor/github.com/mgechev/revive/rule/waitgroup-by-value.go index 98644f41c4..a2d304ae51 100644 --- a/vendor/github.com/mgechev/revive/rule/waitgroup-by-value.go +++ b/vendor/github.com/mgechev/revive/rule/waitgroup-by-value.go @@ -51,7 +51,7 @@ func (w lintWaitGroupByValueRule) Visit(node ast.Node) ast.Visitor { }) } - return nil + return nil // skip visiting function body } func (lintWaitGroupByValueRule) isWaitGroup(ft ast.Expr) bool { diff --git a/vendor/github.com/nunnatsa/ginkgolinter/.gitignore b/vendor/github.com/nunnatsa/ginkgolinter/.gitignore index 7d7f8b10ce..67467b7170 100644 --- a/vendor/github.com/nunnatsa/ginkgolinter/.gitignore +++ b/vendor/github.com/nunnatsa/ginkgolinter/.gitignore @@ -1,2 +1,3 @@ ginkgolinter bin/ +e2e \ No newline at end of file diff --git a/vendor/github.com/nunnatsa/ginkgolinter/Makefile b/vendor/github.com/nunnatsa/ginkgolinter/Makefile index 586633006a..8ddd8c42ce 100644 --- a/vendor/github.com/nunnatsa/ginkgolinter/Makefile +++ b/vendor/github.com/nunnatsa/ginkgolinter/Makefile @@ -5,7 +5,7 @@ HASH_FLAG := -X github.com/nunnatsa/ginkgolinter/version.gitHash=$(COMMIT_HASH) BUILD_ARGS := -ldflags "$(VERSION_FLAG) $(HASH_FLAG)" -build: unit-test +build: goimports go build $(BUILD_ARGS) -o ginkgolinter ./cmd/ginkgolinter unit-test: @@ -23,5 +23,11 @@ build-for-linux: build-all: build build-for-linux build-for-mac build-for-windows -test: build - ./tests/e2e.sh +test-cli: + cd tests; go test -v ./ + +test: unit-test test-cli + +goimports: + go install golang.org/x/tools/cmd/goimports@latest + goimports -w -local="github.com/nunnatsa/ginkgolinter" $(shell find . -type f -name '*.go' ! -path "*/vendor/*") diff --git a/vendor/github.com/nunnatsa/ginkgolinter/README.md b/vendor/github.com/nunnatsa/ginkgolinter/README.md index 977cec903e..536a65e7be 100644 --- a/vendor/github.com/nunnatsa/ginkgolinter/README.md +++ b/vendor/github.com/nunnatsa/ginkgolinter/README.md @@ -249,6 +249,28 @@ This will probably happen when using the old format: Eventually(aFunc, 500 * time.Millisecond /*timeout*/, 10 * time.Second /*polling*/).Should(Succeed()) ``` +### Correct usage of the `Succeed()` matcher [Bug] +The `Succeed()` matcher only accepts a single error value. this rule validates that. + +For example: + ```go + Expect(42).To(Succeed()) + ``` + +But mostly, we want to avoid using this matcher with functions that return multiple values, even if their last +returned value is an error, because this is not supported: + ```go + Expect(os.Open("myFile.txt")).To(Succeed()) + ``` + +In async assertions (like `Eventually()`), the `Succeed()` matcher may also been used with functions that accept +a Gomega object as their first parameter, and returns nothing, e.g. this is a valid usage of `Eventually` + ```go + Eventually(func(g Gomega){ + g.Expect(true).To(BeTrue()) + }).WithTimeout(10 * time.Millisecond).WithPolling(time.Millisecond).Should(Succeed()) + ``` + ### Avoid Spec Pollution: Don't Initialize Variables in Container Nodes [BUG/STYLE]: ***Note***: Only applied when the `--forbid-spec-pollution=true` flag is set (disabled by default). 
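The README additions above and below document the new `Succeed()` argument validation and the `Succeed()`/`HaveOccurred()` suggestion. As a minimal, hypothetical sketch (not taken from the ginkgolinter test suite; file, package name, and the usual Ginkgo v2 / Gomega dot-imports are assumptions), the following test combines the documented cases: the first two assertions are the kind the new check warns about, while the async form with a `Gomega` parameter stays valid:

```go
package demo_test

import (
	"os"
	"testing"
	"time"

	. "github.com/onsi/ginkgo/v2"
	. "github.com/onsi/gomega"
)

func TestDemo(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Succeed matcher demo")
}

var _ = Describe("Succeed() argument validation", func() {
	It("shows assertions the linter flags", func() {
		// Flagged: Succeed() only accepts a single error value, and 42 is not an error.
		Expect(42).To(Succeed())

		// Flagged: os.Open returns (*os.File, error); multi-value returns are not supported by Succeed().
		Expect(os.Open("myFile.txt")).To(Succeed())
	})

	It("shows a valid async usage", func() {
		// Valid: in async assertions, Succeed() may be used with a function that
		// takes a Gomega object as its first parameter and returns nothing.
		Eventually(func(g Gomega) {
			g.Expect(true).To(BeTrue())
		}).WithTimeout(10 * time.Millisecond).WithPolling(time.Millisecond).Should(Succeed())
	})
})
```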
@@ -476,6 +498,30 @@ will be changed to: ```go Eventually(aFunc, time.Second*5, time.Second*polling) ``` + +### Correct usage of the `Succeed()` and the `HaveOccurred()` matchers +This rule enforces using the `Success()` matcher only for functions, and the `HaveOccurred()` matcher only for error +values. + +For example: + ```go + Expect(err).To(Succeed()) + ``` +will trigger a warning with a suggestion to replace the mather to + ```go + Expect(err).ToNot(HaveOccurred()) + ``` + +and vice versa: + ```go + Expect(myErrorFunc()).ToNot(HaveOccurred()) + ``` +will trigger a warning with a suggestion to replace the mather to + ```go + Expect(myErrorFunc()).To(Succeed()) + ``` +***This rule is disabled by default***. Use the `--force-succeed=true` command line flag to enable it. + ## Suppress the linter ### Suppress warning from command line * Use the `--suppress-len-assertion=true` flag to suppress the wrong length and cap assertions warning diff --git a/vendor/github.com/nunnatsa/ginkgolinter/analyzer.go b/vendor/github.com/nunnatsa/ginkgolinter/analyzer.go index edff57acd1..dbc39aba5e 100644 --- a/vendor/github.com/nunnatsa/ginkgolinter/analyzer.go +++ b/vendor/github.com/nunnatsa/ginkgolinter/analyzer.go @@ -25,13 +25,14 @@ func NewAnalyzerWithConfig(config *types.Config) *analysis.Analyzer { // NewAnalyzer returns an Analyzer - the package interface with nogo func NewAnalyzer() *analysis.Analyzer { config := &types.Config{ - SuppressLen: false, - SuppressNil: false, - SuppressErr: false, - SuppressCompare: false, - ForbidFocus: false, - AllowHaveLen0: false, - ForceExpectTo: false, + SuppressLen: false, + SuppressNil: false, + SuppressErr: false, + SuppressCompare: false, + ForbidFocus: false, + AllowHaveLen0: false, + ForceExpectTo: false, + ForceSucceedForFuncs: false, } a := NewAnalyzerWithConfig(config) @@ -50,6 +51,7 @@ func NewAnalyzer() *analysis.Analyzer { a.Flags.BoolVar(&ignored, "suppress-focus-container", true, "Suppress warning for ginkgo focus containers like FDescribe, FContext, FWhen or FIt. Deprecated and ignored: use --forbid-focus-container instead") a.Flags.Var(&config.ForbidFocus, "forbid-focus-container", "trigger a warning for ginkgo focus containers like FDescribe, FContext, FWhen or FIt; default = false.") a.Flags.Var(&config.ForbidSpecPollution, "forbid-spec-pollution", "trigger a warning for variable assignments in ginkgo containers like Describe, Context and When, instead of in BeforeEach(); default = false.") + a.Flags.Var(&config.ForceSucceedForFuncs, "force-succeed", "force using the Succeed matcher for error functions, and the HaveOccurred matcher for non-function error values") return a } diff --git a/vendor/github.com/nunnatsa/ginkgolinter/doc.go b/vendor/github.com/nunnatsa/ginkgolinter/doc.go index dd9ecf58a8..c07b6a3164 100644 --- a/vendor/github.com/nunnatsa/ginkgolinter/doc.go +++ b/vendor/github.com/nunnatsa/ginkgolinter/doc.go @@ -30,6 +30,14 @@ For example: This will probably happen when using the old format: Eventually(aFunc, 500 * time.Millisecond, 10 * time.Second).Should(Succeed()) +* Success matcher validation: [BUG] + The Success matcher expect that the actual argument will be a single error. In async actual assertions, It also allow + functions with Gomega object as the function first parameter. +For example: + Expect(myInt).To(Succeed()) +or + Eventually(func() int { return 42 }).Should(Succeed()) + * reject variable assignments in ginkgo containers [Bug/Style]: For example: var _ = Describe("description", func(){ @@ -96,4 +104,13 @@ methods. 
For example: Eventually(context.Background(), func() bool { return true }, "1s").Should(BeTrue()) Eventually(context.Background(), func() bool { return true }, time.Second*60, 15).Should(BeTrue()) + +* Success <=> Eventually usage [Style] + enforce that the Succeed() matcher will be used for error functions, and the HaveOccurred() matcher will + be used for error values. + +For example: + Expect(err).ToNot(Succeed()) +or + Expect(funcRetError().ToNot(HaveOccurred()) ` diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/actual.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/actual.go new file mode 100644 index 0000000000..8e3df5d3fb --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/actual.go @@ -0,0 +1,118 @@ +package actual + +import ( + "go/ast" + gotypes "go/types" + + "golang.org/x/tools/go/analysis" + + "github.com/nunnatsa/ginkgolinter/internal/gomegahandler" + "github.com/nunnatsa/ginkgolinter/internal/gomegainfo" +) + +type Actual struct { + Orig *ast.CallExpr + Clone *ast.CallExpr + Arg ArgPayload + argType gotypes.Type + isTuple bool + isAsync bool + asyncArg *AsyncArg + actualOffset int +} + +func New(origExpr, cloneExpr *ast.CallExpr, orig *ast.CallExpr, clone *ast.CallExpr, pass *analysis.Pass, handler gomegahandler.Handler, timePkg string) (*Actual, bool) { + funcName, ok := handler.GetActualFuncName(orig) + if !ok { + return nil, false + } + + arg, actualOffset := getActualArgPayload(orig, clone, pass, funcName) + if arg == nil { + return nil, false + } + + argType := pass.TypesInfo.TypeOf(orig.Args[actualOffset]) + isTuple := false + + if tpl, ok := argType.(*gotypes.Tuple); ok { + if tpl.Len() > 0 { + argType = tpl.At(0).Type() + } else { + argType = nil + } + + isTuple = tpl.Len() > 1 + } + + isAsyncExpr := gomegainfo.IsAsyncActualMethod(funcName) + + var asyncArg *AsyncArg + if isAsyncExpr { + asyncArg = newAsyncArg(origExpr, cloneExpr, orig, clone, argType, pass, actualOffset, timePkg) + } + + return &Actual{ + Orig: orig, + Clone: clone, + Arg: arg, + argType: argType, + isTuple: isTuple, + isAsync: isAsyncExpr, + asyncArg: asyncArg, + actualOffset: actualOffset, + }, true +} + +func (a *Actual) ReplaceActual(newArgs ast.Expr) { + a.Clone.Args[a.actualOffset] = newArgs +} + +func (a *Actual) ReplaceActualWithItsFirstArg() { + firstArgOfArg := a.Clone.Args[a.actualOffset].(*ast.CallExpr).Args[0] + a.ReplaceActual(firstArgOfArg) +} + +func (a *Actual) IsAsync() bool { + return a.isAsync +} + +func (a *Actual) IsTuple() bool { + return a.isTuple +} + +func (a *Actual) ArgGOType() gotypes.Type { + return a.argType +} + +func (a *Actual) GetAsyncArg() *AsyncArg { + return a.asyncArg +} + +func (a *Actual) AppendWithArgsMethod() { + if a.asyncArg.fun != nil { + if len(a.asyncArg.fun.Args) > 0 { + actualOrigFunc := a.Clone.Fun + actualOrigArgs := a.Clone.Args + + actualOrigArgs[a.actualOffset] = a.asyncArg.fun.Fun + call := &ast.SelectorExpr{ + Sel: ast.NewIdent("WithArguments"), + X: &ast.CallExpr{ + Fun: actualOrigFunc, + Args: actualOrigArgs, + }, + } + + a.Clone.Fun = call + a.Clone.Args = a.asyncArg.fun.Args + a.Clone = a.Clone.Fun.(*ast.SelectorExpr).X.(*ast.CallExpr) + } else { + a.Clone.Args[a.actualOffset] = a.asyncArg.fun.Fun + } + } +} + +func (a *Actual) GetActualArg() ast.Expr { + return a.Clone.Args[a.actualOffset] +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/actualarg.go 
b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/actualarg.go new file mode 100644 index 0000000000..9d251c4680 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/actualarg.go @@ -0,0 +1,235 @@ +package actual + +import ( + "go/ast" + "go/token" + gotypes "go/types" + + "golang.org/x/tools/go/analysis" + + "github.com/nunnatsa/ginkgolinter/internal/expression/value" + "github.com/nunnatsa/ginkgolinter/internal/gomegainfo" + "github.com/nunnatsa/ginkgolinter/internal/reverseassertion" +) + +type ArgType uint64 + +const ( + UnknownActualArgType ArgType = 1 << iota + ErrActualArgType + LenFuncActualArgType + CapFuncActualArgType + ComparisonActualArgType + LenComparisonActualArgType + CapComparisonActualArgType + NilComparisonActualArgType + BinaryComparisonActualArgType + FuncSigArgType + ErrFuncActualArgType + GomegaParamArgType + MultiRetsArgType + + ErrorTypeArgType + + EqualZero + GreaterThanZero +) + +func (a ArgType) Is(val ArgType) bool { + return a&val != 0 +} + +func getActualArgPayload(origActualExpr, actualExprClone *ast.CallExpr, pass *analysis.Pass, actualMethodName string) (ArgPayload, int) { + origArgExpr, argExprClone, actualOffset, isGomegaExpr := getActualArg(origActualExpr, actualExprClone, actualMethodName, pass) + if !isGomegaExpr { + return nil, 0 + } + + var arg ArgPayload + + if value.IsExprError(pass, origArgExpr) { + arg = newErrPayload(origArgExpr, argExprClone, pass) + } else { + switch expr := origArgExpr.(type) { + case *ast.CallExpr: + arg = newFuncCallArgPayload(expr, argExprClone.(*ast.CallExpr)) + + case *ast.BinaryExpr: + arg = parseBinaryExpr(expr, argExprClone.(*ast.BinaryExpr), pass) + + default: + t := pass.TypesInfo.TypeOf(origArgExpr) + if sig, ok := t.(*gotypes.Signature); ok { + arg = getAsyncFuncArg(sig) + } + } + + } + + if arg != nil { + return arg, actualOffset + } + + return newRegularArgPayload(origArgExpr, argExprClone, pass), actualOffset +} + +func getActualArg(origActualExpr *ast.CallExpr, actualExprClone *ast.CallExpr, actualMethodName string, pass *analysis.Pass) (ast.Expr, ast.Expr, int, bool) { + var ( + origArgExpr ast.Expr + argExprClone ast.Expr + ) + + funcOffset := gomegainfo.ActualArgOffset(actualMethodName) + if funcOffset < 0 { + return nil, nil, 0, false + } + + if len(origActualExpr.Args) <= funcOffset { + return nil, nil, 0, false + } + + origArgExpr = origActualExpr.Args[funcOffset] + argExprClone = actualExprClone.Args[funcOffset] + + if gomegainfo.IsAsyncActualMethod(actualMethodName) { + if pass.TypesInfo.TypeOf(origArgExpr).String() == "context.Context" { + funcOffset++ + if len(origActualExpr.Args) <= funcOffset { + return nil, nil, 0, false + } + + origArgExpr = origActualExpr.Args[funcOffset] + argExprClone = actualExprClone.Args[funcOffset] + } + } + + return origArgExpr, argExprClone, funcOffset, true +} + +type ArgPayload interface { + ArgType() ArgType +} + +type RegularArgPayload struct { + value.Value +} + +func newRegularArgPayload(orig, clone ast.Expr, pass *analysis.Pass) *RegularArgPayload { + return &RegularArgPayload{ + Value: value.New(orig, clone, pass), + } +} + +func (*RegularArgPayload) ArgType() ArgType { + return UnknownActualArgType +} + +type FuncCallArgPayload struct { + argType ArgType + + origFunc *ast.CallExpr + cloneFunc *ast.CallExpr + + origVal ast.Expr + cloneVal ast.Expr +} + +func newFuncCallArgPayload(orig, clone *ast.CallExpr) ArgPayload { + funcName, ok := builtinFuncName(orig) + if !ok { + return nil + } + + if len(orig.Args) 
!= 1 { + return nil + } + + var argType ArgType + switch funcName { + case "len": + argType = LenFuncActualArgType + case "cap": + argType = CapFuncActualArgType + default: + return nil + } + + return &FuncCallArgPayload{ + argType: argType, + origFunc: orig, + cloneFunc: clone, + origVal: orig.Args[0], + cloneVal: clone.Args[0], + } +} + +func (f *FuncCallArgPayload) ArgType() ArgType { + return f.argType +} + +type ErrPayload struct { + value.Valuer +} + +func newErrPayload(orig, clone ast.Expr, pass *analysis.Pass) *ErrPayload { + return &ErrPayload{ + Valuer: value.GetValuer(orig, clone, pass), + } +} + +func (*ErrPayload) ArgType() ArgType { + return ErrActualArgType | ErrorTypeArgType +} + +func parseBinaryExpr(origActualExpr, argExprClone *ast.BinaryExpr, pass *analysis.Pass) ArgPayload { + left, right, op := origActualExpr.X, origActualExpr.Y, origActualExpr.Op + replace := false + switch realFirst := left.(type) { + case *ast.Ident: // check if const + info, ok := pass.TypesInfo.Types[realFirst] + if ok { + if value.Is[*gotypes.Basic](info.Type) && (info.Value != nil || info.IsNil()) { + replace = true + } + } + + case *ast.BasicLit: + replace = true + } + + if replace { + left, right = right, left + } + + switch op { + case token.EQL: + case token.NEQ: + case token.GTR, token.GEQ, token.LSS, token.LEQ: + if replace { + op = reverseassertion.ChangeCompareOperator(op) + } + default: + return nil + } + + leftClone, rightClone := argExprClone.X, argExprClone.Y + if replace { + leftClone, rightClone = rightClone, leftClone + } + + leftVal := value.GetValuer(left, leftClone, pass) + rightVal := value.GetValuer(right, rightClone, pass) + + if value.IsNil(right, pass) { + return newNilComparisonPayload(leftVal, rightVal, op) + } + + leftVal.IsFunc() + if firstFunc, ok := left.(*ast.CallExpr); ok { + if payload, ok := newFuncComparisonPayload(firstFunc, leftClone.(*ast.CallExpr), right, rightClone, op, pass); ok { + return payload + } + } + + return newComparisonArgPayload(leftVal, rightVal, op) +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/asyncactual.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/asyncactual.go new file mode 100644 index 0000000000..7c5df2a341 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/asyncactual.go @@ -0,0 +1,123 @@ +package actual + +import ( + "go/ast" + gotypes "go/types" + + "golang.org/x/tools/go/analysis" + + "github.com/nunnatsa/ginkgolinter/internal/intervals" +) + +type AsyncArg struct { + valid bool + fun *ast.CallExpr + + timeoutInterval intervals.DurationValue + pollingInterval intervals.DurationValue + tooManyTimeouts bool + tooManyPolling bool +} + +func newAsyncArg(origExpr, cloneExpr, orig, clone *ast.CallExpr, argType gotypes.Type, pass *analysis.Pass, actualOffset int, timePkg string) *AsyncArg { + var ( + fun *ast.CallExpr + valid = true + timeout intervals.DurationValue + polling intervals.DurationValue + ) + + if _, isActualFuncCall := orig.Args[actualOffset].(*ast.CallExpr); isActualFuncCall { + fun = clone.Args[actualOffset].(*ast.CallExpr) + valid = isValidAsyncValueType(argType) + } + + timeoutOffset := actualOffset + 1 + //var err error + tooManyTimeouts := false + tooManyPolling := false + + if len(orig.Args) > timeoutOffset { + timeout = intervals.GetDuration(pass, timeoutOffset, orig.Args[timeoutOffset], clone.Args[timeoutOffset], timePkg) + pollingOffset := actualOffset + 2 + if len(orig.Args) > pollingOffset { + polling = 
intervals.GetDuration(pass, pollingOffset, orig.Args[pollingOffset], clone.Args[pollingOffset], timePkg) + } + } + selOrig := origExpr.Fun.(*ast.SelectorExpr) + selClone := cloneExpr.Fun.(*ast.SelectorExpr) + + for { + callOrig, ok := selOrig.X.(*ast.CallExpr) + if !ok { + break + } + callClone := selClone.X.(*ast.CallExpr) + + funOrig, ok := callOrig.Fun.(*ast.SelectorExpr) + if !ok { + break + } + funClone := callClone.Fun.(*ast.SelectorExpr) + + switch funOrig.Sel.Name { + case "WithTimeout", "Within": + if timeout != nil { + tooManyTimeouts = true + } else if len(callOrig.Args) == 1 { + timeout = intervals.GetDurationFromValue(pass, callOrig.Args[0], callClone.Args[0]) + } + + case "WithPolling", "ProbeEvery": + if polling != nil { + tooManyPolling = true + } else if len(callOrig.Args) == 1 { + polling = intervals.GetDurationFromValue(pass, callOrig.Args[0], callClone.Args[0]) + } + } + + selOrig = funOrig + selClone = funClone + } + + return &AsyncArg{ + valid: valid, + fun: fun, + timeoutInterval: timeout, + pollingInterval: polling, + tooManyTimeouts: tooManyTimeouts, + tooManyPolling: tooManyPolling, + } +} + +func (a *AsyncArg) IsValid() bool { + return a.valid +} + +func (a *AsyncArg) Timeout() intervals.DurationValue { + return a.timeoutInterval +} + +func (a *AsyncArg) Polling() intervals.DurationValue { + return a.pollingInterval +} + +func (a *AsyncArg) TooManyTimeouts() bool { + return a.tooManyTimeouts +} + +func (a *AsyncArg) TooManyPolling() bool { + return a.tooManyPolling +} + +func isValidAsyncValueType(t gotypes.Type) bool { + switch t.(type) { + // allow functions that return function or channel. + case *gotypes.Signature, *gotypes.Chan, *gotypes.Pointer: + return true + case *gotypes.Named: + return isValidAsyncValueType(t.Underlying()) + } + + return false +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/asyncfuncarg.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/asyncfuncarg.go new file mode 100644 index 0000000000..c777cd4a70 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/asyncfuncarg.go @@ -0,0 +1,38 @@ +package actual + +import ( + gotypes "go/types" + + "github.com/nunnatsa/ginkgolinter/internal/gomegainfo" + "github.com/nunnatsa/ginkgolinter/internal/interfaces" +) + +func getAsyncFuncArg(sig *gotypes.Signature) ArgPayload { + argType := FuncSigArgType + if sig.Results().Len() == 1 { + if interfaces.ImplementsError(sig.Results().At(0).Type().Underlying()) { + argType |= ErrFuncActualArgType | ErrorTypeArgType + } + } + + if sig.Params().Len() > 0 { + arg := sig.Params().At(0).Type() + if gomegainfo.IsGomegaType(arg) && sig.Results().Len() == 0 { + argType |= FuncSigArgType | GomegaParamArgType + } + } + + if sig.Results().Len() > 1 { + argType |= FuncSigArgType | MultiRetsArgType + } + + return &FuncSigArgPayload{argType: argType} +} + +type FuncSigArgPayload struct { + argType ArgType +} + +func (f FuncSigArgPayload) ArgType() ArgType { + return f.argType +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/comparisonAsserion.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/comparisonAsserion.go new file mode 100644 index 0000000000..2b16402db9 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/comparisonAsserion.go @@ -0,0 +1,260 @@ +package actual + +import ( + "go/ast" + "go/constant" + "go/token" + gotypes "go/types" + + "golang.org/x/tools/go/analysis" + + 
"github.com/nunnatsa/ginkgolinter/internal/expression/value" +) + +type ComparisonActualPayload interface { + GetOp() token.Token + GetLeft() value.Valuer + GetRight() value.Valuer +} + +type FuncComparisonPayload struct { + op token.Token + argType ArgType + val value.Valuer + left value.Valuer + arg ast.Expr +} + +func newFuncComparisonPayload(origLeft, leftClone *ast.CallExpr, origRight, rightClone ast.Expr, op token.Token, pass *analysis.Pass) (*FuncComparisonPayload, bool) { + + funcName, ok := builtinFuncName(origLeft) + if !ok { + return nil, false + } + + if len(origLeft.Args) != 1 { + return nil, false + } + + left := value.GetValuer(origLeft, leftClone, pass) + val := value.GetValuer(origRight, rightClone, pass) + + argType := ComparisonActualArgType + switch funcName { + case "len": + argType |= LenComparisonActualArgType + + if val.IsValueNumeric() { + if val.IsValueZero() { + switch op { + case token.EQL: + argType |= EqualZero + + case token.NEQ, token.GTR: + argType |= GreaterThanZero + } + } else if val.GetValue().String() == "1" && op == token.GEQ { + argType |= GreaterThanZero + } + } + + if !argType.Is(GreaterThanZero) && op != token.EQL && op != token.NEQ { + return nil, false + } + + case "cap": + if op != token.EQL && op != token.NEQ { + return nil, false + } + argType |= CapComparisonActualArgType + + default: + return nil, false + } + + return &FuncComparisonPayload{ + op: op, + argType: argType, + val: val, + left: left, + arg: leftClone.Args[0], + }, true +} + +func (f *FuncComparisonPayload) GetLeft() value.Valuer { + return f.left +} + +func (f *FuncComparisonPayload) GetRight() value.Valuer { + return f.val +} + +func (f *FuncComparisonPayload) ArgType() ArgType { + return f.argType +} + +func (f *FuncComparisonPayload) GetOp() token.Token { + return f.op +} + +func (f *FuncComparisonPayload) GetValue() constant.Value { + return f.val.GetValue() +} + +func (f *FuncComparisonPayload) GetType() gotypes.Type { + return f.val.GetType() +} + +func (f *FuncComparisonPayload) GetValueExpr() ast.Expr { + return f.val.GetValueExpr() +} + +func (f *FuncComparisonPayload) IsError() bool { + return f.val.IsError() +} + +func (f *FuncComparisonPayload) IsValueZero() bool { + return f.val.IsValueZero() +} + +func (f *FuncComparisonPayload) IsFunc() bool { + return true +} + +func (f *FuncComparisonPayload) IsValueNumeric() bool { + return f.val.IsValueNumeric() +} + +func (f *FuncComparisonPayload) IsValueInt() bool { + return f.val.IsValueInt() +} + +func (f *FuncComparisonPayload) IsInterface() bool { + return f.val.IsInterface() +} + +func (f *FuncComparisonPayload) IsPointer() bool { + return f.val.IsPointer() +} + +func (f *FuncComparisonPayload) GetFuncArg() ast.Expr { + return f.arg +} + +type ComparisonArgPayload struct { + left value.Valuer + right value.Valuer + op token.Token +} + +func newComparisonArgPayload(left, right value.Valuer, op token.Token) *ComparisonArgPayload { + return &ComparisonArgPayload{ + left: left, + right: right, + op: op, + } +} + +func (*ComparisonArgPayload) ArgType() ArgType { + return BinaryComparisonActualArgType | ComparisonActualArgType +} + +func (c *ComparisonArgPayload) GetOp() token.Token { + return c.op +} + +func (c *ComparisonArgPayload) GetLeft() value.Valuer { + return c.left +} + +func (c *ComparisonArgPayload) GetRight() value.Valuer { + return c.right +} + +type NilComparisonPayload struct { + val value.Valuer + right value.Valuer + op token.Token +} + +func newNilComparisonPayload(val, right value.Valuer, op token.Token) 
*NilComparisonPayload { + return &NilComparisonPayload{ + val: val, + right: right, + op: op, + } +} + +func (*NilComparisonPayload) ArgType() ArgType { + return NilComparisonActualArgType +} + +func (n *NilComparisonPayload) GetLeft() value.Valuer { + return n.val +} + +func (n *NilComparisonPayload) GetRight() value.Valuer { + return n.right +} + +func (n *NilComparisonPayload) GetType() gotypes.Type { + return n.val.GetType() +} + +func (n *NilComparisonPayload) GetValue() constant.Value { + return n.val.GetValue() +} + +func (n *NilComparisonPayload) GetValueExpr() ast.Expr { + return n.val.GetValueExpr() +} + +func (n *NilComparisonPayload) IsValueInt() bool { + return n.val.IsValueInt() +} + +func (n *NilComparisonPayload) IsError() bool { + return n.val.IsError() +} + +func (n *NilComparisonPayload) IsValueNumeric() bool { + return n.val.IsValueNumeric() +} + +func (n *NilComparisonPayload) IsFunc() bool { + return n.val.IsFunc() +} + +func (n *NilComparisonPayload) IsValueZero() bool { + return n.val.IsValueZero() +} + +func (n *NilComparisonPayload) IsInterface() bool { + return n.val.IsInterface() +} + +func (n *NilComparisonPayload) IsPointer() bool { + return n.val.IsPointer() +} + +func (n *NilComparisonPayload) GetOp() token.Token { + return n.op +} + +func builtinFuncName(callExpr *ast.CallExpr) (string, bool) { + argFunc, ok := callExpr.Fun.(*ast.Ident) + if !ok { + return "", false + } + + if len(callExpr.Args) != 1 { + return "", false + } + + switch name := argFunc.Name; name { + case "len", "cap", "min", "max": + return name, true + default: + return "", false + } +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/expression.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/expression.go new file mode 100644 index 0000000000..976e726fcf --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/expression.go @@ -0,0 +1,315 @@ +package expression + +import ( + "fmt" + "go/ast" + "go/token" + gotypes "go/types" + + "github.com/nunnatsa/ginkgolinter/internal/formatter" + + "github.com/go-toolsmith/astcopy" + "golang.org/x/tools/go/analysis" + + "github.com/nunnatsa/ginkgolinter/internal/expression/actual" + "github.com/nunnatsa/ginkgolinter/internal/expression/matcher" + "github.com/nunnatsa/ginkgolinter/internal/expression/value" + "github.com/nunnatsa/ginkgolinter/internal/gomegahandler" + "github.com/nunnatsa/ginkgolinter/internal/gomegainfo" + "github.com/nunnatsa/ginkgolinter/internal/reverseassertion" +) + +type GomegaExpression struct { + orig *ast.CallExpr + clone *ast.CallExpr + + assertionFuncName string + origAssertionFuncName string + actualFuncName string + + isAsync bool + + actual *actual.Actual + matcher *matcher.Matcher + + handler gomegahandler.Handler +} + +func New(origExpr *ast.CallExpr, pass *analysis.Pass, handler gomegahandler.Handler, timePkg string) (*GomegaExpression, bool) { + actualMethodName, ok := handler.GetActualFuncName(origExpr) + if !ok || !gomegainfo.IsActualMethod(actualMethodName) { + return nil, false + } + + origSel, ok := origExpr.Fun.(*ast.SelectorExpr) + if !ok || !gomegainfo.IsAssertionFunc(origSel.Sel.Name) { + return &GomegaExpression{ + orig: origExpr, + actualFuncName: actualMethodName, + }, true + } + + exprClone := astcopy.CallExpr(origExpr) + selClone := exprClone.Fun.(*ast.SelectorExpr) + + origActual := handler.GetActualExpr(origSel) + if origActual == nil { + return nil, false + } + + actualClone := handler.GetActualExprClone(origSel, selClone) + if 
actualClone == nil { + return nil, false + } + + actl, ok := actual.New(origExpr, exprClone, origActual, actualClone, pass, handler, timePkg) + if !ok { + return nil, false + } + + origMatcher, ok := origExpr.Args[0].(*ast.CallExpr) + if !ok { + return nil, false + } + + matcherClone := exprClone.Args[0].(*ast.CallExpr) + + mtchr, ok := matcher.New(origMatcher, matcherClone, pass, handler) + if !ok { + return nil, false + } + + exprClone.Args[0] = mtchr.Clone + + gexp := &GomegaExpression{ + orig: origExpr, + clone: exprClone, + + assertionFuncName: origSel.Sel.Name, + origAssertionFuncName: origSel.Sel.Name, + actualFuncName: actualMethodName, + + isAsync: actl.IsAsync(), + + actual: actl, + matcher: mtchr, + + handler: handler, + } + + if mtchr.ShouldReverseLogic() { + gexp.ReverseAssertionFuncLogic() + } + + return gexp, true +} + +func (e *GomegaExpression) IsMissingAssertion() bool { + return e.matcher == nil +} + +func (e *GomegaExpression) GetActualFuncName() string { + if e == nil { + return "" + } + return e.actualFuncName +} + +func (e *GomegaExpression) GetAssertFuncName() string { + if e == nil { + return "" + } + return e.assertionFuncName +} + +func (e *GomegaExpression) GetOrigAssertFuncName() string { + if e == nil { + return "" + } + return e.origAssertionFuncName +} + +func (e *GomegaExpression) IsAsync() bool { + return e.isAsync +} + +func (e *GomegaExpression) ReverseAssertionFuncLogic() { + assertionFunc := e.clone.Fun.(*ast.SelectorExpr).Sel + newName := reverseassertion.ChangeAssertionLogic(assertionFunc.Name) + assertionFunc.Name = newName + e.assertionFuncName = newName +} + +func (e *GomegaExpression) ReplaceAssertionMethod(name string) { + e.clone.Fun.(*ast.SelectorExpr).Sel.Name = name +} + +func (e *GomegaExpression) ReplaceMatcherFuncName(name string) { + e.matcher.ReplaceMatcherFuncName(name) +} + +func (e *GomegaExpression) ReplaceMatcherArgs(newArgs []ast.Expr) { + e.matcher.ReplaceMatcherArgs(newArgs) +} + +func (e *GomegaExpression) RemoveMatcherArgs() { + e.matcher.ReplaceMatcherArgs(nil) +} + +func (e *GomegaExpression) ReplaceActual(newArg ast.Expr) { + e.actual.ReplaceActual(newArg) +} + +func (e *GomegaExpression) ReplaceActualWithItsFirstArg() { + e.actual.ReplaceActualWithItsFirstArg() +} + +func (e *GomegaExpression) replaceMathcerFuncNoArgs(name string) { + e.matcher.ReplaceMatcherFuncName(name) + e.RemoveMatcherArgs() +} + +func (e *GomegaExpression) SetMatcherBeZero() { + e.replaceMathcerFuncNoArgs("BeZero") +} + +func (e *GomegaExpression) SetMatcherBeEmpty() { + e.replaceMathcerFuncNoArgs("BeEmpty") +} + +func (e *GomegaExpression) SetLenNumericMatcher() { + if m, ok := e.matcher.GetMatcherInfo().(value.Valuer); ok && m.IsValueZero() { + e.SetMatcherBeEmpty() + } else { + e.ReplaceMatcherFuncName("HaveLen") + e.ReplaceMatcherArgs([]ast.Expr{m.GetValueExpr()}) + } +} + +func (e *GomegaExpression) SetLenNumericActual() { + if m, ok := e.matcher.GetMatcherInfo().(value.Valuer); ok && m.IsValueZero() { + e.SetMatcherBeEmpty() + } else { + e.ReplaceMatcherFuncName("HaveLen") + e.ReplaceMatcherArgs([]ast.Expr{m.GetValueExpr()}) + } +} + +func (e *GomegaExpression) SetMatcherLen(arg ast.Expr) { + e.ReplaceMatcherFuncName("HaveLen") + e.ReplaceMatcherArgs([]ast.Expr{arg}) +} + +func (e *GomegaExpression) SetMatcherCap(arg ast.Expr) { + e.ReplaceMatcherFuncName("HaveCap") + e.ReplaceMatcherArgs([]ast.Expr{arg}) +} + +func (e *GomegaExpression) SetMatcherCapZero() { + e.ReplaceMatcherFuncName("HaveCap") + 
e.ReplaceMatcherArgs([]ast.Expr{&ast.BasicLit{Kind: token.INT, Value: "0"}}) +} + +func (e *GomegaExpression) SetMatcherSucceed() { + e.replaceMathcerFuncNoArgs("Succeed") +} + +func (e *GomegaExpression) SetMatcherHaveOccurred() { + e.replaceMathcerFuncNoArgs("HaveOccurred") +} + +func (e *GomegaExpression) SetMatcherBeNil() { + e.replaceMathcerFuncNoArgs("BeNil") +} + +func (e *GomegaExpression) SetMatcherBeTrue() { + e.replaceMathcerFuncNoArgs("BeTrue") +} + +func (e *GomegaExpression) SetMatcherBeFalse() { + e.replaceMathcerFuncNoArgs("BeFalse") +} + +func (e *GomegaExpression) SetMatcherHaveValue() { + newMatcherExp := e.handler.GetNewWrapperMatcher("HaveValue", e.matcher.Clone) + e.clone.Args[0] = newMatcherExp + e.matcher.Clone = newMatcherExp +} + +func (e *GomegaExpression) SetMatcherEqual(arg ast.Expr) { + e.ReplaceMatcherFuncName("Equal") + e.ReplaceMatcherArgs([]ast.Expr{arg}) +} + +func (e *GomegaExpression) SetMatcherBeIdenticalTo(arg ast.Expr) { + e.ReplaceMatcherFuncName("BeIdenticalTo") + e.ReplaceMatcherArgs([]ast.Expr{arg}) +} + +func (e *GomegaExpression) SetMatcherBeNumerically(op token.Token, arg ast.Expr) { + e.ReplaceMatcherFuncName("BeNumerically") + e.ReplaceMatcherArgs([]ast.Expr{ + &ast.BasicLit{Kind: token.STRING, Value: fmt.Sprintf("%q", op.String())}, + arg, + }) +} + +func (e *GomegaExpression) IsNegativeAssertion() bool { + return reverseassertion.IsNegativeLogic(e.assertionFuncName) +} + +func (e *GomegaExpression) GetClone() *ast.CallExpr { + return e.clone +} + +// Actual proxies: + +func (e *GomegaExpression) GetActualClone() *ast.CallExpr { + return e.actual.Clone +} + +func (e *GomegaExpression) AppendWithArgsToActual() { + e.actual.AppendWithArgsMethod() +} + +func (e *GomegaExpression) GetAsyncActualArg() *actual.AsyncArg { + return e.actual.GetAsyncArg() +} + +func (e *GomegaExpression) GetActualArg() actual.ArgPayload { + return e.actual.Arg +} + +func (e *GomegaExpression) GetActualArgExpr() ast.Expr { + return e.actual.GetActualArg() +} + +func (e *GomegaExpression) GetActualArgGOType() gotypes.Type { + return e.actual.ArgGOType() +} + +func (e *GomegaExpression) ActualArgTypeIs(other actual.ArgType) bool { + return e.actual.Arg.ArgType().Is(other) +} + +func (e *GomegaExpression) IsActualTuple() bool { + return e.actual.IsTuple() +} + +// Matcher proxies + +func (e *GomegaExpression) GetMatcher() *matcher.Matcher { + return e.matcher +} + +func (e *GomegaExpression) GetMatcherInfo() matcher.Info { + return e.matcher.GetMatcherInfo() +} + +func (e *GomegaExpression) MatcherTypeIs(other matcher.Type) bool { + return e.matcher.GetMatcherInfo().Type().Is(other) +} + +func (e *GomegaExpression) FormatOrig(frm *formatter.GoFmtFormatter) string { + return frm.Format(e.orig) +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/bematchers.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/bematchers.go new file mode 100644 index 0000000000..24272535dc --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/bematchers.go @@ -0,0 +1,77 @@ +package matcher + +import "github.com/nunnatsa/ginkgolinter/internal/expression/value" + +type BeIdenticalToMatcher struct { + value.Value +} + +func (BeIdenticalToMatcher) Type() Type { + return BeIdenticalToMatcherType +} + +func (BeIdenticalToMatcher) MatcherName() string { + return beIdenticalTo +} + +type BeEquivalentToMatcher struct { + value.Value +} + +func (BeEquivalentToMatcher) Type() Type { + return BeEquivalentToMatcherType 
+} + +func (BeEquivalentToMatcher) MatcherName() string { + return beEquivalentTo +} + +type BeZeroMatcher struct{} + +func (BeZeroMatcher) Type() Type { + return BeZeroMatcherType +} + +func (BeZeroMatcher) MatcherName() string { + return beZero +} + +type BeEmptyMatcher struct{} + +func (BeEmptyMatcher) Type() Type { + return BeEmptyMatcherType +} + +func (BeEmptyMatcher) MatcherName() string { + return beEmpty +} + +type BeTrueMatcher struct{} + +func (BeTrueMatcher) Type() Type { + return BeTrueMatcherType | BoolValueTrue +} + +func (BeTrueMatcher) MatcherName() string { + return beTrue +} + +type BeFalseMatcher struct{} + +func (BeFalseMatcher) Type() Type { + return BeFalseMatcherType | BoolValueFalse +} + +func (BeFalseMatcher) MatcherName() string { + return beFalse +} + +type BeNilMatcher struct{} + +func (BeNilMatcher) Type() Type { + return BeNilMatcherType +} + +func (BeNilMatcher) MatcherName() string { + return beNil +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/benumericmatcher.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/benumericmatcher.go new file mode 100644 index 0000000000..8683f02918 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/benumericmatcher.go @@ -0,0 +1,128 @@ +package matcher + +import ( + "go/ast" + "go/constant" + "go/token" + gotypes "go/types" + + "golang.org/x/tools/go/analysis" + + "github.com/nunnatsa/ginkgolinter/internal/expression/value" +) + +type BeNumericallyMatcher struct { + op token.Token + value value.Valuer + argType Type +} + +var compareOps = map[string]token.Token{ + `"=="`: token.EQL, + `"<"`: token.LSS, + `">"`: token.GTR, + `"="`: token.ASSIGN, + `"!="`: token.NEQ, + `"<="`: token.LEQ, + `">="`: token.GEQ, +} + +func getCompareOp(opExp ast.Expr) token.Token { + basic, ok := opExp.(*ast.BasicLit) + if !ok { + return token.ILLEGAL + } + if basic.Kind != token.STRING { + return token.ILLEGAL + } + + if tk, ok := compareOps[basic.Value]; ok { + return tk + } + + return token.ILLEGAL +} + +func newBeNumericallyMatcher(opExp, orig, clone ast.Expr, pass *analysis.Pass) Info { + op := getCompareOp(opExp) + if op == token.ILLEGAL { + return &UnspecifiedMatcher{ + matcherName: beNumerically, + } + } + + val := value.GetValuer(orig, clone, pass) + argType := BeNumericallyMatcherType + + if val.IsValueNumeric() { + if v := val.GetValue().String(); v == "0" { + switch op { + case token.EQL: + argType |= EqualZero + + case token.NEQ, token.GTR: + argType |= GreaterThanZero + } + } else if v == "1" && op == token.GEQ { + argType |= GreaterThanZero + } + } + + return &BeNumericallyMatcher{ + op: op, + value: val, + argType: argType, + } +} + +func (m BeNumericallyMatcher) Type() Type { + return m.argType +} + +func (BeNumericallyMatcher) MatcherName() string { + return beNumerically +} + +func (m BeNumericallyMatcher) GetValueExpr() ast.Expr { + return m.value.GetValueExpr() +} + +func (m BeNumericallyMatcher) GetValue() constant.Value { + return m.value.GetValue() +} + +func (m BeNumericallyMatcher) GetType() gotypes.Type { + return m.value.GetType() +} + +func (m BeNumericallyMatcher) GetOp() token.Token { + return m.op +} + +func (m BeNumericallyMatcher) IsValueZero() bool { + return m.value.IsValueZero() +} + +func (m BeNumericallyMatcher) IsValueInt() bool { + return m.value.IsValueInt() +} + +func (m BeNumericallyMatcher) IsValueNumeric() bool { + return m.value.IsValueNumeric() +} + +func (m BeNumericallyMatcher) IsError() bool { + return 
m.value.IsError() +} + +func (m BeNumericallyMatcher) IsFunc() bool { + return m.value.IsFunc() +} + +func (m BeNumericallyMatcher) IsInterface() bool { + return m.value.IsInterface() +} + +func (m BeNumericallyMatcher) IsPointer() bool { + return m.value.IsPointer() +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/equalmatcher.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/equalmatcher.go new file mode 100644 index 0000000000..8cee8e408e --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/equalmatcher.go @@ -0,0 +1,124 @@ +package matcher + +import ( + "go/ast" + "go/constant" + gotypes "go/types" + + "golang.org/x/tools/go/analysis" + + "github.com/nunnatsa/ginkgolinter/internal/expression/value" +) + +func newEqualMatcher(orig, clone ast.Expr, pass *analysis.Pass) Info { + t := pass.TypesInfo.Types[orig] + + if t.Value != nil { + if t.Value.Kind() == constant.Bool { + if t.Value.String() == "true" { + return &EqualTrueMatcher{} + } + return &EqualFalseMatcher{} + } + } + + if value.IsNil(orig, pass) { + return &EqualNilMatcher{ + gotype: pass.TypesInfo.TypeOf(orig), + } + } + + val := value.GetValuer(orig, clone, pass) + + return &EqualMatcher{ + val: val, + } +} + +type EqualMatcher struct { + val value.Valuer +} + +func (EqualMatcher) Type() Type { + return EqualMatcherType +} + +func (EqualMatcher) MatcherName() string { + return equal +} + +func (m EqualMatcher) GetValue() constant.Value { + return m.val.GetValue() +} + +func (m EqualMatcher) GetType() gotypes.Type { + return m.val.GetType() +} + +func (m EqualMatcher) GetValueExpr() ast.Expr { + return m.val.GetValueExpr() +} + +func (m EqualMatcher) IsValueZero() bool { + return m.val.IsValueZero() +} + +func (m EqualMatcher) IsValueInt() bool { + return m.val.IsValueInt() +} + +func (m EqualMatcher) IsValueNumeric() bool { + return m.val.IsValueNumeric() +} + +func (m EqualMatcher) IsError() bool { + return m.val.IsError() +} + +func (m EqualMatcher) IsFunc() bool { + return m.val.IsFunc() +} + +func (m EqualMatcher) IsInterface() bool { + return m.val.IsInterface() +} + +func (m EqualMatcher) IsPointer() bool { + return m.val.IsPointer() +} + +type EqualNilMatcher struct { + gotype gotypes.Type +} + +func (EqualNilMatcher) Type() Type { + return EqualNilMatcherType | EqualMatcherType | EqualValueMatcherType +} + +func (EqualNilMatcher) MatcherName() string { + return equal +} + +func (n EqualNilMatcher) GetType() gotypes.Type { + return n.gotype +} + +type EqualTrueMatcher struct{} + +func (EqualTrueMatcher) Type() Type { + return EqualMatcherType | EqualBoolValueMatcherType | BoolValueTrue +} + +func (EqualTrueMatcher) MatcherName() string { + return equal +} + +type EqualFalseMatcher struct{} + +func (EqualFalseMatcher) Type() Type { + return EqualMatcherType | EqualBoolValueMatcherType | BoolValueFalse +} + +func (EqualFalseMatcher) MatcherName() string { + return equal +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/errormatchers.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/errormatchers.go new file mode 100644 index 0000000000..a493287e01 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/errormatchers.go @@ -0,0 +1,199 @@ +package matcher + +import ( + "go/ast" + gotypes "go/types" + + "golang.org/x/tools/go/analysis" + + "github.com/nunnatsa/ginkgolinter/internal/expression/value" + 
"github.com/nunnatsa/ginkgolinter/internal/interfaces" +) + +type HaveOccurredMatcher struct{} + +func (m *HaveOccurredMatcher) Type() Type { + return HaveOccurredMatcherType +} +func (m *HaveOccurredMatcher) MatcherName() string { + return haveOccurred +} + +type SucceedMatcher struct{} + +func (m *SucceedMatcher) Type() Type { + return SucceedMatcherType +} +func (m *SucceedMatcher) MatcherName() string { + return succeed +} + +type MatchErrorMatcher interface { + Info + AllowedNumArgs() int + NumArgs() int +} + +type InvalidMatchErrorMatcher struct { + firstAgr ast.Expr + numArgs int +} + +func (m *InvalidMatchErrorMatcher) Type() Type { + return MatchErrorMatcherType +} + +func (m *InvalidMatchErrorMatcher) MatcherName() string { + return matchError +} + +func (m *InvalidMatchErrorMatcher) AllowedNumArgs() int { + return 1 +} + +func (m *InvalidMatchErrorMatcher) NumArgs() int { + return m.numArgs +} + +func (m *InvalidMatchErrorMatcher) GetValueExpr() ast.Expr { + return m.firstAgr +} + +type MatchErrorMatcherWithErr struct { + numArgs int +} + +func (m *MatchErrorMatcherWithErr) Type() Type { + return MatchErrorMatcherType | ErrMatchWithErr +} + +func (m *MatchErrorMatcherWithErr) MatcherName() string { + return matchError +} + +func (m *MatchErrorMatcherWithErr) AllowedNumArgs() int { + return 1 +} + +func (m *MatchErrorMatcherWithErr) NumArgs() int { + return m.numArgs +} + +type MatchErrorMatcherWithErrFunc struct { + numArgs int + secondArgIsString bool +} + +func (m *MatchErrorMatcherWithErrFunc) Type() Type { + return MatchErrorMatcherType | ErrMatchWithErrFunc +} + +func (m *MatchErrorMatcherWithErrFunc) MatcherName() string { + return matchError +} + +func (m *MatchErrorMatcherWithErrFunc) AllowedNumArgs() int { + return 2 +} + +func (m *MatchErrorMatcherWithErrFunc) NumArgs() int { + return m.numArgs +} + +func (m *MatchErrorMatcherWithErrFunc) IsSecondArgString() bool { + return m.secondArgIsString +} + +type MatchErrorMatcherWithString struct { + numArgs int +} + +func (m *MatchErrorMatcherWithString) Type() Type { + return MatchErrorMatcherType | ErrMatchWithString +} + +func (m *MatchErrorMatcherWithString) MatcherName() string { + return matchError +} + +func (m *MatchErrorMatcherWithString) AllowedNumArgs() int { + return 1 +} + +func (m *MatchErrorMatcherWithString) NumArgs() int { + return m.numArgs +} + +type MatchErrorMatcherWithMatcher struct { + numArgs int +} + +func (m *MatchErrorMatcherWithMatcher) Type() Type { + return MatchErrorMatcherType | ErrMatchWithMatcher +} + +func (m *MatchErrorMatcherWithMatcher) MatcherName() string { + return matchError +} + +func (m *MatchErrorMatcherWithMatcher) AllowedNumArgs() int { + return 1 +} + +func (m *MatchErrorMatcherWithMatcher) NumArgs() int { + return m.numArgs +} + +func newMatchErrorMatcher(args []ast.Expr, pass *analysis.Pass) MatchErrorMatcher { + numArgs := len(args) + if value.IsExprError(pass, args[0]) { + return &MatchErrorMatcherWithErr{numArgs: numArgs} + } + + t := pass.TypesInfo.TypeOf(args[0]) + if isString(args[0], pass) { + return &MatchErrorMatcherWithString{numArgs: numArgs} + } + + if interfaces.ImplementsGomegaMatcher(t) { + return &MatchErrorMatcherWithMatcher{numArgs: numArgs} + } + + if isFuncErrBool(t) { + isString := false + if numArgs > 1 { + t2 := pass.TypesInfo.TypeOf(args[1]) + isString = gotypes.Identical(t2, gotypes.Typ[gotypes.String]) + } + return &MatchErrorMatcherWithErrFunc{numArgs: numArgs, secondArgIsString: isString} + } + + return &InvalidMatchErrorMatcher{numArgs: numArgs} +} 
+ +func isString(exp ast.Expr, pass *analysis.Pass) bool { + t := pass.TypesInfo.TypeOf(exp) + return gotypes.Identical(t, gotypes.Typ[gotypes.String]) +} + +// isFuncErrBool checks if a function is with the signature `func(error) bool` +func isFuncErrBool(t gotypes.Type) bool { + sig, ok := t.(*gotypes.Signature) + if !ok { + return false + } + if sig.Params().Len() != 1 || sig.Results().Len() != 1 { + return false + } + + if !interfaces.ImplementsError(sig.Params().At(0).Type()) { + return false + } + + b, ok := sig.Results().At(0).Type().(*gotypes.Basic) + if ok && b.Name() == "bool" && b.Info() == gotypes.IsBoolean && b.Kind() == gotypes.Bool { + return true + } + + return false +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/lenmatchers.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/lenmatchers.go new file mode 100644 index 0000000000..8e4f438e87 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/lenmatchers.go @@ -0,0 +1,11 @@ +package matcher + +type HaveLenZeroMatcher struct{} + +func (HaveLenZeroMatcher) Type() Type { + return HaveLenZeroMatcherType +} + +func (HaveLenZeroMatcher) MatcherName() string { + return haveLen +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/matcher.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/matcher.go new file mode 100644 index 0000000000..0969b95519 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/matcher.go @@ -0,0 +1,86 @@ +package matcher + +import ( + "go/ast" + + "golang.org/x/tools/go/analysis" + + "github.com/nunnatsa/ginkgolinter/internal/gomegahandler" +) + +const ( // gomega matchers + beEmpty = "BeEmpty" + beEquivalentTo = "BeEquivalentTo" + beFalse = "BeFalse" + beIdenticalTo = "BeIdenticalTo" + beNil = "BeNil" + beNumerically = "BeNumerically" + beTrue = "BeTrue" + beZero = "BeZero" + equal = "Equal" + haveLen = "HaveLen" + haveValue = "HaveValue" + and = "And" + or = "Or" + withTransform = "WithTransform" + matchError = "MatchError" + haveOccurred = "HaveOccurred" + succeed = "Succeed" +) + +type Matcher struct { + funcName string + Orig *ast.CallExpr + Clone *ast.CallExpr + info Info + reverseLogic bool + handler gomegahandler.Handler +} + +func New(origMatcher, matcherClone *ast.CallExpr, pass *analysis.Pass, handler gomegahandler.Handler) (*Matcher, bool) { + reverse := false + var assertFuncName string + for { + ok := false + assertFuncName, ok = handler.GetActualFuncName(origMatcher) + if !ok { + return nil, false + } + + if assertFuncName != "Not" { + break + } + + reverse = !reverse + origMatcher, ok = origMatcher.Args[0].(*ast.CallExpr) + if !ok { + return nil, false + } + matcherClone = matcherClone.Args[0].(*ast.CallExpr) + } + + return &Matcher{ + funcName: assertFuncName, + Orig: origMatcher, + Clone: matcherClone, + info: getMatcherInfo(origMatcher, matcherClone, assertFuncName, pass, handler), + reverseLogic: reverse, + handler: handler, + }, true +} + +func (m *Matcher) ShouldReverseLogic() bool { + return m.reverseLogic +} + +func (m *Matcher) GetMatcherInfo() Info { + return m.info +} + +func (m *Matcher) ReplaceMatcherFuncName(name string) { + m.handler.ReplaceFunction(m.Clone, ast.NewIdent(name)) +} + +func (m *Matcher) ReplaceMatcherArgs(newArgs []ast.Expr) { + m.Clone.Args = newArgs +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/matcherinfo.go 
b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/matcherinfo.go new file mode 100644 index 0000000000..084226bcca --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/matcherinfo.go @@ -0,0 +1,148 @@ +package matcher + +import ( + "go/ast" + + "golang.org/x/tools/go/analysis" + + "github.com/nunnatsa/ginkgolinter/internal/expression/value" + "github.com/nunnatsa/ginkgolinter/internal/gomegahandler" +) + +type Type uint64 + +const ( + Unspecified Type = 1 << iota + EqualMatcherType + BeZeroMatcherType + BeEmptyMatcherType + BeTrueMatcherType + BeFalseMatcherType + BeNumericallyMatcherType + HaveLenZeroMatcherType + BeEquivalentToMatcherType + BeIdenticalToMatcherType + BeNilMatcherType + MatchErrorMatcherType + MultipleMatcherMatherType + HaveValueMatherType + WithTransformMatherType + EqualBoolValueMatcherType + EqualValueMatcherType + HaveOccurredMatcherType + SucceedMatcherType + EqualNilMatcherType + + BoolValueFalse + BoolValueTrue + + OrMatherType + AndMatherType + + ErrMatchWithErr + ErrMatchWithErrFunc + ErrMatchWithString + ErrMatchWithMatcher + + EqualZero + GreaterThanZero +) + +type Info interface { + Type() Type + MatcherName() string +} + +func getMatcherInfo(orig, clone *ast.CallExpr, matcherName string, pass *analysis.Pass, handler gomegahandler.Handler) Info { + switch matcherName { + case equal: + return newEqualMatcher(orig.Args[0], clone.Args[0], pass) + + case beZero: + return &BeZeroMatcher{} + + case beEmpty: + return &BeEmptyMatcher{} + + case beTrue: + return &BeTrueMatcher{} + + case beFalse: + return &BeFalseMatcher{} + + case beNil: + return &BeNilMatcher{} + + case beNumerically: + if len(orig.Args) == 2 { + return newBeNumericallyMatcher(orig.Args[0], orig.Args[1], clone.Args[1], pass) + } + + case haveLen: + if value.GetValuer(orig.Args[0], clone.Args[0], pass).IsValueZero() { + return &HaveLenZeroMatcher{} + } + + case beEquivalentTo: + return &BeEquivalentToMatcher{ + Value: value.New(orig.Args[0], clone.Args[0], pass), + } + + case beIdenticalTo: + return &BeIdenticalToMatcher{ + Value: value.New(orig.Args[0], clone.Args[0], pass), + } + + case matchError: + return newMatchErrorMatcher(orig.Args, pass) + + case haveValue: + if nestedMatcher, ok := getNestedMatcher(orig, clone, 0, pass, handler); ok { + return &HaveValueMatcher{ + nested: nestedMatcher, + } + } + + case withTransform: + if nestedMatcher, ok := getNestedMatcher(orig, clone, 1, pass, handler); ok { + return newWithTransformMatcher(orig.Args[0], nestedMatcher, pass) + } + + case or, and: + matcherType := MultipleMatcherMatherType + if matcherName == or { + matcherType |= OrMatherType + } else { + matcherType |= AndMatherType + } + + if m, ok := newMultipleMatchersMatcher(matcherType, orig.Args, clone.Args, pass, handler); ok { + return m + } + + case succeed: + return &SucceedMatcher{} + + case haveOccurred: + return &HaveOccurredMatcher{} + + } + + return &UnspecifiedMatcher{matcherName: matcherName} +} + +type UnspecifiedMatcher struct { + matcherName string +} + +func (UnspecifiedMatcher) Type() Type { + return Unspecified +} + +func (u UnspecifiedMatcher) MatcherName() string { + return u.matcherName +} + +func (t Type) Is(other Type) bool { + return t&other != 0 +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/matcherwithnest.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/matcherwithnest.go new file mode 100644 index 0000000000..cc26e5ac2c --- /dev/null +++ 
b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/matcherwithnest.go @@ -0,0 +1,66 @@ +package matcher + +import ( + "go/ast" + gotypes "go/types" + + "golang.org/x/tools/go/analysis" + + "github.com/nunnatsa/ginkgolinter/internal/gomegahandler" +) + +type HaveValueMatcher struct { + nested *Matcher +} + +func (m *HaveValueMatcher) Type() Type { + return HaveValueMatherType +} +func (m *HaveValueMatcher) MatcherName() string { + return haveValue +} + +func (m *HaveValueMatcher) GetNested() *Matcher { + return m.nested +} + +type WithTransformMatcher struct { + funcType gotypes.Type + nested *Matcher +} + +func (m *WithTransformMatcher) Type() Type { + return WithTransformMatherType +} +func (m *WithTransformMatcher) MatcherName() string { + return withTransform +} + +func (m *WithTransformMatcher) GetNested() *Matcher { + return m.nested +} + +func (m *WithTransformMatcher) GetFuncType() gotypes.Type { + return m.funcType +} + +func getNestedMatcher(orig, clone *ast.CallExpr, offset int, pass *analysis.Pass, handler gomegahandler.Handler) (*Matcher, bool) { + if origNested, ok := orig.Args[offset].(*ast.CallExpr); ok { + cloneNested := clone.Args[offset].(*ast.CallExpr) + + return New(origNested, cloneNested, pass, handler) + } + + return nil, false +} + +func newWithTransformMatcher(fun ast.Expr, nested *Matcher, pass *analysis.Pass) *WithTransformMatcher { + funcType := pass.TypesInfo.TypeOf(fun) + if sig, ok := funcType.(*gotypes.Signature); ok && sig.Results().Len() > 0 { + funcType = sig.Results().At(0).Type() + } + return &WithTransformMatcher{ + funcType: funcType, + nested: nested, + } +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/multiplematchers.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/multiplematchers.go new file mode 100644 index 0000000000..9ce0cf5b83 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/multiplematchers.go @@ -0,0 +1,62 @@ +package matcher + +import ( + "go/ast" + + "golang.org/x/tools/go/analysis" + + "github.com/nunnatsa/ginkgolinter/internal/gomegahandler" +) + +type MultipleMatchersMatcher struct { + matherType Type + matchers []*Matcher +} + +func (m *MultipleMatchersMatcher) Type() Type { + return m.matherType +} + +func (m *MultipleMatchersMatcher) MatcherName() string { + if m.matherType.Is(OrMatherType) { + return or + } + return and +} + +func newMultipleMatchersMatcher(matherType Type, orig, clone []ast.Expr, pass *analysis.Pass, handler gomegahandler.Handler) (*MultipleMatchersMatcher, bool) { + matchers := make([]*Matcher, len(orig)) + + for i := range orig { + nestedOrig, ok := orig[i].(*ast.CallExpr) + if !ok { + return nil, false + } + + m, ok := New(nestedOrig, clone[i].(*ast.CallExpr), pass, handler) + if !ok { + return nil, false + } + + m.reverseLogic = false + + matchers[i] = m + } + + return &MultipleMatchersMatcher{ + matherType: matherType, + matchers: matchers, + }, true +} + +func (m *MultipleMatchersMatcher) Len() int { + return len(m.matchers) +} + +func (m *MultipleMatchersMatcher) At(i int) *Matcher { + if i >= len(m.matchers) { + panic("index out of range") + } + + return m.matchers[i] +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/value/value.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/value/value.go new file mode 100644 index 0000000000..dda0dd73ba --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/value/value.go @@ -0,0 +1,221 
@@ +package value + +import ( + "go/ast" + "go/constant" + gotypes "go/types" + + "golang.org/x/tools/go/analysis" + + "github.com/nunnatsa/ginkgolinter/internal/interfaces" +) + +type Valuer interface { + GetValue() constant.Value + GetType() gotypes.Type + GetValueExpr() ast.Expr + IsValueZero() bool + IsValueInt() bool + IsValueNumeric() bool + IsError() bool + IsFunc() bool + IsInterface() bool + IsPointer() bool +} + +func GetValuer(orig, clone ast.Expr, pass *analysis.Pass) Valuer { + val := New(orig, clone, pass) + unspecified := UnspecifiedValue{ + Value: val, + } + + if orig == nil { + return unspecified + } + + if IsExprError(pass, orig) { + return &ErrValue{ + Value: val, + err: clone, + } + } + + if val.GetValue() == nil || !val.tv.IsValue() { + return unspecified + } + + if val.GetValue().Kind() == constant.Int { + num, ok := constant.Int64Val(val.GetValue()) + if !ok { + return unspecified + } + return &IntValue{ + Value: val, + val: num, + } + } + + return unspecified +} + +type Value struct { + expr ast.Expr + tv gotypes.TypeAndValue +} + +func New(orig, clone ast.Expr, pass *analysis.Pass) Value { + tv := pass.TypesInfo.Types[orig] + + return Value{ + expr: clone, + tv: tv, + } +} + +func (v Value) GetValueExpr() ast.Expr { + return v.expr +} + +func (v Value) GetValue() constant.Value { + return v.tv.Value +} + +func (v Value) GetType() gotypes.Type { + return v.tv.Type +} + +func (v Value) IsInterface() bool { + return gotypes.IsInterface(v.tv.Type) +} + +func (v Value) IsPointer() bool { + return Is[*gotypes.Pointer](v.tv.Type) +} + +func (v Value) IsNil() bool { + return v.tv.IsNil() +} + +type UnspecifiedValue struct { + Value +} + +func (u UnspecifiedValue) IsValueZero() bool { + return false +} + +func (u UnspecifiedValue) IsValueInt() bool { + return false +} + +func (u UnspecifiedValue) IsValueNumeric() bool { + return false +} + +func (u UnspecifiedValue) IsError() bool { + return false +} + +func (u UnspecifiedValue) IsFunc() bool { + return isFunc(u.GetValueExpr()) +} + +type ErrValue struct { + Value + err ast.Expr +} + +func (e ErrValue) IsValueZero() bool { + return false +} + +func (e ErrValue) IsValueInt() bool { + return false +} + +func (e ErrValue) IsValueNumeric() bool { + return false +} + +func (e ErrValue) IsError() bool { + return true +} + +func (e ErrValue) IsFunc() bool { + return isFunc(e.GetValueExpr()) +} + +type IntValuer interface { + GetIntValue() int64 +} + +type IntValue struct { + Value + val int64 +} + +func (i IntValue) IsValueZero() bool { + return i.val == 0 +} + +func (i IntValue) IsValueInt() bool { + return i.val == 0 +} + +func (i IntValue) IsValueNumeric() bool { + return true +} + +func (i IntValue) IsError() bool { + return false +} + +func (i IntValue) IsFunc() bool { + return false +} + +func (i IntValue) GetIntValue() int64 { + return i.val +} + +func isFunc(exp ast.Expr) bool { + return Is[*ast.CallExpr](exp) +} + +func Is[T any](x any) bool { + _, matchType := x.(T) + return matchType +} + +func IsExprError(pass *analysis.Pass, expr ast.Expr) bool { + actualArgType := pass.TypesInfo.TypeOf(expr) + switch t := actualArgType.(type) { + case *gotypes.Named: + return interfaces.ImplementsError(actualArgType) + + case *gotypes.Pointer: + if tt, ok := t.Elem().(*gotypes.Named); ok { + return interfaces.ImplementsError(tt) + } + + case *gotypes.Tuple: + if t.Len() > 0 { + switch t0 := t.At(0).Type().(type) { + case *gotypes.Named, *gotypes.Pointer: + if interfaces.ImplementsError(t0) { + return true + } + } + } + } + return 
false +} + +func IsNil(exp ast.Expr, pass *analysis.Pass) bool { + id, ok := exp.(*ast.Ident) + if !ok { + return false + } + + return pass.TypesInfo.Types[id].IsNil() +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/formatter/formatter.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/formatter/formatter.go new file mode 100644 index 0000000000..64f3d99ad6 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/formatter/formatter.go @@ -0,0 +1,22 @@ +package formatter + +import ( + "bytes" + "go/ast" + "go/printer" + "go/token" +) + +type GoFmtFormatter struct { + fset *token.FileSet +} + +func NewGoFmtFormatter(fset *token.FileSet) *GoFmtFormatter { + return &GoFmtFormatter{fset: fset} +} + +func (f GoFmtFormatter) Format(exp ast.Expr) string { + var buf bytes.Buffer + _ = printer.Fprint(&buf, f.fset, exp) + return buf.String() +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/dothandler.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/dothandler.go new file mode 100644 index 0000000000..9c54b43346 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/dothandler.go @@ -0,0 +1,36 @@ +package ginkgohandler + +import ( + "go/ast" + + "golang.org/x/tools/go/analysis" + + "github.com/nunnatsa/ginkgolinter/types" +) + +// dotHandler is used when importing ginkgo with dot; i.e. +// import . "github.com/onsi/ginkgo" +type dotHandler struct{} + +func (h dotHandler) HandleGinkgoSpecs(expr ast.Expr, config types.Config, pass *analysis.Pass) bool { + return handleGinkgoSpecs(expr, config, pass, h) +} + +func (h dotHandler) getFocusContainerName(exp *ast.CallExpr) (bool, *ast.Ident) { + if fun, ok := exp.Fun.(*ast.Ident); ok { + return isFocusContainer(fun.Name), fun + } + return false, nil +} + +func (h dotHandler) isWrapContainer(exp *ast.CallExpr) bool { + if fun, ok := exp.Fun.(*ast.Ident); ok { + return isWrapContainer(fun.Name) + } + return false +} + +func (h dotHandler) isFocusSpec(exp ast.Expr) bool { + id, ok := exp.(*ast.Ident) + return ok && id.Name == focusSpec +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/ginkgoinfo.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/ginkgoinfo.go new file mode 100644 index 0000000000..d8bb753992 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/ginkgoinfo.go @@ -0,0 +1,63 @@ +package ginkgohandler + +const ( // container names + describe = "Describe" + pdescribe = "PDescribe" + xdescribe = "XDescribe" + fdescribe = "FDescribe" + + when = "When" + pwhen = "PWhen" + xwhen = "XWhen" + fwhen = "FWhen" + + contextContainer = "Context" + pcontext = "PContext" + xcontext = "XContext" + fcontext = "FContext" + + it = "It" + pit = "PIt" + xit = "XIt" + fit = "FIt" + + describeTable = "DescribeTable" + pdescribeTable = "PDescribeTable" + xdescribeTable = "XDescribeTable" + fdescribeTable = "FDescribeTable" + + entry = "Entry" + pentry = "PEntry" + xentry = "XEntry" + fentry = "FEntry" +) + +func isFocusContainer(name string) bool { + switch name { + case fdescribe, fcontext, fwhen, fit, fdescribeTable, fentry: + return true + } + return false +} + +func isContainer(name string) bool { + switch name { + case it, when, contextContainer, describe, describeTable, entry, + pit, pwhen, pcontext, pdescribe, pdescribeTable, pentry, + xit, xwhen, xcontext, xdescribe, xdescribeTable, xentry: + return true + } + return isFocusContainer(name) +} + +func isWrapContainer(name string) 
bool { + switch name { + case when, contextContainer, describe, + fwhen, fcontext, fdescribe, + pwhen, pcontext, pdescribe, + xwhen, xcontext, xdescribe: + return true + } + + return false +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/handler.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/handler.go index f10d831840..c44e3e8d8c 100644 --- a/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/handler.go +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/handler.go @@ -2,6 +2,10 @@ package ginkgohandler import ( "go/ast" + + "golang.org/x/tools/go/analysis" + + "github.com/nunnatsa/ginkgolinter/types" ) const ( @@ -14,116 +18,31 @@ const ( // Handler provide different handling, depend on the way ginkgo was imported, whether // in imported with "." name, custom name or without any name. type Handler interface { - GetFocusContainerName(*ast.CallExpr) (bool, *ast.Ident) - IsWrapContainer(*ast.CallExpr) bool - IsFocusSpec(ident ast.Expr) bool + HandleGinkgoSpecs(ast.Expr, types.Config, *analysis.Pass) bool + getFocusContainerName(*ast.CallExpr) (bool, *ast.Ident) + isWrapContainer(*ast.CallExpr) bool + isFocusSpec(ident ast.Expr) bool } // GetGinkgoHandler returns a ginkgor handler according to the way ginkgo was imported in the specific file func GetGinkgoHandler(file *ast.File) Handler { for _, imp := range file.Imports { - if imp.Path.Value != importPath && imp.Path.Value != importPathV2 { - continue - } + switch imp.Path.Value { + + case importPath, importPathV2: + switch name := imp.Name.String(); { + case name == ".": + return dotHandler{} + case name == "": // import with no local name + return nameHandler("ginkgo") + default: + return nameHandler(name) + } - switch name := imp.Name.String(); { - case name == ".": - return dotHandler{} - case name == "": // import with no local name - return nameHandler("ginkgo") default: - return nameHandler(name) - } - } - - return nil // no ginkgo import; this file does not use ginkgo -} - -// dotHandler is used when importing ginkgo with dot; i.e. -// import . "github.com/onsi/ginkgo" -type dotHandler struct{} - -func (h dotHandler) GetFocusContainerName(exp *ast.CallExpr) (bool, *ast.Ident) { - if fun, ok := exp.Fun.(*ast.Ident); ok { - return isFocusContainer(fun.Name), fun - } - return false, nil -} - -func (h dotHandler) IsWrapContainer(exp *ast.CallExpr) bool { - if fun, ok := exp.Fun.(*ast.Ident); ok { - return IsWrapContainer(fun.Name) - } - return false -} - -func (h dotHandler) IsFocusSpec(exp ast.Expr) bool { - id, ok := exp.(*ast.Ident) - return ok && id.Name == focusSpec -} - -// nameHandler is used when importing ginkgo without name; i.e. -// import "github.com/onsi/ginkgo" -// -// or with a custom name; e.g. 
-// import customname "github.com/onsi/ginkgo" -type nameHandler string - -func (h nameHandler) GetFocusContainerName(exp *ast.CallExpr) (bool, *ast.Ident) { - if sel, ok := exp.Fun.(*ast.SelectorExpr); ok { - if id, ok := sel.X.(*ast.Ident); ok && id.Name == string(h) { - return isFocusContainer(sel.Sel.Name), sel.Sel - } - } - return false, nil -} - -func (h nameHandler) IsWrapContainer(exp *ast.CallExpr) bool { - if sel, ok := exp.Fun.(*ast.SelectorExpr); ok { - if id, ok := sel.X.(*ast.Ident); ok && id.Name == string(h) { - return IsWrapContainer(sel.Sel.Name) - } - } - return false - -} - -func (h nameHandler) IsFocusSpec(exp ast.Expr) bool { - if selExp, ok := exp.(*ast.SelectorExpr); ok { - if x, ok := selExp.X.(*ast.Ident); ok && x.Name == string(h) { - return selExp.Sel.Name == focusSpec + continue } } - return false -} - -func isFocusContainer(name string) bool { - switch name { - case "FDescribe", "FContext", "FWhen", "FIt", "FDescribeTable", "FEntry": - return true - } - return false -} - -func IsContainer(name string) bool { - switch name { - case "It", "When", "Context", "Describe", "DescribeTable", "Entry", - "PIt", "PWhen", "PContext", "PDescribe", "PDescribeTable", "PEntry", - "XIt", "XWhen", "XContext", "XDescribe", "XDescribeTable", "XEntry": - return true - } - return isFocusContainer(name) -} - -func IsWrapContainer(name string) bool { - switch name { - case "When", "Context", "Describe", - "FWhen", "FContext", "FDescribe", - "PWhen", "PContext", "PDescribe", - "XWhen", "XContext", "XDescribe": - return true - } - - return false + return nil } diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/handling.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/handling.go new file mode 100644 index 0000000000..4b6de57679 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/handling.go @@ -0,0 +1,195 @@ +package ginkgohandler + +import ( + "fmt" + "go/ast" + "go/token" + + "golang.org/x/tools/go/analysis" + + "github.com/nunnatsa/ginkgolinter/types" +) + +const ( + linterName = "ginkgo-linter" + focusContainerFound = linterName + ": Focus container found. This is used only for local debug and should not be part of the actual source code. Consider to replace with %q" + focusSpecFound = linterName + ": Focus spec found. This is used only for local debug and should not be part of the actual source code. 
Consider to remove it" + useBeforeEachTemplate = "use BeforeEach() to assign variable %s" +) + +func handleGinkgoSpecs(expr ast.Expr, config types.Config, pass *analysis.Pass, ginkgoHndlr Handler) bool { + goDeeper := false + if exp, ok := expr.(*ast.CallExpr); ok { + if bool(config.ForbidFocus) && checkFocusContainer(pass, ginkgoHndlr, exp) { + goDeeper = true + } + + if bool(config.ForbidSpecPollution) && checkAssignmentsInContainer(pass, ginkgoHndlr, exp) { + goDeeper = true + } + } + return goDeeper +} + +func checkAssignmentsInContainer(pass *analysis.Pass, ginkgoHndlr Handler, exp *ast.CallExpr) bool { + foundSomething := false + if ginkgoHndlr.isWrapContainer(exp) { + for _, arg := range exp.Args { + if fn, ok := arg.(*ast.FuncLit); ok { + if fn.Body != nil { + if checkAssignments(pass, fn.Body.List) { + foundSomething = true + } + break + } + } + } + } + + return foundSomething +} + +func checkAssignments(pass *analysis.Pass, list []ast.Stmt) bool { + foundSomething := false + for _, stmt := range list { + switch st := stmt.(type) { + case *ast.DeclStmt: + if checkAssignmentDecl(pass, st) { + foundSomething = true + } + + case *ast.AssignStmt: + if checkAssignmentAssign(pass, st) { + foundSomething = true + } + + case *ast.IfStmt: + if checkAssignmentIf(pass, st) { + foundSomething = true + } + } + } + + return foundSomething +} + +func checkAssignmentsValues(pass *analysis.Pass, names []*ast.Ident, values []ast.Expr) bool { + foundSomething := false + for i, val := range values { + if !is[*ast.FuncLit](val) { + reportNoFix(pass, names[i].Pos(), useBeforeEachTemplate, names[i].Name) + foundSomething = true + } + } + + return foundSomething +} + +func checkAssignmentDecl(pass *analysis.Pass, ds *ast.DeclStmt) bool { + foundSomething := false + if gen, ok := ds.Decl.(*ast.GenDecl); ok { + if gen.Tok != token.VAR { + return false + } + for _, spec := range gen.Specs { + if valSpec, ok := spec.(*ast.ValueSpec); ok { + if checkAssignmentsValues(pass, valSpec.Names, valSpec.Values) { + foundSomething = true + } + } + } + } + + return foundSomething +} + +func checkAssignmentAssign(pass *analysis.Pass, as *ast.AssignStmt) bool { + foundSomething := false + for i, val := range as.Rhs { + if !is[*ast.FuncLit](val) { + if id, isIdent := as.Lhs[i].(*ast.Ident); isIdent && id.Name != "_" { + reportNoFix(pass, id.Pos(), useBeforeEachTemplate, id.Name) + foundSomething = true + } + } + } + return foundSomething +} + +func checkAssignmentIf(pass *analysis.Pass, is *ast.IfStmt) bool { + foundSomething := false + + if is.Body != nil { + if checkAssignments(pass, is.Body.List) { + foundSomething = true + } + } + if is.Else != nil { + if block, isBlock := is.Else.(*ast.BlockStmt); isBlock { + if checkAssignments(pass, block.List) { + foundSomething = true + } + } + } + + return foundSomething +} + +func checkFocusContainer(pass *analysis.Pass, handler Handler, exp *ast.CallExpr) bool { + foundFocus := false + isFocus, id := handler.getFocusContainerName(exp) + if isFocus { + reportNewName(pass, id, id.Name[1:], id.Name) + foundFocus = true + } + + if id != nil && isContainer(id.Name) { + for _, arg := range exp.Args { + if handler.isFocusSpec(arg) { + reportNoFix(pass, arg.Pos(), focusSpecFound) + foundFocus = true + } else if callExp, ok := arg.(*ast.CallExpr); ok { + if checkFocusContainer(pass, handler, callExp) { // handle table entries + foundFocus = true + } + } + } + } + + return foundFocus +} + +func reportNewName(pass *analysis.Pass, id *ast.Ident, newName string, oldExpr string) { + 
pass.Report(analysis.Diagnostic{ + Pos: id.Pos(), + Message: fmt.Sprintf(focusContainerFound, newName), + SuggestedFixes: []analysis.SuggestedFix{ + { + Message: fmt.Sprintf("should replace %s with %s", oldExpr, newName), + TextEdits: []analysis.TextEdit{ + { + Pos: id.Pos(), + End: id.End(), + NewText: []byte(newName), + }, + }, + }, + }, + }) +} + +func reportNoFix(pass *analysis.Pass, pos token.Pos, message string, args ...any) { + if len(args) > 0 { + message = fmt.Sprintf(message, args...) + } + + pass.Report(analysis.Diagnostic{ + Pos: pos, + Message: message, + }) +} + +func is[T any](x any) bool { + _, matchType := x.(T) + return matchType +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/namehandler.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/namehandler.go new file mode 100644 index 0000000000..2ef9fe703c --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/namehandler.go @@ -0,0 +1,49 @@ +package ginkgohandler + +import ( + "go/ast" + + "golang.org/x/tools/go/analysis" + + "github.com/nunnatsa/ginkgolinter/types" +) + +// nameHandler is used when importing ginkgo without name; i.e. +// import "github.com/onsi/ginkgo" +// +// or with a custom name; e.g. +// import customname "github.com/onsi/ginkgo" +type nameHandler string + +func (h nameHandler) HandleGinkgoSpecs(expr ast.Expr, config types.Config, pass *analysis.Pass) bool { + return handleGinkgoSpecs(expr, config, pass, h) +} + +func (h nameHandler) getFocusContainerName(exp *ast.CallExpr) (bool, *ast.Ident) { + if sel, ok := exp.Fun.(*ast.SelectorExpr); ok { + if id, ok := sel.X.(*ast.Ident); ok && id.Name == string(h) { + return isFocusContainer(sel.Sel.Name), sel.Sel + } + } + return false, nil +} + +func (h nameHandler) isWrapContainer(exp *ast.CallExpr) bool { + if sel, ok := exp.Fun.(*ast.SelectorExpr); ok { + if id, ok := sel.X.(*ast.Ident); ok && id.Name == string(h) { + return isWrapContainer(sel.Sel.Name) + } + } + return false + +} + +func (h nameHandler) isFocusSpec(exp ast.Expr) bool { + if selExp, ok := exp.(*ast.SelectorExpr); ok { + if x, ok := selExp.X.(*ast.Ident); ok && x.Name == string(h) { + return selExp.Sel.Name == focusSpec + } + } + + return false +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/dothandler.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/dothandler.go new file mode 100644 index 0000000000..bd3b93992f --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/dothandler.go @@ -0,0 +1,99 @@ +package gomegahandler + +import ( + "go/ast" + + "golang.org/x/tools/go/analysis" + + "github.com/nunnatsa/ginkgolinter/internal/gomegainfo" +) + +// dotHandler is used when importing gomega with dot; i.e. +// import . "github.com/onsi/gomega" +type dotHandler struct { + pass *analysis.Pass +} + +// GetActualFuncName returns the name of the gomega function, e.g. 
`Expect` +func (h dotHandler) GetActualFuncName(expr *ast.CallExpr) (string, bool) { + switch actualFunc := expr.Fun.(type) { + case *ast.Ident: + return actualFunc.Name, true + case *ast.SelectorExpr: + if h.isGomegaVar(actualFunc.X) { + return actualFunc.Sel.Name, true + } + + if x, ok := actualFunc.X.(*ast.CallExpr); ok { + return h.GetActualFuncName(x) + } + + case *ast.CallExpr: + return h.GetActualFuncName(actualFunc) + } + return "", false +} + +// ReplaceFunction replaces the function with another one, for fix suggestions +func (dotHandler) ReplaceFunction(caller *ast.CallExpr, newExpr *ast.Ident) { + switch f := caller.Fun.(type) { + case *ast.Ident: + caller.Fun = newExpr + case *ast.SelectorExpr: + f.Sel = newExpr + } +} + +func (dotHandler) GetNewWrapperMatcher(name string, existing *ast.CallExpr) *ast.CallExpr { + return &ast.CallExpr{ + Fun: ast.NewIdent(name), + Args: []ast.Expr{existing}, + } +} + +func (h dotHandler) GetActualExpr(assertionFunc *ast.SelectorExpr) *ast.CallExpr { + actualExpr, ok := assertionFunc.X.(*ast.CallExpr) + if !ok { + return nil + } + + switch fun := actualExpr.Fun.(type) { + case *ast.Ident: + return actualExpr + case *ast.SelectorExpr: + if gomegainfo.IsActualMethod(fun.Sel.Name) { + if h.isGomegaVar(fun.X) { + return actualExpr + } + } else { + return h.GetActualExpr(fun) + } + } + return nil +} + +func (h dotHandler) GetActualExprClone(origFunc, funcClone *ast.SelectorExpr) *ast.CallExpr { + actualExpr, ok := funcClone.X.(*ast.CallExpr) + if !ok { + return nil + } + + switch funClone := actualExpr.Fun.(type) { + case *ast.Ident: + return actualExpr + case *ast.SelectorExpr: + origFun := origFunc.X.(*ast.CallExpr).Fun.(*ast.SelectorExpr) + if gomegainfo.IsActualMethod(funClone.Sel.Name) { + if h.isGomegaVar(origFun.X) { + return actualExpr + } + } else { + return h.GetActualExprClone(origFun, funClone) + } + } + return nil +} + +func (h dotHandler) isGomegaVar(x ast.Expr) bool { + return gomegainfo.IsGomegaVar(x, h.pass) +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/handler.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/handler.go index 4290e73736..4dba604a4f 100644 --- a/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/handler.go +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/handler.go @@ -2,7 +2,8 @@ package gomegahandler import ( "go/ast" - "go/token" + + "golang.org/x/tools/go/analysis" ) const ( @@ -17,15 +18,15 @@ type Handler interface { // ReplaceFunction replaces the function with another one, for fix suggestions ReplaceFunction(*ast.CallExpr, *ast.Ident) - getDefFuncName(expr *ast.CallExpr) string + GetActualExpr(assertionFunc *ast.SelectorExpr) *ast.CallExpr - getFieldType(field *ast.Field) string + GetActualExprClone(origFunc, funcClone *ast.SelectorExpr) *ast.CallExpr - GetActualExpr(assertionFunc *ast.SelectorExpr) *ast.CallExpr + GetNewWrapperMatcher(name string, existing *ast.CallExpr) *ast.CallExpr } // GetGomegaHandler returns a gomegar handler according to the way gomega was imported in the specific file -func GetGomegaHandler(file *ast.File) Handler { +func GetGomegaHandler(file *ast.File, pass *analysis.Pass) Handler { for _, imp := range file.Imports { if imp.Path.Value != importPath { continue @@ -33,209 +34,15 @@ func GetGomegaHandler(file *ast.File) Handler { switch name := imp.Name.String(); { case name == ".": - return dotHandler{} + return &dotHandler{ + pass: pass, + } case name == "": // import with no local name - return 
nameHandler("gomega") + return &nameHandler{name: "gomega", pass: pass} default: - return nameHandler(name) + return &nameHandler{name: name, pass: pass} } } return nil // no gomega import; this file does not use gomega } - -// dotHandler is used when importing gomega with dot; i.e. -// import . "github.com/onsi/gomega" -type dotHandler struct{} - -// GetActualFuncName returns the name of the gomega function, e.g. `Expect` -func (h dotHandler) GetActualFuncName(expr *ast.CallExpr) (string, bool) { - switch actualFunc := expr.Fun.(type) { - case *ast.Ident: - return actualFunc.Name, true - case *ast.SelectorExpr: - if isGomegaVar(actualFunc.X, h) { - return actualFunc.Sel.Name, true - } - - if x, ok := actualFunc.X.(*ast.CallExpr); ok { - return h.GetActualFuncName(x) - } - - case *ast.CallExpr: - return h.GetActualFuncName(actualFunc) - } - return "", false -} - -// ReplaceFunction replaces the function with another one, for fix suggestions -func (dotHandler) ReplaceFunction(caller *ast.CallExpr, newExpr *ast.Ident) { - switch f := caller.Fun.(type) { - case *ast.Ident: - caller.Fun = newExpr - case *ast.SelectorExpr: - f.Sel = newExpr - } -} - -func (dotHandler) getDefFuncName(expr *ast.CallExpr) string { - if f, ok := expr.Fun.(*ast.Ident); ok { - return f.Name - } - return "" -} - -func (dotHandler) getFieldType(field *ast.Field) string { - switch t := field.Type.(type) { - case *ast.Ident: - return t.Name - case *ast.StarExpr: - if name, ok := t.X.(*ast.Ident); ok { - return name.Name - } - } - return "" -} - -// nameHandler is used when importing gomega without name; i.e. -// import "github.com/onsi/gomega" -// -// or with a custom name; e.g. -// import customname "github.com/onsi/gomega" -type nameHandler string - -// GetActualFuncName returns the name of the gomega function, e.g. 
`Expect` -func (g nameHandler) GetActualFuncName(expr *ast.CallExpr) (string, bool) { - selector, ok := expr.Fun.(*ast.SelectorExpr) - if !ok { - return "", false - } - - switch x := selector.X.(type) { - case *ast.Ident: - if x.Name != string(g) { - if !isGomegaVar(x, g) { - return "", false - } - } - - return selector.Sel.Name, true - - case *ast.CallExpr: - return g.GetActualFuncName(x) - } - - return "", false -} - -// ReplaceFunction replaces the function with another one, for fix suggestions -func (nameHandler) ReplaceFunction(caller *ast.CallExpr, newExpr *ast.Ident) { - caller.Fun.(*ast.SelectorExpr).Sel = newExpr -} - -func (g nameHandler) getDefFuncName(expr *ast.CallExpr) string { - if sel, ok := expr.Fun.(*ast.SelectorExpr); ok { - if f, ok := sel.X.(*ast.Ident); ok && f.Name == string(g) { - return sel.Sel.Name - } - } - return "" -} - -func (g nameHandler) getFieldType(field *ast.Field) string { - switch t := field.Type.(type) { - case *ast.SelectorExpr: - if id, ok := t.X.(*ast.Ident); ok { - if id.Name == string(g) { - return t.Sel.Name - } - } - case *ast.StarExpr: - if sel, ok := t.X.(*ast.SelectorExpr); ok { - if x, ok := sel.X.(*ast.Ident); ok && x.Name == string(g) { - return sel.Sel.Name - } - } - - } - return "" -} - -func isGomegaVar(x ast.Expr, handler Handler) bool { - if i, ok := x.(*ast.Ident); ok { - if i.Obj != nil && i.Obj.Kind == ast.Var { - switch decl := i.Obj.Decl.(type) { - case *ast.AssignStmt: - if decl.Tok == token.DEFINE { - if defFunc, ok := decl.Rhs[0].(*ast.CallExpr); ok { - fName := handler.getDefFuncName(defFunc) - switch fName { - case "NewGomega", "NewWithT", "NewGomegaWithT": - return true - } - } - } - case *ast.Field: - name := handler.getFieldType(decl) - switch name { - case "Gomega", "WithT", "GomegaWithT": - return true - } - } - } - } - return false -} - -func (h dotHandler) GetActualExpr(assertionFunc *ast.SelectorExpr) *ast.CallExpr { - actualExpr, ok := assertionFunc.X.(*ast.CallExpr) - if !ok { - return nil - } - - switch fun := actualExpr.Fun.(type) { - case *ast.Ident: - return actualExpr - case *ast.SelectorExpr: - if isHelperMethods(fun.Sel.Name) { - return h.GetActualExpr(fun) - } - if isGomegaVar(fun.X, h) { - return actualExpr - } - } - return nil -} - -func (g nameHandler) GetActualExpr(assertionFunc *ast.SelectorExpr) *ast.CallExpr { - actualExpr, ok := assertionFunc.X.(*ast.CallExpr) - if !ok { - return nil - } - - switch fun := actualExpr.Fun.(type) { - case *ast.Ident: - return actualExpr - case *ast.SelectorExpr: - if x, ok := fun.X.(*ast.Ident); ok && x.Name == string(g) { - return actualExpr - } - if isHelperMethods(fun.Sel.Name) { - return g.GetActualExpr(fun) - } - - if isGomegaVar(fun.X, g) { - return actualExpr - } - } - return nil -} - -func isHelperMethods(funcName string) bool { - switch funcName { - case "WithOffset", "WithTimeout", "WithPolling", "Within", "ProbeEvery", "WithContext", "WithArguments", "MustPassRepeatedly": - return true - } - - return false -} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/namedhandler.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/namedhandler.go new file mode 100644 index 0000000000..712442426d --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/namedhandler.go @@ -0,0 +1,112 @@ +package gomegahandler + +import ( + "go/ast" + + "github.com/nunnatsa/ginkgolinter/internal/gomegainfo" + + "golang.org/x/tools/go/analysis" +) + +// nameHandler is used when importing gomega without name; i.e. 
+// import "github.com/onsi/gomega" +// +// or with a custom name; e.g. +// import customname "github.com/onsi/gomega" +type nameHandler struct { + name string + pass *analysis.Pass +} + +// GetActualFuncName returns the name of the gomega function, e.g. `Expect` +func (g nameHandler) GetActualFuncName(expr *ast.CallExpr) (string, bool) { + selector, ok := expr.Fun.(*ast.SelectorExpr) + if !ok { + return "", false + } + + switch x := selector.X.(type) { + case *ast.Ident: + if x.Name != g.name { + if !g.isGomegaVar(x) { + return "", false + } + } + + return selector.Sel.Name, true + + case *ast.CallExpr: + return g.GetActualFuncName(x) + } + + return "", false +} + +// ReplaceFunction replaces the function with another one, for fix suggestions +func (nameHandler) ReplaceFunction(caller *ast.CallExpr, newExpr *ast.Ident) { + caller.Fun.(*ast.SelectorExpr).Sel = newExpr +} + +func (g nameHandler) isGomegaVar(x ast.Expr) bool { + return gomegainfo.IsGomegaVar(x, g.pass) +} + +func (g nameHandler) GetActualExpr(assertionFunc *ast.SelectorExpr) *ast.CallExpr { + actualExpr, ok := assertionFunc.X.(*ast.CallExpr) + if !ok { + return nil + } + + switch fun := actualExpr.Fun.(type) { + case *ast.Ident: + return actualExpr + case *ast.SelectorExpr: + if x, ok := fun.X.(*ast.Ident); ok && x.Name == g.name { + return actualExpr + } + if gomegainfo.IsActualMethod(fun.Sel.Name) { + if g.isGomegaVar(fun.X) { + return actualExpr + } + } else { + return g.GetActualExpr(fun) + } + } + return nil +} + +func (g nameHandler) GetActualExprClone(origFunc, funcClone *ast.SelectorExpr) *ast.CallExpr { + actualExpr, ok := funcClone.X.(*ast.CallExpr) + if !ok { + return nil + } + + switch funClone := actualExpr.Fun.(type) { + case *ast.Ident: + return actualExpr + case *ast.SelectorExpr: + if x, ok := funClone.X.(*ast.Ident); ok && x.Name == g.name { + return actualExpr + } + origFun := origFunc.X.(*ast.CallExpr).Fun.(*ast.SelectorExpr) + if gomegainfo.IsActualMethod(funClone.Sel.Name) { + if g.isGomegaVar(origFun.X) { + return actualExpr + } + } else { + return g.GetActualExprClone(origFun, funClone) + } + + } + return nil +} + +func (g nameHandler) GetNewWrapperMatcher(name string, existing *ast.CallExpr) *ast.CallExpr { + return &ast.CallExpr{ + Fun: &ast.SelectorExpr{ + X: ast.NewIdent(g.name), + Sel: ast.NewIdent(name), + }, + Args: []ast.Expr{existing}, + } +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegainfo/gomegainfo.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegainfo/gomegainfo.go new file mode 100644 index 0000000000..ca45a34b27 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegainfo/gomegainfo.go @@ -0,0 +1,113 @@ +package gomegainfo + +import ( + "go/ast" + gotypes "go/types" + "regexp" + + "golang.org/x/tools/go/analysis" +) + +const ( // gomega actual method names + expect = "Expect" + expectWithOffset = "ExpectWithOffset" + omega = "Ω" + eventually = "Eventually" + eventuallyWithOffset = "EventuallyWithOffset" + consistently = "Consistently" + consistentlyWithOffset = "ConsistentlyWithOffset" +) + +const ( // assertion methods + to = "To" + toNot = "ToNot" + notTo = "NotTo" + should = "Should" + shouldNot = "ShouldNot" +) + +var funcOffsetMap = map[string]int{ + expect: 0, + expectWithOffset: 1, + omega: 0, + eventually: 0, + eventuallyWithOffset: 1, + consistently: 0, + consistentlyWithOffset: 1, +} + +func IsActualMethod(name string) bool { + _, found := funcOffsetMap[name] + return found +} + +func ActualArgOffset(methodName string) int { 
+ funcOffset, ok := funcOffsetMap[methodName] + if !ok { + return -1 + } + return funcOffset +} + +func GetAllowedAssertionMethods(actualMethodName string) string { + switch actualMethodName { + case expect, expectWithOffset: + return `"To()", "ToNot()" or "NotTo()"` + + case eventually, eventuallyWithOffset, consistently, consistentlyWithOffset: + return `"Should()" or "ShouldNot()"` + + case omega: + return `"Should()", "To()", "ShouldNot()", "ToNot()" or "NotTo()"` + + default: + return "" + } +} + +var asyncFuncSet = map[string]struct{}{ + eventually: {}, + eventuallyWithOffset: {}, + consistently: {}, + consistentlyWithOffset: {}, +} + +func IsAsyncActualMethod(name string) bool { + _, ok := asyncFuncSet[name] + return ok +} + +func IsAssertionFunc(name string) bool { + switch name { + case to, toNot, notTo, should, shouldNot: + return true + } + return false +} + +var gomegaTypeRegex = regexp.MustCompile(`github\.com/onsi/gomega/(?:internal|types)\.Gomega`) + +func IsGomegaVar(x ast.Expr, pass *analysis.Pass) bool { + if tx, ok := pass.TypesInfo.Types[x]; ok { + return IsGomegaType(tx.Type) + } + + return false +} + +func IsGomegaType(t gotypes.Type) bool { + var typeStr string + switch ttx := t.(type) { + case *gotypes.Pointer: + tp := ttx.Elem() + typeStr = tp.String() + + case *gotypes.Named: + typeStr = ttx.String() + + default: + return false + } + + return gomegaTypeRegex.MatchString(typeStr) +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/interfaces/interfaces.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/interfaces/interfaces.go index dafeacd4ff..91849ca563 100644 --- a/vendor/github.com/nunnatsa/ginkgolinter/internal/interfaces/interfaces.go +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/interfaces/interfaces.go @@ -72,5 +72,5 @@ func ImplementsError(t gotypes.Type) bool { } func ImplementsGomegaMatcher(t gotypes.Type) bool { - return gotypes.Implements(t, gomegaMatcherType) + return t != nil && gotypes.Implements(t, gomegaMatcherType) } diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/intervals/intervals.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/intervals/intervals.go index b8166bdb21..51d55166de 100644 --- a/vendor/github.com/nunnatsa/ginkgolinter/internal/intervals/intervals.go +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/intervals/intervals.go @@ -1,285 +1,166 @@ package intervals import ( - "errors" "go/ast" "go/constant" "go/token" gotypes "go/types" - "strconv" "time" "golang.org/x/tools/go/analysis" - - "github.com/nunnatsa/ginkgolinter/internal/gomegahandler" - "github.com/nunnatsa/ginkgolinter/internal/reports" ) -type noDurationIntervalErr struct { - value string -} - -func (err noDurationIntervalErr) Error() string { - return "only use time.Duration for timeout and polling in Eventually() or Consistently()" -} - -func CheckIntervals(pass *analysis.Pass, expr *ast.CallExpr, actualExpr *ast.CallExpr, reportBuilder *reports.Builder, handler gomegahandler.Handler, timePkg string, funcIndex int) { - var ( - timeout time.Duration - polling time.Duration - err error - ) - - timeoutOffset := funcIndex + 1 - if len(actualExpr.Args) > timeoutOffset { - timeout, err = getDuration(pass, actualExpr.Args[timeoutOffset], timePkg) - if err != nil { - suggestFix := false - if tryFixIntDuration(expr, err, handler, timePkg, timeoutOffset) { - suggestFix = true - } - reportBuilder.AddIssue(suggestFix, err.Error()) - } - pollingOffset := funcIndex + 2 - if len(actualExpr.Args) > pollingOffset { - polling, err = 
getDuration(pass, actualExpr.Args[pollingOffset], timePkg) - if err != nil { - suggestFix := false - if tryFixIntDuration(expr, err, handler, timePkg, pollingOffset) { - suggestFix = true +func GetDuration(pass *analysis.Pass, argOffset int, origInterval, intervalClone ast.Expr, timePkg string) DurationValue { + tv := pass.TypesInfo.Types[origInterval] + argType := tv.Type + if durType, ok := argType.(*gotypes.Named); ok { + if durType.String() == "time.Duration" { + if tv.Value != nil { + if val, ok := constant.Int64Val(tv.Value); ok { + return &RealDurationValue{ + dur: time.Duration(val), + expr: intervalClone, + } } - reportBuilder.AddIssue(suggestFix, err.Error()) + } + return &UnknownDurationTypeValue{ + expr: intervalClone, } } } - selExp := expr.Fun.(*ast.SelectorExpr) - for { - call, ok := selExp.X.(*ast.CallExpr) - if !ok { - break - } - - fun, ok := call.Fun.(*ast.SelectorExpr) - if !ok { - break - } - - switch fun.Sel.Name { - case "WithTimeout", "Within": - if timeout != 0 { - reportBuilder.AddIssue(false, "timeout defined more than once") - } else if len(call.Args) == 1 { - timeout, err = getDurationFromValue(pass, call.Args[0], timePkg) - if err != nil { - reportBuilder.AddIssue(false, err.Error()) + if basic, ok := argType.(*gotypes.Basic); ok && tv.Value != nil { + if basic.Info()&gotypes.IsInteger != 0 { + if num, ok := constant.Int64Val(tv.Value); ok { + return &NumericDurationValue{ + timePkg: timePkg, + numSeconds: num, + offset: argOffset, + dur: time.Duration(num) * time.Second, + expr: intervalClone, } } + } - case "WithPolling", "ProbeEvery": - if polling != 0 { - reportBuilder.AddIssue(false, "polling defined more than once") - } else if len(call.Args) == 1 { - polling, err = getDurationFromValue(pass, call.Args[0], timePkg) - if err != nil { - reportBuilder.AddIssue(false, err.Error()) + if basic.Info()&gotypes.IsFloat != 0 { + if num, ok := constant.Float64Val(tv.Value); ok { + return &NumericDurationValue{ + timePkg: timePkg, + numSeconds: int64(num), + offset: argOffset, + dur: time.Duration(num) * time.Second, + expr: intervalClone, } } } - - selExp = fun } - if timeout != 0 && polling != 0 && timeout < polling { - reportBuilder.AddIssue(false, "timeout must not be shorter than the polling interval") - } + return &UnknownDurationValue{expr: intervalClone} } -func tryFixIntDuration(expr *ast.CallExpr, err error, handler gomegahandler.Handler, timePkg string, offset int) bool { - suggestFix := false - var durErr noDurationIntervalErr - if errors.As(err, &durErr) { - if len(durErr.value) > 0 { - actualExpr := handler.GetActualExpr(expr.Fun.(*ast.SelectorExpr)) - var newArg ast.Expr - second := &ast.SelectorExpr{ - Sel: ast.NewIdent("Second"), - X: ast.NewIdent(timePkg), +func GetDurationFromValue(pass *analysis.Pass, orig, clone ast.Expr) DurationValue { + tv := pass.TypesInfo.Types[orig] + interval := tv.Value + if interval != nil { + if val, ok := constant.Int64Val(interval); ok { + return RealDurationValue{ + dur: time.Duration(val), + expr: orig, } - if durErr.value == "1" { - newArg = second - } else { - newArg = &ast.BinaryExpr{ - X: second, - Op: token.MUL, - Y: actualExpr.Args[offset], - } - } - actualExpr.Args[offset] = newArg - suggestFix = true } } - - return suggestFix + return UnknownDurationTypeValue{expr: clone} } -func getDuration(pass *analysis.Pass, interval ast.Expr, timePkg string) (time.Duration, error) { - argType := pass.TypesInfo.TypeOf(interval) - if durType, ok := argType.(*gotypes.Named); ok { - if durType.Obj().Name() == "Duration" && 
durType.Obj().Pkg().Name() == "time" { - return getDurationFromValue(pass, interval, timePkg) - } - } +type DurationValue interface { + Duration() time.Duration +} - value := "" - switch val := interval.(type) { - case *ast.BasicLit: - if val.Kind == token.INT { - value = val.Value - } - case *ast.Ident: - i, err := getConstDuration(pass, val, timePkg) - if err != nil || i == 0 { - return 0, nil - } - value = val.Name - } +type NumericValue interface { + GetOffset() int + GetDurationExpr() ast.Expr +} +type RealDurationValue struct { + dur time.Duration + expr ast.Expr +} - return 0, noDurationIntervalErr{value: value} +func (r RealDurationValue) Duration() time.Duration { + return r.dur } -func getDurationFromValue(pass *analysis.Pass, interval ast.Expr, timePkg string) (time.Duration, error) { - switch dur := interval.(type) { - case *ast.SelectorExpr: - ident, ok := dur.X.(*ast.Ident) - if ok { - if ident.Name == timePkg { - return getTimeDurationValue(dur) - } - return getDurationFromValue(pass, dur.Sel, timePkg) - } - case *ast.BinaryExpr: - return getBinaryExprDuration(pass, dur, timePkg) +type NumericDurationValue struct { + timePkg string + numSeconds int64 + offset int + dur time.Duration + expr ast.Expr +} - case *ast.Ident: - return getConstDuration(pass, dur, timePkg) - } +func (r *NumericDurationValue) Duration() time.Duration { + return r.dur +} - return 0, nil +func (r *NumericDurationValue) GetOffset() int { + return r.offset } -func getConstDuration(pass *analysis.Pass, ident *ast.Ident, timePkg string) (time.Duration, error) { - o := pass.TypesInfo.ObjectOf(ident) - if o != nil { - if c, ok := o.(*gotypes.Const); ok { - if c.Val().Kind() == constant.Int { - i, err := strconv.Atoi(c.Val().String()) - if err != nil { - return 0, nil - } - return time.Duration(i), nil - } - } +func (r *NumericDurationValue) GetDurationExpr() ast.Expr { + var newArg ast.Expr + second := &ast.SelectorExpr{ + Sel: ast.NewIdent("Second"), + X: ast.NewIdent(r.timePkg), } - if ident.Obj != nil && ident.Obj.Kind == ast.Con && ident.Obj.Decl != nil { - if vals, ok := ident.Obj.Decl.(*ast.ValueSpec); ok { - if len(vals.Values) == 1 { - switch val := vals.Values[0].(type) { - case *ast.BasicLit: - if val.Kind == token.INT { - i, err := strconv.Atoi(val.Value) - if err != nil { - return 0, nil - } - return time.Duration(i), nil - } - return 0, nil - case *ast.BinaryExpr: - return getBinaryExprDuration(pass, val, timePkg) - } - } + if r.numSeconds == 1 { + newArg = second + } else { + newArg = &ast.BinaryExpr{ + X: second, + Op: token.MUL, + Y: r.expr, } } - return 0, nil + return newArg } -func getTimeDurationValue(dur *ast.SelectorExpr) (time.Duration, error) { - switch dur.Sel.Name { - case "Nanosecond": - return time.Nanosecond, nil - case "Microsecond": - return time.Microsecond, nil - case "Millisecond": - return time.Millisecond, nil - case "Second": - return time.Second, nil - case "Minute": - return time.Minute, nil - case "Hour": - return time.Hour, nil - default: - return 0, errors.New("unknown duration value") // should never happen - } +type UnknownDurationValue struct { + expr ast.Expr } -func getBinaryExprDuration(pass *analysis.Pass, expr *ast.BinaryExpr, timePkg string) (time.Duration, error) { - x, err := getBinaryDurValue(pass, expr.X, timePkg) - if err != nil || x == 0 { - return 0, nil - } - y, err := getBinaryDurValue(pass, expr.Y, timePkg) - if err != nil || y == 0 { - return 0, nil - } +func (r UnknownDurationValue) Duration() time.Duration { + return 0 +} - switch expr.Op { - 
case token.ADD: - return x + y, nil - case token.SUB: - val := x - y - if val > 0 { - return val, nil - } - return 0, nil - case token.MUL: - return x * y, nil - case token.QUO: - if y == 0 { - return 0, nil - } - return x / y, nil - case token.REM: - if y == 0 { - return 0, nil - } - return x % y, nil - default: - return 0, nil - } +type UnknownNumericValue struct { + expr ast.Expr + offset int } -func getBinaryDurValue(pass *analysis.Pass, expr ast.Expr, timePkg string) (time.Duration, error) { - switch x := expr.(type) { - case *ast.SelectorExpr: - return getDurationFromValue(pass, x, timePkg) - case *ast.BinaryExpr: - return getBinaryExprDuration(pass, x, timePkg) - case *ast.BasicLit: - if x.Kind == token.INT { - val, err := strconv.Atoi(x.Value) - if err != nil { - return 0, err - } - return time.Duration(val), nil - } - case *ast.ParenExpr: - return getBinaryDurValue(pass, x.X, timePkg) +func (r UnknownNumericValue) Duration() time.Duration { + return 0 +} - case *ast.Ident: - return getConstDuration(pass, x, timePkg) +func (r UnknownNumericValue) GetDurationExpr() ast.Expr { + return &ast.BinaryExpr{ + X: &ast.SelectorExpr{ + Sel: ast.NewIdent("Second"), + X: ast.NewIdent("time"), + }, + Op: token.MUL, + Y: r.expr, } +} + +func (r UnknownNumericValue) GetOffset() int { + return r.offset +} + +type UnknownDurationTypeValue struct { + expr ast.Expr +} - return 0, nil +func (r UnknownDurationTypeValue) Duration() time.Duration { + return 0 } diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/reports/report-builder.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/reports/report-builder.go index c7f931ca75..dee88bd2c8 100644 --- a/vendor/github.com/nunnatsa/ginkgolinter/internal/reports/report-builder.go +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/reports/report-builder.go @@ -1,13 +1,13 @@ package reports import ( - "bytes" "fmt" "go/ast" - "go/printer" "go/token" "strings" + "github.com/nunnatsa/ginkgolinter/internal/formatter" + "golang.org/x/tools/go/analysis" ) @@ -18,19 +18,25 @@ type Builder struct { issues []string fixOffer string suggestFix bool + formatter *formatter.GoFmtFormatter } -func NewBuilder(fset *token.FileSet, oldExpr ast.Expr) *Builder { +func NewBuilder(oldExpr ast.Expr, expFormatter *formatter.GoFmtFormatter) *Builder { b := &Builder{ pos: oldExpr.Pos(), end: oldExpr.End(), - oldExpr: goFmt(fset, oldExpr), + oldExpr: expFormatter.Format(oldExpr), suggestFix: false, + formatter: expFormatter, } return b } +func (b *Builder) OldExp() string { + return b.oldExpr +} + func (b *Builder) AddIssue(suggestFix bool, issue string, args ...any) { if len(args) > 0 { issue = fmt.Sprintf(issue, args...) 
@@ -42,9 +48,11 @@ func (b *Builder) AddIssue(suggestFix bool, issue string, args ...any) { } } -func (b *Builder) SetFixOffer(fset *token.FileSet, fixOffer ast.Expr) { - if offer := goFmt(fset, fixOffer); offer != b.oldExpr { - b.fixOffer = offer +func (b *Builder) SetFixOffer(fixOffer ast.Expr) { + if b.suggestFix { + if offer := b.formatter.Format(fixOffer); offer != b.oldExpr { + b.fixOffer = offer + } } } @@ -76,10 +84,8 @@ func (b *Builder) Build() analysis.Diagnostic { return diagnostic } -func goFmt(fset *token.FileSet, x ast.Expr) string { - var b bytes.Buffer - _ = printer.Fprint(&b, fset, x) - return b.String() +func (b *Builder) FormatExpr(expr ast.Expr) string { + return b.formatter.Format(expr) } func (b *Builder) getMessage() string { diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/asyncfunccallrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/asyncfunccallrule.go new file mode 100644 index 0000000000..e4eda7f6cf --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/asyncfunccallrule.go @@ -0,0 +1,41 @@ +package rules + +import ( + "github.com/nunnatsa/ginkgolinter/internal/expression" + "github.com/nunnatsa/ginkgolinter/internal/reports" + "github.com/nunnatsa/ginkgolinter/types" +) + +const valueInEventually = "use a function call in %[1]s. This actually checks nothing, because %[1]s receives the function returned value, instead of function itself, and this value is never changed" + +// AsyncFuncCallRule checks that there is no function call actual parameter, +// in an async actual method (e.g. Eventually). +// +// Async actual methods should get the function itself, not a function call, because +// then there is no async operation at all, and we're waiting for the function to be +// returned before calling the assertion. +// +// We do allow functions that return a function, a channel or a pointer. 
+type AsyncFuncCallRule struct{} + +func (r AsyncFuncCallRule) isApplied(gexp *expression.GomegaExpression, config types.Config) bool { + if bool(config.SuppressAsync) || !gexp.IsAsync() { + return false + } + + if asyncArg := gexp.GetAsyncActualArg(); asyncArg != nil { + return !asyncArg.IsValid() + } + + return false +} + +func (r AsyncFuncCallRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool { + if r.isApplied(gexp, config) { + + gexp.AppendWithArgsToActual() + + reportBuilder.AddIssue(true, valueInEventually, gexp.GetActualFuncName()) + } + return false +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/asyncsucceedrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/asyncsucceedrule.go new file mode 100644 index 0000000000..803c705deb --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/asyncsucceedrule.go @@ -0,0 +1,30 @@ +package rules + +import ( + "github.com/nunnatsa/ginkgolinter/internal/expression" + "github.com/nunnatsa/ginkgolinter/internal/expression/actual" + "github.com/nunnatsa/ginkgolinter/internal/expression/matcher" + "github.com/nunnatsa/ginkgolinter/internal/reports" + "github.com/nunnatsa/ginkgolinter/types" +) + +type AsyncSucceedRule struct{} + +func (AsyncSucceedRule) isApply(gexp *expression.GomegaExpression) bool { + return gexp.IsAsync() && + gexp.MatcherTypeIs(matcher.SucceedMatcherType) && + gexp.ActualArgTypeIs(actual.FuncSigArgType) && + !gexp.ActualArgTypeIs(actual.ErrorTypeArgType|actual.GomegaParamArgType) +} + +func (r AsyncSucceedRule) Apply(gexp *expression.GomegaExpression, _ types.Config, reportBuilder *reports.Builder) bool { + if r.isApply(gexp) { + if gexp.ActualArgTypeIs(actual.MultiRetsArgType) { + reportBuilder.AddIssue(false, "Success matcher does not support multiple values") + } else { + reportBuilder.AddIssue(false, "Success matcher only support a single error value, or function with Gomega as its first parameter") + } + } + + return false +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/asynctimeintervalsrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/asynctimeintervalsrule.go new file mode 100644 index 0000000000..45953ec01e --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/asynctimeintervalsrule.go @@ -0,0 +1,79 @@ +package rules + +import ( + "go/ast" + "time" + + "github.com/nunnatsa/ginkgolinter/internal/expression" + "github.com/nunnatsa/ginkgolinter/internal/intervals" + "github.com/nunnatsa/ginkgolinter/internal/reports" + "github.com/nunnatsa/ginkgolinter/types" +) + +const ( + multipleTimeouts = "timeout defined more than once" + multiplePolling = "polling defined more than once" + onlyUseTimeDurationForInterval = "only use time.Duration for timeout and polling in Eventually() or Consistently()" + pollingGreaterThanTimeout = "timeout must not be shorter than the polling interval" +) + +type AsyncTimeIntervalsRule struct{} + +func (r AsyncTimeIntervalsRule) isApplied(gexp *expression.GomegaExpression, config types.Config) bool { + return !bool(config.SuppressAsync) && bool(config.ValidateAsyncIntervals) && gexp.IsAsync() +} + +func (r AsyncTimeIntervalsRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool { + if r.isApplied(gexp, config) { + asyncArg := gexp.GetAsyncActualArg() + if asyncArg.TooManyTimeouts() { + reportBuilder.AddIssue(false, multipleTimeouts) + } + + if asyncArg.TooManyPolling() { +
reportBuilder.AddIssue(false, multiplePolling) + } + + timeoutDuration := checkInterval(gexp, asyncArg.Timeout(), reportBuilder) + pollingDuration := checkInterval(gexp, asyncArg.Polling(), reportBuilder) + + if timeoutDuration > 0 && pollingDuration > 0 && pollingDuration > timeoutDuration { + reportBuilder.AddIssue(false, pollingGreaterThanTimeout) + } + } + + return false +} + +func checkInterval(gexp *expression.GomegaExpression, durVal intervals.DurationValue, reportBuilder *reports.Builder) time.Duration { + if durVal != nil { + switch to := durVal.(type) { + case *intervals.RealDurationValue, *intervals.UnknownDurationTypeValue: + + case *intervals.NumericDurationValue: + if checkNumericInterval(gexp.GetActualClone(), to) { + reportBuilder.AddIssue(true, onlyUseTimeDurationForInterval) + } + + case *intervals.UnknownDurationValue: + reportBuilder.AddIssue(true, onlyUseTimeDurationForInterval) + } + + return durVal.Duration() + } + + return 0 +} + +func checkNumericInterval(intervalMethod *ast.CallExpr, interval intervals.DurationValue) bool { + if interval != nil { + if numVal, ok := interval.(intervals.NumericValue); ok { + if offset := numVal.GetOffset(); offset > 0 { + intervalMethod.Args[offset] = numVal.GetDurationExpr() + return true + } + } + } + + return false +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/caprule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/caprule.go new file mode 100644 index 0000000000..e3ad45d960 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/caprule.go @@ -0,0 +1,128 @@ +package rules + +import ( + "go/token" + + "github.com/nunnatsa/ginkgolinter/internal/expression" + "github.com/nunnatsa/ginkgolinter/internal/expression/actual" + "github.com/nunnatsa/ginkgolinter/internal/expression/matcher" + "github.com/nunnatsa/ginkgolinter/internal/reports" + "github.com/nunnatsa/ginkgolinter/types" +) + +const wrongCapWarningTemplate = "wrong cap assertion" + +// CapRule does not allow using the cap() function in actual with numeric comparison. +// it suggests to use the HaveLen matcher, instead. 
+type CapRule struct{} + +func (r *CapRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool { + + if !r.isApplied(gexp, config) { + return false + } + + if r.fixExpression(gexp) { + reportBuilder.AddIssue(true, wrongCapWarningTemplate) + return true + } + return false +} + +func (r *CapRule) isApplied(gexp *expression.GomegaExpression, config types.Config) bool { + if config.SuppressLen { + return false + } + + //matcherType := gexp.matcher.GetMatcherInfo().Type() + if gexp.ActualArgTypeIs(actual.CapFuncActualArgType) { + if gexp.MatcherTypeIs(matcher.EqualMatcherType | matcher.BeZeroMatcherType) { + return true + } + + if gexp.MatcherTypeIs(matcher.BeNumericallyMatcherType) { + mtchr := gexp.GetMatcherInfo().(*matcher.BeNumericallyMatcher) + return mtchr.GetOp() == token.EQL || mtchr.GetOp() == token.NEQ || gexp.MatcherTypeIs(matcher.EqualZero|matcher.GreaterThanZero) + } + } + + if gexp.ActualArgTypeIs(actual.CapComparisonActualArgType) && gexp.MatcherTypeIs(matcher.BeTrueMatcherType|matcher.BeFalseMatcherType|matcher.EqualBoolValueMatcherType) { + return true + } + + return false +} + +func (r *CapRule) fixExpression(gexp *expression.GomegaExpression) bool { + if gexp.ActualArgTypeIs(actual.CapFuncActualArgType) { + return r.fixEqual(gexp) + } + + if gexp.ActualArgTypeIs(actual.CapComparisonActualArgType) { + return r.fixComparison(gexp) + } + + return false +} + +func (r *CapRule) fixEqual(gexp *expression.GomegaExpression) bool { + matcherInfo := gexp.GetMatcherInfo() + switch mtchr := matcherInfo.(type) { + case *matcher.EqualMatcher: + gexp.SetMatcherCap(mtchr.GetValueExpr()) + + case *matcher.BeZeroMatcher: + gexp.SetMatcherCapZero() + + case *matcher.BeNumericallyMatcher: + if !r.handleBeNumerically(gexp, mtchr) { + return false + } + + default: + return false + } + + gexp.ReplaceActualWithItsFirstArg() + + return true +} + +func (r *CapRule) fixComparison(gexp *expression.GomegaExpression) bool { + actl := gexp.GetActualArg().(*actual.FuncComparisonPayload) + if op := actl.GetOp(); op == token.NEQ { + gexp.ReverseAssertionFuncLogic() + } else if op != token.EQL { + return false + } + + gexp.SetMatcherCap(actl.GetValueExpr()) + gexp.ReplaceActual(actl.GetFuncArg()) + + if gexp.MatcherTypeIs(matcher.BoolValueFalse) { + gexp.ReverseAssertionFuncLogic() + } + + return true +} + +func (r *CapRule) handleBeNumerically(gexp *expression.GomegaExpression, matcher *matcher.BeNumericallyMatcher) bool { + op := matcher.GetOp() + val := matcher.GetValue() + isValZero := val.String() == "0" + isValOne := val.String() == "1" + + if (op == token.GTR && isValZero) || (op == token.GEQ && isValOne) { + gexp.ReverseAssertionFuncLogic() + gexp.SetMatcherCapZero() + } else if op == token.EQL { + gexp.SetMatcherCap(matcher.GetValueExpr()) + } else if op == token.NEQ { + gexp.ReverseAssertionFuncLogic() + gexp.SetMatcherCap(matcher.GetValueExpr()) + } else { + return false + } + + return true +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/comparepointerrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/comparepointerrule.go new file mode 100644 index 0000000000..dcbea1bc97 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/comparepointerrule.go @@ -0,0 +1,64 @@ +package rules + +import ( + "github.com/nunnatsa/ginkgolinter/internal/expression" + "github.com/nunnatsa/ginkgolinter/internal/expression/actual" + "github.com/nunnatsa/ginkgolinter/internal/expression/matcher" + 
"github.com/nunnatsa/ginkgolinter/internal/reports" + "github.com/nunnatsa/ginkgolinter/types" +) + +const comparePointerToValue = "comparing a pointer to a value will always fail" + +type ComparePointRule struct{} + +func (r ComparePointRule) isApplied(gexp *expression.GomegaExpression) bool { + actl, ok := gexp.GetActualArg().(*actual.RegularArgPayload) + if !ok { + return false + } + + return actl.IsPointer() +} + +func (r ComparePointRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool { + if !r.isApplied(gexp) { + return false + } + + switch mtchr := gexp.GetMatcherInfo().(type) { + case *matcher.EqualMatcher: + if mtchr.IsPointer() || mtchr.IsInterface() { + return false + } + + case *matcher.BeEquivalentToMatcher: + if mtchr.IsPointer() || mtchr.IsInterface() || mtchr.IsNil() { + return false + } + + case *matcher.BeIdenticalToMatcher: + if mtchr.IsPointer() || mtchr.IsInterface() || mtchr.IsNil() { + return false + } + + case *matcher.EqualNilMatcher: + return false + + case *matcher.BeTrueMatcher, + *matcher.BeFalseMatcher, + *matcher.BeNumericallyMatcher, + *matcher.EqualTrueMatcher, + *matcher.EqualFalseMatcher: + + default: + return false + } + + getMatcherOnlyRules().Apply(gexp, config, reportBuilder) + + gexp.SetMatcherHaveValue() + reportBuilder.AddIssue(true, comparePointerToValue) + + return true +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/comparisonrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/comparisonrule.go new file mode 100644 index 0000000000..fb38529e0e --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/comparisonrule.go @@ -0,0 +1,75 @@ +package rules + +import ( + "go/token" + + "github.com/nunnatsa/ginkgolinter/internal/expression" + "github.com/nunnatsa/ginkgolinter/internal/expression/actual" + "github.com/nunnatsa/ginkgolinter/internal/expression/matcher" + "github.com/nunnatsa/ginkgolinter/internal/reports" + "github.com/nunnatsa/ginkgolinter/types" +) + +const wrongCompareWarningTemplate = "wrong comparison assertion" + +type ComparisonRule struct{} + +func (r ComparisonRule) isApplied(gexp *expression.GomegaExpression, config types.Config) bool { + if config.SuppressCompare { + return false + } + + return gexp.ActualArgTypeIs(actual.ComparisonActualArgType) +} + +func (r ComparisonRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool { + if !r.isApplied(gexp, config) { + return false + } + + actl, ok := gexp.GetActualArg().(actual.ComparisonActualPayload) + if !ok { + return false + } + + switch actl.GetOp() { + case token.EQL: + r.handleEqualComparison(gexp, actl) + + case token.NEQ: + gexp.ReverseAssertionFuncLogic() + r.handleEqualComparison(gexp, actl) + case token.GTR, token.GEQ, token.LSS, token.LEQ: + if !actl.GetRight().IsValueNumeric() { + return false + } + + gexp.SetMatcherBeNumerically(actl.GetOp(), actl.GetRight().GetValueExpr()) + + default: + return false + } + + if gexp.MatcherTypeIs(matcher.BoolValueFalse) { + gexp.ReverseAssertionFuncLogic() + } + + gexp.ReplaceActual(actl.GetLeft().GetValueExpr()) + + reportBuilder.AddIssue(true, wrongCompareWarningTemplate) + return true +} + +func (r ComparisonRule) handleEqualComparison(gexp *expression.GomegaExpression, actual actual.ComparisonActualPayload) { + if actual.GetRight().IsValueZero() { + gexp.SetMatcherBeZero() + } else { + left := actual.GetLeft() + arg := actual.GetRight().GetValueExpr() + if left.IsInterface() || 
left.IsPointer() { + gexp.SetMatcherBeIdenticalTo(arg) + } else { + gexp.SetMatcherEqual(arg) + } + } +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/doublenegativerule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/doublenegativerule.go new file mode 100644 index 0000000000..6ce7be5a54 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/doublenegativerule.go @@ -0,0 +1,30 @@ +package rules + +import ( + "github.com/nunnatsa/ginkgolinter/internal/expression" + "github.com/nunnatsa/ginkgolinter/internal/expression/matcher" + "github.com/nunnatsa/ginkgolinter/internal/reports" + "github.com/nunnatsa/ginkgolinter/types" +) + +const doubleNegativeWarningTemplate = "avoid double negative assertion" + +type DoubleNegativeRule struct{} + +func (DoubleNegativeRule) isApplied(gexp *expression.GomegaExpression) bool { + return gexp.MatcherTypeIs(matcher.BeFalseMatcherType) && + gexp.IsNegativeAssertion() +} + +func (r DoubleNegativeRule) Apply(gexp *expression.GomegaExpression, _ types.Config, reportBuilder *reports.Builder) bool { + if !r.isApplied(gexp) { + return false + } + + gexp.ReverseAssertionFuncLogic() + gexp.SetMatcherBeTrue() + + reportBuilder.AddIssue(true, doubleNegativeWarningTemplate) + + return true +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/equalboolrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/equalboolrule.go new file mode 100644 index 0000000000..e9eaa1b801 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/equalboolrule.go @@ -0,0 +1,36 @@ +package rules + +import ( + "github.com/nunnatsa/ginkgolinter/internal/expression" + "github.com/nunnatsa/ginkgolinter/internal/expression/matcher" + "github.com/nunnatsa/ginkgolinter/internal/reports" + "github.com/nunnatsa/ginkgolinter/types" +) + +const wrongBoolWarningTemplate = "wrong boolean assertion" + +type EqualBoolRule struct{} + +func (r EqualBoolRule) isApplied(gexp *expression.GomegaExpression) bool { + return gexp.MatcherTypeIs(matcher.EqualBoolValueMatcherType) +} + +func (r EqualBoolRule) Apply(gexp *expression.GomegaExpression, _ types.Config, reportBuilder *reports.Builder) bool { + if !r.isApplied(gexp) { + return false + } + + if gexp.MatcherTypeIs(matcher.BoolValueTrue) { + gexp.SetMatcherBeTrue() + } else { + if gexp.IsNegativeAssertion() { + gexp.ReverseAssertionFuncLogic() + gexp.SetMatcherBeTrue() + } else { + gexp.SetMatcherBeFalse() + } + } + + reportBuilder.AddIssue(true, wrongBoolWarningTemplate) + return true +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/equaldifferenttypesrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/equaldifferenttypesrule.go new file mode 100644 index 0000000000..81d703bb89 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/equaldifferenttypesrule.go @@ -0,0 +1,119 @@ +package rules + +import ( + gotypes "go/types" + + "github.com/nunnatsa/ginkgolinter/internal/expression" + "github.com/nunnatsa/ginkgolinter/internal/expression/matcher" + "github.com/nunnatsa/ginkgolinter/internal/reports" + "github.com/nunnatsa/ginkgolinter/types" +) + +const compareDifferentTypes = "use %[1]s with different types: Comparing %[2]s with %[3]s; either change the expected value type if possible, or use the BeEquivalentTo() matcher, instead of %[1]s()" + +type EqualDifferentTypesRule struct{} + +func (r EqualDifferentTypesRule) isApplied(config types.Config) bool { + return !bool(config.SuppressTypeCompare) +} + 
+func (r EqualDifferentTypesRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool { + if !r.isApplied(config) { + return false + } + + return r.checkEqualDifferentTypes(gexp, gexp.GetMatcher(), false, reportBuilder) +} + +func (r EqualDifferentTypesRule) checkEqualDifferentTypes(gexp *expression.GomegaExpression, mtchr *matcher.Matcher, parentPointer bool, reportBuilder *reports.Builder) bool { + actualType := gexp.GetActualArgGOType() + + if parentPointer { + if t, ok := actualType.(*gotypes.Pointer); ok { + actualType = t.Elem() + } + } + + var ( + matcherType gotypes.Type + matcherName string + ) + + switch specificMatcher := mtchr.GetMatcherInfo().(type) { + case *matcher.EqualMatcher: + matcherType = specificMatcher.GetType() + matcherName = specificMatcher.MatcherName() + + case *matcher.BeIdenticalToMatcher: + matcherType = specificMatcher.GetType() + matcherName = specificMatcher.MatcherName() + + case *matcher.HaveValueMatcher: + return r.checkEqualDifferentTypes(gexp, specificMatcher.GetNested(), true, reportBuilder) + + case *matcher.MultipleMatchersMatcher: + foundIssue := false + for i := range specificMatcher.Len() { + if r.checkEqualDifferentTypes(gexp, specificMatcher.At(i), parentPointer, reportBuilder) { + foundIssue = true + } + + } + return foundIssue + + case *matcher.EqualNilMatcher: + matcherType = specificMatcher.GetType() + matcherName = specificMatcher.MatcherName() + + case *matcher.WithTransformMatcher: + nested := specificMatcher.GetNested() + switch specificNested := nested.GetMatcherInfo().(type) { + case *matcher.EqualMatcher: + matcherType = specificNested.GetType() + matcherName = specificNested.MatcherName() + + case *matcher.BeIdenticalToMatcher: + matcherType = specificNested.GetType() + matcherName = specificNested.MatcherName() + + default: + return false + } + + actualType = specificMatcher.GetFuncType() + default: + return false + } + + if !gotypes.Identical(matcherType, actualType) { + if r.isImplementing(matcherType, actualType) || r.isImplementing(actualType, matcherType) { + return false + } + + reportBuilder.AddIssue(false, compareDifferentTypes, matcherName, actualType, matcherType) + return true + } + + return false +} + +func (r EqualDifferentTypesRule) isImplementing(ifs, impl gotypes.Type) bool { + if gotypes.IsInterface(ifs) { + + var ( + theIfs *gotypes.Interface + ok bool + ) + + for { + theIfs, ok = ifs.(*gotypes.Interface) + if ok { + break + } + ifs = ifs.Underlying() + } + + return gotypes.Implements(impl, theIfs) + } + return false +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/equalnilrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/equalnilrule.go new file mode 100644 index 0000000000..5b28e7d9b7 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/equalnilrule.go @@ -0,0 +1,29 @@ +package rules + +import ( + "github.com/nunnatsa/ginkgolinter/internal/expression" + "github.com/nunnatsa/ginkgolinter/internal/expression/matcher" + "github.com/nunnatsa/ginkgolinter/internal/reports" + "github.com/nunnatsa/ginkgolinter/types" +) + +// EqualNilRule validate that there is no use of Equal(nil) in the code +// It is part of assertion only rules +type EqualNilRule struct{} + +func (r EqualNilRule) isApplied(gexp *expression.GomegaExpression, config types.Config) bool { + return !bool(config.SuppressNil) && + gexp.MatcherTypeIs(matcher.EqualValueMatcherType) +} + +func (r EqualNilRule) Apply(gexp *expression.GomegaExpression, 
config types.Config, reportBuilder *reports.Builder) bool { + if !r.isApplied(gexp, config) { + return false + } + + gexp.SetMatcherBeNil() + + reportBuilder.AddIssue(true, wrongNilWarningTemplate) + + return true +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/errorequalnilrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/errorequalnilrule.go new file mode 100644 index 0000000000..7aaf7631b0 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/errorequalnilrule.go @@ -0,0 +1,35 @@ +package rules + +import ( + "github.com/nunnatsa/ginkgolinter/internal/expression" + "github.com/nunnatsa/ginkgolinter/internal/expression/actual" + "github.com/nunnatsa/ginkgolinter/internal/expression/matcher" + "github.com/nunnatsa/ginkgolinter/internal/expression/value" + "github.com/nunnatsa/ginkgolinter/internal/reports" + "github.com/nunnatsa/ginkgolinter/types" +) + +type ErrorEqualNilRule struct{} + +func (ErrorEqualNilRule) isApplied(gexp *expression.GomegaExpression, config types.Config) bool { + return !bool(config.SuppressErr) && + gexp.ActualArgTypeIs(actual.ErrorTypeArgType) && + gexp.MatcherTypeIs(matcher.BeNilMatcherType|matcher.EqualNilMatcherType) +} + +func (r ErrorEqualNilRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool { + if !r.isApplied(gexp, config) { + return false + } + + if v, ok := gexp.GetActualArg().(value.Valuer); ok && v.IsFunc() || gexp.ActualArgTypeIs(actual.ErrFuncActualArgType) { + gexp.SetMatcherSucceed() + } else { + gexp.ReverseAssertionFuncLogic() + gexp.SetMatcherHaveOccurred() + } + + reportBuilder.AddIssue(true, wrongErrWarningTemplate) + + return true +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/forceexpecttorule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/forceexpecttorule.go new file mode 100644 index 0000000000..391d1d449b --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/forceexpecttorule.go @@ -0,0 +1,43 @@ +package rules + +import ( + "github.com/nunnatsa/ginkgolinter/internal/expression" + "github.com/nunnatsa/ginkgolinter/internal/reports" + "github.com/nunnatsa/ginkgolinter/types" +) + +const forceExpectToTemplate = "must not use %s with %s" + +type ForceExpectToRule struct{} + +func (ForceExpectToRule) isApplied(gexp *expression.GomegaExpression, config types.Config) bool { + if !config.ForceExpectTo { + return false + } + + actlName := gexp.GetActualFuncName() + return actlName == "Expect" || actlName == "ExpectWithOffset" +} + +func (r ForceExpectToRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool { + if !r.isApplied(gexp, config) { + return false + } + + var newName string + + switch gexp.GetAssertFuncName() { + case "Should": + newName = "To" + case "ShouldNot": + newName = "ToNot" + default: + return false + } + + gexp.ReplaceAssertionMethod(newName) + reportBuilder.AddIssue(true, forceExpectToTemplate, gexp.GetActualFuncName(), gexp.GetOrigAssertFuncName()) + + // always return false, to keep checking another rules. 
+ return false +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/havelen0.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/havelen0.go new file mode 100644 index 0000000000..20bcb72117 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/havelen0.go @@ -0,0 +1,23 @@ +package rules + +import ( + "github.com/nunnatsa/ginkgolinter/internal/expression" + "github.com/nunnatsa/ginkgolinter/internal/expression/matcher" + "github.com/nunnatsa/ginkgolinter/internal/reports" + "github.com/nunnatsa/ginkgolinter/types" +) + +type HaveLen0 struct{} + +func (r *HaveLen0) isApplied(gexp *expression.GomegaExpression, config types.Config) bool { + return gexp.MatcherTypeIs(matcher.HaveLenZeroMatcherType) && !bool(config.AllowHaveLen0) +} + +func (r *HaveLen0) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool { + if !r.isApplied(gexp, config) { + return false + } + gexp.SetMatcherBeEmpty() + reportBuilder.AddIssue(true, wrongLengthWarningTemplate) + return true +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/haveoccurredrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/haveoccurredrule.go new file mode 100644 index 0000000000..437d3ee23f --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/haveoccurredrule.go @@ -0,0 +1,35 @@ +package rules + +import ( + "github.com/nunnatsa/ginkgolinter/internal/expression" + "github.com/nunnatsa/ginkgolinter/internal/expression/actual" + "github.com/nunnatsa/ginkgolinter/internal/expression/matcher" + "github.com/nunnatsa/ginkgolinter/internal/reports" + "github.com/nunnatsa/ginkgolinter/types" +) + +type HaveOccurredRule struct{} + +func (r HaveOccurredRule) isApplied(gexp *expression.GomegaExpression) bool { + return gexp.MatcherTypeIs(matcher.HaveOccurredMatcherType) +} + +func (r HaveOccurredRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool { + if !r.isApplied(gexp) { + return false + } + + if !gexp.ActualArgTypeIs(actual.ErrorTypeArgType) { + reportBuilder.AddIssue(false, "asserting a non-error type with HaveOccurred matcher") + return true + } + + if bool(config.ForceSucceedForFuncs) && gexp.GetActualArg().(*actual.ErrPayload).IsFunc() { + gexp.ReverseAssertionFuncLogic() + gexp.SetMatcherSucceed() + reportBuilder.AddIssue(true, "prefer using the Succeed matcher for error function, instead of HaveOccurred") + return true + } + + return false +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/lenrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/lenrule.go new file mode 100644 index 0000000000..06d6f2c687 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/lenrule.go @@ -0,0 +1,119 @@ +package rules + +import ( + "go/token" + + "github.com/nunnatsa/ginkgolinter/internal/expression" + "github.com/nunnatsa/ginkgolinter/internal/expression/actual" + "github.com/nunnatsa/ginkgolinter/internal/expression/matcher" + "github.com/nunnatsa/ginkgolinter/internal/reports" + "github.com/nunnatsa/ginkgolinter/types" +) + +const wrongLengthWarningTemplate = "wrong length assertion" + +// LenRule does not allow using the len() function in actual with numeric comparison. Instead, +// it suggests to use the HaveLen matcher, or the BeEmpty matcher, if comparing to zero. 
+type LenRule struct{} + +func (r *LenRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool { + + if !r.isApplied(gexp, config) { + return false + } + + if r.fixExpression(gexp) { + reportBuilder.AddIssue(true, wrongLengthWarningTemplate) + return true + } + return false +} + +func (r *LenRule) isApplied(gexp *expression.GomegaExpression, config types.Config) bool { + if config.SuppressLen { + return false + } + + if gexp.ActualArgTypeIs(actual.LenFuncActualArgType) { + if gexp.MatcherTypeIs(matcher.EqualMatcherType | matcher.BeZeroMatcherType) { + return true + } + + if gexp.MatcherTypeIs(matcher.BeNumericallyMatcherType) { + mtchr := gexp.GetMatcherInfo().(*matcher.BeNumericallyMatcher) + return mtchr.GetOp() == token.EQL || mtchr.GetOp() == token.NEQ || gexp.MatcherTypeIs(matcher.EqualZero|matcher.GreaterThanZero) + } + } + + if gexp.ActualArgTypeIs(actual.LenComparisonActualArgType) && gexp.MatcherTypeIs(matcher.BeTrueMatcherType|matcher.BeFalseMatcherType|matcher.EqualBoolValueMatcherType) { + return true + } + + return false +} + +func (r *LenRule) fixExpression(gexp *expression.GomegaExpression) bool { + if gexp.ActualArgTypeIs(actual.LenFuncActualArgType) { + return r.fixEqual(gexp) + } + + if gexp.ActualArgTypeIs(actual.LenComparisonActualArgType) { + return r.fixComparison(gexp) + } + + return false +} + +func (r *LenRule) fixEqual(gexp *expression.GomegaExpression) bool { + + if gexp.MatcherTypeIs(matcher.EqualMatcherType) { + gexp.SetLenNumericMatcher() + + } else if gexp.MatcherTypeIs(matcher.BeZeroMatcherType) { + gexp.SetMatcherBeEmpty() + + } else if gexp.MatcherTypeIs(matcher.BeNumericallyMatcherType) { + mtchr := gexp.GetMatcherInfo().(*matcher.BeNumericallyMatcher) + op := mtchr.GetOp() + + if op == token.EQL { + gexp.SetLenNumericMatcher() + } else if op == token.NEQ { + gexp.ReverseAssertionFuncLogic() + gexp.SetLenNumericMatcher() + } else if gexp.MatcherTypeIs(matcher.GreaterThanZero) { + gexp.ReverseAssertionFuncLogic() + gexp.SetMatcherBeEmpty() + } else { + return false + } + } else { + return false + } + + gexp.ReplaceActualWithItsFirstArg() + return true +} + +func (r *LenRule) fixComparison(gexp *expression.GomegaExpression) bool { + actl := gexp.GetActualArg().(*actual.FuncComparisonPayload) + if op := actl.GetOp(); op == token.NEQ { + gexp.ReverseAssertionFuncLogic() + } else if op != token.EQL { + return false + } + + if gexp.MatcherTypeIs(matcher.BoolValueFalse) { + gexp.ReverseAssertionFuncLogic() + } + + if actl.IsValueZero() { + gexp.SetMatcherBeEmpty() + } else { + gexp.SetMatcherLen(actl.GetValueExpr()) + } + + gexp.ReplaceActual(actl.GetFuncArg()) + + return true +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/matcheronlyrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/matcheronlyrule.go new file mode 100644 index 0000000000..1174393c6b --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/matcheronlyrule.go @@ -0,0 +1,12 @@ +package rules + +var matcherOnlyRules = Rules{ + &HaveLen0{}, + &EqualBoolRule{}, + &EqualNilRule{}, + &DoubleNegativeRule{}, +} + +func getMatcherOnlyRules() Rules { + return matcherOnlyRules +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/matcherrorrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/matcherrorrule.go new file mode 100644 index 0000000000..767b4b621e --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/matcherrorrule.go @@ -0,0 +1,110 @@ +package 
rules + +import ( + "go/ast" + + "github.com/nunnatsa/ginkgolinter/internal/expression" + "github.com/nunnatsa/ginkgolinter/internal/expression/actual" + "github.com/nunnatsa/ginkgolinter/internal/expression/matcher" + "github.com/nunnatsa/ginkgolinter/internal/reports" + "github.com/nunnatsa/ginkgolinter/types" +) + +const ( + matchErrorArgWrongType = "the MatchError matcher used to assert a non error type (%s)" + matchErrorWrongTypeAssertion = "MatchError first parameter (%s) must be error, string, GomegaMatcher or func(error)bool are allowed" + matchErrorMissingDescription = "missing function description as second parameter of MatchError" + matchErrorRedundantArg = "redundant MatchError arguments; consider removing them" + matchErrorNoFuncDescription = "The second parameter of MatchError must be the function description (string)" +) + +// MatchErrorRule validates the usage of the MatchError matcher. +// +// # First, it checks that the actual value is actually an error +// +// Then, it checks the matcher itself: this matcher can be used in 3 different ways: +// 1. With error type variable +// 2. With another gomega matcher, to check the actual err.Error() value +// 3. With function with a signature of func(error) bool. In this case, additional description +// string variable is required. +type MatchErrorRule struct{} + +func (r MatchErrorRule) isApplied(gexp *expression.GomegaExpression) bool { + return gexp.MatcherTypeIs(matcher.MatchErrorMatcherType | matcher.MultipleMatcherMatherType) +} + +func (r MatchErrorRule) Apply(gexp *expression.GomegaExpression, _ types.Config, reportBuilder *reports.Builder) bool { + if !r.isApplied(gexp) { + return false + } + + return checkMatchError(gexp, reportBuilder) +} + +func checkMatchError(gexp *expression.GomegaExpression, reportBuilder *reports.Builder) bool { + mtchr := gexp.GetMatcherInfo() + switch m := mtchr.(type) { + case matcher.MatchErrorMatcher: + return checkMatchErrorMatcher(gexp, gexp.GetMatcher(), m, reportBuilder) + + case *matcher.MultipleMatchersMatcher: + res := false + for i := range m.Len() { + nested := m.At(i) + if specific, ok := nested.GetMatcherInfo().(matcher.MatchErrorMatcher); ok { + if valid := checkMatchErrorMatcher(gexp, gexp.GetMatcher(), specific, reportBuilder); valid { + res = true + } + } + } + return res + default: + return false + } +} + +func checkMatchErrorMatcher(gexp *expression.GomegaExpression, mtchr *matcher.Matcher, mtchrInfo matcher.MatchErrorMatcher, reportBuilder *reports.Builder) bool { + if !gexp.ActualArgTypeIs(actual.ErrorTypeArgType) { + reportBuilder.AddIssue(false, matchErrorArgWrongType, reportBuilder.FormatExpr(gexp.GetActualArgExpr())) + } + + switch m := mtchrInfo.(type) { + case *matcher.InvalidMatchErrorMatcher: + reportBuilder.AddIssue(false, matchErrorWrongTypeAssertion, reportBuilder.FormatExpr(mtchr.Clone.Args[0])) + + case *matcher.MatchErrorMatcherWithErrFunc: + if m.NumArgs() == m.AllowedNumArgs() { + if !m.IsSecondArgString() { + reportBuilder.AddIssue(false, matchErrorNoFuncDescription) + } + return true + } + + if m.NumArgs() == 1 { + reportBuilder.AddIssue(false, matchErrorMissingDescription) + return true + } + + case *matcher.MatchErrorMatcherWithErr, + *matcher.MatchErrorMatcherWithMatcher, + *matcher.MatchErrorMatcherWithString: + // continue + default: + return false + } + + if mtchrInfo.NumArgs() == mtchrInfo.AllowedNumArgs() { + return true + } + + if mtchrInfo.NumArgs() > mtchrInfo.AllowedNumArgs() { + var newArgsSuggestion []ast.Expr + for i := 0; i < 
mtchrInfo.AllowedNumArgs(); i++ { + newArgsSuggestion = append(newArgsSuggestion, mtchr.Clone.Args[i]) + } + mtchr.Clone.Args = newArgsSuggestion + reportBuilder.AddIssue(false, matchErrorRedundantArg) + return true + } + return false +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/missingassertionrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/missingassertionrule.go new file mode 100644 index 0000000000..43fc58bf6b --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/missingassertionrule.go @@ -0,0 +1,27 @@ +package rules + +import ( + "github.com/nunnatsa/ginkgolinter/internal/expression" + "github.com/nunnatsa/ginkgolinter/internal/gomegainfo" + "github.com/nunnatsa/ginkgolinter/internal/reports" + "github.com/nunnatsa/ginkgolinter/types" +) + +const missingAssertionMessage = `%q: missing assertion method. Expected %s` + +type MissingAssertionRule struct{} + +func (r MissingAssertionRule) isApplied(gexp *expression.GomegaExpression) bool { + return gexp.IsMissingAssertion() +} + +func (r MissingAssertionRule) Apply(gexp *expression.GomegaExpression, _ types.Config, reportBuilder *reports.Builder) bool { + if !r.isApplied(gexp) { + return false + } + + actualMethodName := gexp.GetActualFuncName() + reportBuilder.AddIssue(false, missingAssertionMessage, actualMethodName, gomegainfo.GetAllowedAssertionMethods(actualMethodName)) + + return true +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/nilcomparerule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/nilcomparerule.go new file mode 100644 index 0000000000..fc3cd49e53 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/nilcomparerule.go @@ -0,0 +1,75 @@ +package rules + +import ( + "go/token" + + "github.com/nunnatsa/ginkgolinter/internal/expression" + "github.com/nunnatsa/ginkgolinter/internal/expression/actual" + "github.com/nunnatsa/ginkgolinter/internal/expression/matcher" + "github.com/nunnatsa/ginkgolinter/internal/reports" + "github.com/nunnatsa/ginkgolinter/types" +) + +const ( + wrongNilWarningTemplate = "wrong nil assertion" + wrongErrWarningTemplate = "wrong error assertion" +) + +type NilCompareRule struct{} + +func (r NilCompareRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool { + isErr, ruleApplied := r.isApplied(gexp, config) + if !ruleApplied { + return false + } + + if gexp.MatcherTypeIs(matcher.BoolValueFalse) { + gexp.ReverseAssertionFuncLogic() + } + + r.handleNilBeBoolMatcher(gexp, gexp.GetActualArg().(*actual.NilComparisonPayload), reportBuilder, isErr) + + return true +} + +func (r NilCompareRule) isApplied(gexp *expression.GomegaExpression, config types.Config) (bool, bool) { + if !gexp.MatcherTypeIs(matcher.EqualBoolValueMatcherType | matcher.BeTrueMatcherType | matcher.BeFalseMatcherType) { + return false, false + } + + actl, ok := gexp.GetActualArg().(*actual.NilComparisonPayload) + if !ok { + return false, false + } + + isErr := actl.IsError() && !bool(config.SuppressErr) + + if !isErr && bool(config.SuppressNil) { + return isErr, false + } + + return isErr, true +} + +func (r NilCompareRule) handleNilBeBoolMatcher(gexp *expression.GomegaExpression, actl *actual.NilComparisonPayload, reportBuilder *reports.Builder, isErr bool) { + template := wrongNilWarningTemplate + if isErr { + template = wrongErrWarningTemplate + if actl.IsFunc() { + gexp.SetMatcherSucceed() + } else { + gexp.ReverseAssertionFuncLogic() + gexp.SetMatcherHaveOccurred() 
+ } + } else { + gexp.SetMatcherBeNil() + } + + gexp.ReplaceActual(actl.GetValueExpr()) + + if actl.GetOp() == token.NEQ { + gexp.ReverseAssertionFuncLogic() + } + + reportBuilder.AddIssue(true, template) +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/rule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/rule.go new file mode 100644 index 0000000000..cf331c21c4 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/rule.go @@ -0,0 +1,61 @@ +package rules + +import ( + "github.com/nunnatsa/ginkgolinter/internal/expression" + "github.com/nunnatsa/ginkgolinter/internal/reports" + "github.com/nunnatsa/ginkgolinter/types" +) + +type Rule interface { + Apply(*expression.GomegaExpression, types.Config, *reports.Builder) bool +} + +var rules = Rules{ + &ForceExpectToRule{}, + &LenRule{}, + &CapRule{}, + &ComparisonRule{}, + &NilCompareRule{}, + &ComparePointRule{}, + &ErrorEqualNilRule{}, + &MatchErrorRule{}, + getMatcherOnlyRules(), + &EqualDifferentTypesRule{}, + &HaveOccurredRule{}, + &SucceedRule{}, +} + +var asyncRules = Rules{ + &AsyncFuncCallRule{}, + &AsyncTimeIntervalsRule{}, + &ErrorEqualNilRule{}, + &MatchErrorRule{}, + &AsyncSucceedRule{}, + getMatcherOnlyRules(), +} + +func GetRules() Rules { + return rules +} + +func GetAsyncRules() Rules { + return asyncRules +} + +type Rules []Rule + +func (r Rules) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool { + for _, rule := range r { + if rule.Apply(gexp, config, reportBuilder) { + return true + } + } + + return false +} + +var missingAssertionRule = MissingAssertionRule{} + +func GetMissingAssertionRule() Rule { + return missingAssertionRule +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/succeedrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/succeedrule.go new file mode 100644 index 0000000000..6a5167a8a0 --- /dev/null +++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/succeedrule.go @@ -0,0 +1,41 @@ +package rules + +import ( + "github.com/nunnatsa/ginkgolinter/internal/expression" + "github.com/nunnatsa/ginkgolinter/internal/expression/actual" + "github.com/nunnatsa/ginkgolinter/internal/expression/matcher" + "github.com/nunnatsa/ginkgolinter/internal/reports" + "github.com/nunnatsa/ginkgolinter/types" +) + +type SucceedRule struct{} + +func (r SucceedRule) isApplied(gexp *expression.GomegaExpression) bool { + return !gexp.IsAsync() && gexp.MatcherTypeIs(matcher.SucceedMatcherType) +} + +func (r SucceedRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool { + if !r.isApplied(gexp) { + return false + } + + if !gexp.ActualArgTypeIs(actual.ErrorTypeArgType) { + if gexp.IsActualTuple() { + reportBuilder.AddIssue(false, "the Success matcher does not support multiple values") + } else { + reportBuilder.AddIssue(false, "asserting a non-error type with Succeed matcher") + } + return true + } + + if bool(config.ForceSucceedForFuncs) && !gexp.GetActualArg().(*actual.ErrPayload).IsFunc() { + gexp.ReverseAssertionFuncLogic() + gexp.SetMatcherHaveOccurred() + + reportBuilder.AddIssue(true, "prefer using the HaveOccurred matcher for non-function error value, instead of Succeed") + + return true + } + + return false +} diff --git a/vendor/github.com/nunnatsa/ginkgolinter/linter/ginkgo_linter.go b/vendor/github.com/nunnatsa/ginkgolinter/linter/ginkgo_linter.go index 574fdfadf3..188b2b5f91 100644 --- 
a/vendor/github.com/nunnatsa/ginkgolinter/linter/ginkgo_linter.go +++ b/vendor/github.com/nunnatsa/ginkgolinter/linter/ginkgo_linter.go @@ -1,24 +1,16 @@ package linter import ( - "bytes" - "fmt" "go/ast" - "go/constant" - "go/printer" - "go/token" - gotypes "go/types" - "reflect" - "github.com/go-toolsmith/astcopy" "golang.org/x/tools/go/analysis" + "github.com/nunnatsa/ginkgolinter/internal/expression" + "github.com/nunnatsa/ginkgolinter/internal/formatter" "github.com/nunnatsa/ginkgolinter/internal/ginkgohandler" "github.com/nunnatsa/ginkgolinter/internal/gomegahandler" - "github.com/nunnatsa/ginkgolinter/internal/interfaces" - "github.com/nunnatsa/ginkgolinter/internal/intervals" "github.com/nunnatsa/ginkgolinter/internal/reports" - "github.com/nunnatsa/ginkgolinter/internal/reverseassertion" + "github.com/nunnatsa/ginkgolinter/internal/rules" "github.com/nunnatsa/ginkgolinter/types" ) @@ -26,62 +18,6 @@ import ( // // For more details, look at the README.md file -const ( - linterName = "ginkgo-linter" - wrongLengthWarningTemplate = "wrong length assertion" - wrongCapWarningTemplate = "wrong cap assertion" - wrongNilWarningTemplate = "wrong nil assertion" - wrongBoolWarningTemplate = "wrong boolean assertion" - wrongErrWarningTemplate = "wrong error assertion" - wrongCompareWarningTemplate = "wrong comparison assertion" - doubleNegativeWarningTemplate = "avoid double negative assertion" - valueInEventually = "use a function call in %s. This actually checks nothing, because %s receives the function returned value, instead of function itself, and this value is never changed" - comparePointerToValue = "comparing a pointer to a value will always fail" - missingAssertionMessage = linterName + `: %q: missing assertion method. Expected %s` - focusContainerFound = linterName + ": Focus container found. This is used only for local debug and should not be part of the actual source code. Consider to replace with %q" - focusSpecFound = linterName + ": Focus spec found. This is used only for local debug and should not be part of the actual source code. 
Consider to remove it" - compareDifferentTypes = "use %[1]s with different types: Comparing %[2]s with %[3]s; either change the expected value type if possible, or use the BeEquivalentTo() matcher, instead of %[1]s()" - matchErrorArgWrongType = "the MatchError matcher used to assert a non error type (%s)" - matchErrorWrongTypeAssertion = "MatchError first parameter (%s) must be error, string, GomegaMatcher or func(error)bool are allowed" - matchErrorMissingDescription = "missing function description as second parameter of MatchError" - matchErrorRedundantArg = "redundant MatchError arguments; consider removing them" - matchErrorNoFuncDescription = "The second parameter of MatchError must be the function description (string)" - forceExpectToTemplate = "must not use Expect with %s" - useBeforeEachTemplate = "use BeforeEach() to assign variable %s" -) - -const ( // gomega matchers - beEmpty = "BeEmpty" - beEquivalentTo = "BeEquivalentTo" - beFalse = "BeFalse" - beIdenticalTo = "BeIdenticalTo" - beNil = "BeNil" - beNumerically = "BeNumerically" - beTrue = "BeTrue" - beZero = "BeZero" - equal = "Equal" - haveLen = "HaveLen" - haveCap = "HaveCap" - haveOccurred = "HaveOccurred" - haveValue = "HaveValue" - not = "Not" - omega = "Ω" - succeed = "Succeed" - and = "And" - or = "Or" - withTransform = "WithTransform" - matchError = "MatchError" -) - -const ( // gomega actuals - expect = "Expect" - expectWithOffset = "ExpectWithOffset" - eventually = "Eventually" - eventuallyWithOffset = "EventuallyWithOffset" - consistently = "Consistently" - consistentlyWithOffset = "ConsistentlyWithOffset" -) - type GinkgoLinter struct { config *types.Config } @@ -94,7 +30,7 @@ func NewGinkgoLinter(config *types.Config) *GinkgoLinter { } // Run is the main assertion function -func (l *GinkgoLinter) Run(pass *analysis.Pass) (interface{}, error) { +func (l *GinkgoLinter) Run(pass *analysis.Pass) (any, error) { for _, file := range pass.Files { fileConfig := l.config.Clone() @@ -102,39 +38,20 @@ func (l *GinkgoLinter) Run(pass *analysis.Pass) (interface{}, error) { fileConfig.UpdateFromFile(cm) - gomegaHndlr := gomegahandler.GetGomegaHandler(file) + gomegaHndlr := gomegahandler.GetGomegaHandler(file, pass) ginkgoHndlr := ginkgohandler.GetGinkgoHandler(file) if gomegaHndlr == nil && ginkgoHndlr == nil { // no gomega or ginkgo imports => no use in gomega in this file; nothing to do here continue } - timePks := "" - for _, imp := range file.Imports { - if imp.Path.Value == `"time"` { - if imp.Name == nil { - timePks = "time" - } else { - timePks = imp.Name.Name - } - } - } - ast.Inspect(file, func(n ast.Node) bool { if ginkgoHndlr != nil { goDeeper := false spec, ok := n.(*ast.ValueSpec) if ok { for _, val := range spec.Values { - if exp, ok := val.(*ast.CallExpr); ok { - if bool(fileConfig.ForbidFocus) && checkFocusContainer(pass, ginkgoHndlr, exp) { - goDeeper = true - } - - if bool(fileConfig.ForbidSpecPollution) && checkAssignmentsInContainer(pass, ginkgoHndlr, exp) { - goDeeper = true - } - } + goDeeper = ginkgoHndlr.HandleGinkgoSpecs(val, fileConfig, pass) || goDeeper } } if goDeeper { @@ -147,1527 +64,68 @@ func (l *GinkgoLinter) Run(pass *analysis.Pass) (interface{}, error) { return true } - config := fileConfig.Clone() - - if comments, ok := cm[stmt]; ok { - config.UpdateFromComment(comments) - } - // search for function calls assertionExp, ok := stmt.X.(*ast.CallExpr) if !ok { return true } + config := fileConfig.Clone() + if comments, ok := cm[stmt]; ok { + config.UpdateFromComment(comments) + } + if 
ginkgoHndlr != nil { - goDeeper := false - if bool(config.ForbidFocus) && checkFocusContainer(pass, ginkgoHndlr, assertionExp) { - goDeeper = true - } - if bool(config.ForbidSpecPollution) && checkAssignmentsInContainer(pass, ginkgoHndlr, assertionExp) { - goDeeper = true - } - if goDeeper { + if ginkgoHndlr.HandleGinkgoSpecs(assertionExp, config, pass) { return true } } - // no more ginkgo checks. From here it's only gomega. So if there is no gomega handler, exit here. This is - // mostly to prevent nil pointer error. + // no more ginkgo checks. From here it's only gomega. So if there is no gomega handler, exit here. if gomegaHndlr == nil { return true } - assertionFunc, ok := assertionExp.Fun.(*ast.SelectorExpr) - if !ok { - checkNoAssertion(pass, assertionExp, gomegaHndlr) - return true - } - - if !isAssertionFunc(assertionFunc.Sel.Name) { - checkNoAssertion(pass, assertionExp, gomegaHndlr) - return true - } - - actualExpr := gomegaHndlr.GetActualExpr(assertionFunc) - if actualExpr == nil { + gexp, ok := expression.New(assertionExp, pass, gomegaHndlr, getTimePkg(file)) + if !ok || gexp == nil { return true } - return checkExpression(pass, config, assertionExp, actualExpr, gomegaHndlr, timePks) + reportBuilder := reports.NewBuilder(assertionExp, formatter.NewGoFmtFormatter(pass.Fset)) + return checkGomegaExpression(gexp, config, reportBuilder, pass) }) } return nil, nil } -func checkAssignmentsInContainer(pass *analysis.Pass, ginkgoHndlr ginkgohandler.Handler, exp *ast.CallExpr) bool { - foundSomething := false - if ginkgoHndlr.IsWrapContainer(exp) { - for _, arg := range exp.Args { - if fn, ok := arg.(*ast.FuncLit); ok { - if fn.Body != nil { - if checkAssignments(pass, fn.Body.List) { - foundSomething = true - } - break - } - } - } - } - - return foundSomething -} - -func checkAssignments(pass *analysis.Pass, list []ast.Stmt) bool { - foundSomething := false - for _, stmt := range list { - switch st := stmt.(type) { - case *ast.DeclStmt: - if gen, ok := st.Decl.(*ast.GenDecl); ok { - if gen.Tok != token.VAR { - continue - } - for _, spec := range gen.Specs { - if valSpec, ok := spec.(*ast.ValueSpec); ok { - if checkAssignmentsValues(pass, valSpec.Names, valSpec.Values) { - foundSomething = true - } - } - } - } - - case *ast.AssignStmt: - for i, val := range st.Rhs { - if !is[*ast.FuncLit](val) { - if id, isIdent := st.Lhs[i].(*ast.Ident); isIdent && id.Name != "_" { - reportNoFix(pass, id.Pos(), useBeforeEachTemplate, id.Name) - foundSomething = true - } - } - } - - case *ast.IfStmt: - if st.Body != nil { - if checkAssignments(pass, st.Body.List) { - foundSomething = true - } - } - if st.Else != nil { - if block, isBlock := st.Else.(*ast.BlockStmt); isBlock { - if checkAssignments(pass, block.List) { - foundSomething = true - } - } - } - } - } - - return foundSomething -} - -func checkAssignmentsValues(pass *analysis.Pass, names []*ast.Ident, values []ast.Expr) bool { - foundSomething := false - for i, val := range values { - if !is[*ast.FuncLit](val) { - reportNoFix(pass, names[i].Pos(), useBeforeEachTemplate, names[i].Name) - foundSomething = true - } - } - - return foundSomething -} - -func checkFocusContainer(pass *analysis.Pass, ginkgoHndlr ginkgohandler.Handler, exp *ast.CallExpr) bool { - foundFocus := false - isFocus, id := ginkgoHndlr.GetFocusContainerName(exp) - if isFocus { - reportNewName(pass, id, id.Name[1:], focusContainerFound, id.Name) - foundFocus = true - } - - if id != nil && ginkgohandler.IsContainer(id.Name) { - for _, arg := range exp.Args { - if 
ginkgoHndlr.IsFocusSpec(arg) { - reportNoFix(pass, arg.Pos(), focusSpecFound) - foundFocus = true - } else if callExp, ok := arg.(*ast.CallExpr); ok { - if checkFocusContainer(pass, ginkgoHndlr, callExp) { // handle table entries - foundFocus = true - } - } - } - } - - return foundFocus -} - -func checkExpression(pass *analysis.Pass, config types.Config, assertionExp *ast.CallExpr, actualExpr *ast.CallExpr, handler gomegahandler.Handler, timePkg string) bool { - expr := astcopy.CallExpr(assertionExp) - - reportBuilder := reports.NewBuilder(pass.Fset, expr) - +func checkGomegaExpression(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder, pass *analysis.Pass) bool { goNested := false - if checkAsyncAssertion(pass, config, expr, actualExpr, handler, reportBuilder, timePkg) { + if rules.GetMissingAssertionRule().Apply(gexp, config, reportBuilder) { goNested = true } else { - - actualArg := getActualArg(actualExpr, handler) - if actualArg == nil { - return true - } - - if config.ForceExpectTo { - goNested = forceExpectTo(expr, handler, reportBuilder) || goNested + if gexp.IsAsync() { + rules.GetAsyncRules().Apply(gexp, config, reportBuilder) + goNested = true + } else { + rules.GetRules().Apply(gexp, config, reportBuilder) } - - goNested = doCheckExpression(pass, config, assertionExp, actualArg, expr, handler, reportBuilder) || goNested } if reportBuilder.HasReport() { - reportBuilder.SetFixOffer(pass.Fset, expr) + reportBuilder.SetFixOffer(gexp.GetClone()) pass.Report(reportBuilder.Build()) } return goNested } -func forceExpectTo(expr *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool { - if asrtFun, ok := expr.Fun.(*ast.SelectorExpr); ok { - if actualFuncName, ok := handler.GetActualFuncName(expr); ok && actualFuncName == expect { - var ( - name string - newIdent *ast.Ident - ) - - switch name = asrtFun.Sel.Name; name { - case "Should": - newIdent = ast.NewIdent("To") - case "ShouldNot": - newIdent = ast.NewIdent("ToNot") - default: - return false - } - - handler.ReplaceFunction(expr, newIdent) - reportBuilder.AddIssue(true, fmt.Sprintf(forceExpectToTemplate, name)) - return true - } - } - - return false -} - -func doCheckExpression(pass *analysis.Pass, config types.Config, assertionExp *ast.CallExpr, actualArg ast.Expr, expr *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool { - if !bool(config.SuppressLen) && isActualIsLenFunc(actualArg) { - return checkLengthMatcher(expr, pass, handler, reportBuilder) - - } else if !bool(config.SuppressLen) && isActualIsCapFunc(actualArg) { - return checkCapMatcher(expr, handler, reportBuilder) - - } else if nilable, compOp := getNilableFromComparison(actualArg); nilable != nil { - if isExprError(pass, nilable) { - if config.SuppressErr { - return true - } - } else if config.SuppressNil { - return true - } - - return checkNilMatcher(expr, pass, nilable, handler, compOp == token.NEQ, reportBuilder) - - } else if first, second, op, ok := isComparison(pass, actualArg); ok { - matcher, shouldContinue := startCheckComparison(expr, handler) - if !shouldContinue { - return false - } - if !config.SuppressLen { - if isActualIsLenFunc(first) { - if handleLenComparison(pass, expr, matcher, first, second, op, handler, reportBuilder) { - return false - } - } - if isActualIsCapFunc(first) { - if handleCapComparison(expr, matcher, first, second, op, handler, reportBuilder) { - return false - } - } - } - return bool(config.SuppressCompare) || checkComparison(expr, pass, 
matcher, handler, first, second, op, reportBuilder) - - } else if checkMatchError(pass, assertionExp, actualArg, handler, reportBuilder) { - return false - } else if isExprError(pass, actualArg) { - return bool(config.SuppressErr) || checkNilError(pass, expr, handler, actualArg, reportBuilder) - - } else if checkPointerComparison(pass, config, assertionExp, expr, actualArg, handler, reportBuilder) { - return false - } else if !handleAssertionOnly(pass, config, expr, handler, actualArg, reportBuilder) { - return false - } else if !config.SuppressTypeCompare { - return !checkEqualWrongType(pass, assertionExp, actualArg, handler, reportBuilder) - } - - return true -} - -func checkMatchError(pass *analysis.Pass, origExp *ast.CallExpr, actualArg ast.Expr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool { - matcher, ok := origExp.Args[0].(*ast.CallExpr) - if !ok { - return false - } - - return doCheckMatchError(pass, origExp, matcher, actualArg, handler, reportBuilder) -} - -func doCheckMatchError(pass *analysis.Pass, origExp *ast.CallExpr, matcher *ast.CallExpr, actualArg ast.Expr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool { - name, ok := handler.GetActualFuncName(matcher) - if !ok { - return false - } - switch name { - case matchError: - case not: - nested, ok := matcher.Args[0].(*ast.CallExpr) - if !ok { - return false - } - - return doCheckMatchError(pass, origExp, nested, actualArg, handler, reportBuilder) - case and, or: - res := false - for _, arg := range matcher.Args { - if nested, ok := arg.(*ast.CallExpr); ok { - if valid := doCheckMatchError(pass, origExp, nested, actualArg, handler, reportBuilder); valid { - res = true - } - } - } - return res - default: - return false - } - - if !isExprError(pass, actualArg) { - reportBuilder.AddIssue(false, matchErrorArgWrongType, goFmt(pass.Fset, actualArg)) - } - - expr := astcopy.CallExpr(matcher) - - validAssertion, requiredParams := checkMatchErrorAssertion(pass, matcher) - if !validAssertion { - reportBuilder.AddIssue(false, matchErrorWrongTypeAssertion, goFmt(pass.Fset, matcher.Args[0])) - } - - numParams := len(matcher.Args) - if numParams == requiredParams { - if numParams == 2 { - t := pass.TypesInfo.TypeOf(matcher.Args[1]) - if !gotypes.Identical(t, gotypes.Typ[gotypes.String]) { - reportBuilder.AddIssue(false, matchErrorNoFuncDescription) - return true - } - } - return true - } - - if requiredParams == 2 && numParams == 1 { - reportBuilder.AddIssue(false, matchErrorMissingDescription) - return true - } - - var newArgsSuggestion = []ast.Expr{expr.Args[0]} - if requiredParams == 2 { - newArgsSuggestion = append(newArgsSuggestion, expr.Args[1]) - } - expr.Args = newArgsSuggestion - - reportBuilder.AddIssue(true, matchErrorRedundantArg) - return true -} - -func checkMatchErrorAssertion(pass *analysis.Pass, matcher *ast.CallExpr) (bool, int) { - if isErrorMatcherValidArg(pass, matcher.Args[0]) { - return true, 1 - } - - t1 := pass.TypesInfo.TypeOf(matcher.Args[0]) - if isFuncErrBool(t1) { - return true, 2 - } - - return false, 0 -} - -// isFuncErrBool checks if a function is with the signature `func(error) bool` -func isFuncErrBool(t gotypes.Type) bool { - sig, ok := t.(*gotypes.Signature) - if !ok { - return false - } - if sig.Params().Len() != 1 || sig.Results().Len() != 1 { - return false - } - - if !interfaces.ImplementsError(sig.Params().At(0).Type()) { - return false - } - - b, ok := sig.Results().At(0).Type().(*gotypes.Basic) - if ok && b.Name() == "bool" && b.Info() == gotypes.IsBoolean 
&& b.Kind() == gotypes.Bool { - return true - } - - return false -} - -func isErrorMatcherValidArg(pass *analysis.Pass, arg ast.Expr) bool { - if isExprError(pass, arg) { - return true - } - - if t, ok := pass.TypesInfo.TypeOf(arg).(*gotypes.Basic); ok && t.Kind() == gotypes.String { - return true - } - - t := pass.TypesInfo.TypeOf(arg) - - return interfaces.ImplementsGomegaMatcher(t) -} - -func checkEqualWrongType(pass *analysis.Pass, origExp *ast.CallExpr, actualArg ast.Expr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool { - matcher, ok := origExp.Args[0].(*ast.CallExpr) - if !ok { - return false - } - - return checkEqualDifferentTypes(pass, matcher, actualArg, handler, false, reportBuilder) -} - -func checkEqualDifferentTypes(pass *analysis.Pass, matcher *ast.CallExpr, actualArg ast.Expr, handler gomegahandler.Handler, parentPointer bool, reportBuilder *reports.Builder) bool { - matcherFuncName, ok := handler.GetActualFuncName(matcher) - if !ok { - return false - } - - actualType := pass.TypesInfo.TypeOf(actualArg) - - switch matcherFuncName { - case equal, beIdenticalTo: // continue - case and, or: - foundIssue := false - for _, nestedExp := range matcher.Args { - nested, ok := nestedExp.(*ast.CallExpr) - if !ok { - continue - } - if checkEqualDifferentTypes(pass, nested, actualArg, handler, parentPointer, reportBuilder) { - foundIssue = true - } - } - - return foundIssue - case withTransform: - nested, ok := matcher.Args[1].(*ast.CallExpr) - if !ok { - return false - } - - matcherFuncName, ok = handler.GetActualFuncName(nested) - switch matcherFuncName { - case equal, beIdenticalTo: - case not: - return checkEqualDifferentTypes(pass, nested, actualArg, handler, parentPointer, reportBuilder) - default: - return false - } - - if t := getFuncType(pass, matcher.Args[0]); t != nil { - actualType = t - matcher = nested - - if !ok { - return false - } - } else { - return checkEqualDifferentTypes(pass, nested, actualArg, handler, parentPointer, reportBuilder) - } - - case not: - nested, ok := matcher.Args[0].(*ast.CallExpr) - if !ok { - return false - } - - return checkEqualDifferentTypes(pass, nested, actualArg, handler, parentPointer, reportBuilder) - - case haveValue: - nested, ok := matcher.Args[0].(*ast.CallExpr) - if !ok { - return false - } - - return checkEqualDifferentTypes(pass, nested, actualArg, handler, true, reportBuilder) - default: - return false - } - - matcherValue := matcher.Args[0] - - switch act := actualType.(type) { - case *gotypes.Tuple: - actualType = act.At(0).Type() - case *gotypes.Pointer: - if parentPointer { - actualType = act.Elem() - } - } - - matcherType := pass.TypesInfo.TypeOf(matcherValue) - - if !reflect.DeepEqual(matcherType, actualType) { - // Equal can handle comparison of interface and a value that implements it - if isImplementing(matcherType, actualType) || isImplementing(actualType, matcherType) { - return false - } - - reportBuilder.AddIssue(false, compareDifferentTypes, matcherFuncName, actualType, matcherType) - return true - } - - return false -} - -func getFuncType(pass *analysis.Pass, expr ast.Expr) gotypes.Type { - switch f := expr.(type) { - case *ast.FuncLit: - if f.Type != nil && f.Type.Results != nil && len(f.Type.Results.List) > 0 { - return pass.TypesInfo.TypeOf(f.Type.Results.List[0].Type) - } - case *ast.Ident: - a := pass.TypesInfo.TypeOf(f) - if sig, ok := a.(*gotypes.Signature); ok && sig.Results().Len() > 0 { - return sig.Results().At(0).Type() - } - } - - return nil -} - -func isImplementing(ifs, impl 
gotypes.Type) bool { - if gotypes.IsInterface(ifs) { - - var ( - theIfs *gotypes.Interface - ok bool - ) - - for { - theIfs, ok = ifs.(*gotypes.Interface) - if ok { - break - } - ifs = ifs.Underlying() - } - - return gotypes.Implements(impl, theIfs) - } - return false -} - -// be careful - never change origExp!!! only modify its clone, expr!!! -func checkPointerComparison(pass *analysis.Pass, config types.Config, origExp *ast.CallExpr, expr *ast.CallExpr, actualArg ast.Expr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool { - if !isPointer(pass, actualArg) { - return false - } - matcher, ok := origExp.Args[0].(*ast.CallExpr) - if !ok { - return false - } - - matcherFuncName, ok := handler.GetActualFuncName(matcher) - if !ok { - return false - } - - // not using recurse here, since we need the original expression, in order to get the TypeInfo, while we should not - // modify it. - for matcherFuncName == not { - reverseAssertionFuncLogic(expr) - expr.Args[0] = expr.Args[0].(*ast.CallExpr).Args[0] - matcher, ok = matcher.Args[0].(*ast.CallExpr) - if !ok { - return false - } - - matcherFuncName, ok = handler.GetActualFuncName(matcher) - if !ok { - return false - } - } - - switch matcherFuncName { - case equal, beIdenticalTo, beEquivalentTo: - arg := matcher.Args[0] - if isPointer(pass, arg) { - return false - } - if isNil(arg) { - return false - } - if isInterface(pass, arg) { - return false - } - case beFalse, beTrue, beNumerically: - default: - return false - } - - handleAssertionOnly(pass, config, expr, handler, actualArg, reportBuilder) - - args := []ast.Expr{astcopy.CallExpr(expr.Args[0].(*ast.CallExpr))} - handler.ReplaceFunction(expr.Args[0].(*ast.CallExpr), ast.NewIdent(haveValue)) - expr.Args[0].(*ast.CallExpr).Args = args - - reportBuilder.AddIssue(true, comparePointerToValue) - return true -} - -// check async assertion does not assert function call. This is a real bug in the test. In this case, the assertion is -// done on the returned value, instead of polling the result of a function, for instance. 
-func checkAsyncAssertion(pass *analysis.Pass, config types.Config, expr *ast.CallExpr, actualExpr *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder, timePkg string) bool { - funcName, ok := handler.GetActualFuncName(actualExpr) - if !ok { - return false - } - - var funcIndex int - switch funcName { - case eventually, consistently: - funcIndex = 0 - case eventuallyWithOffset, consistentlyWithOffset: - funcIndex = 1 - default: - return false - } - - if !config.SuppressAsync && len(actualExpr.Args) > funcIndex { - t := pass.TypesInfo.TypeOf(actualExpr.Args[funcIndex]) - - // skip context variable, if used as first argument - if "context.Context" == t.String() { - funcIndex++ - } - - if len(actualExpr.Args) > funcIndex { - if fun, funcCall := actualExpr.Args[funcIndex].(*ast.CallExpr); funcCall { - t = pass.TypesInfo.TypeOf(fun) - if !isValidAsyncValueType(t) { - actualExpr = handler.GetActualExpr(expr.Fun.(*ast.SelectorExpr)) - - if len(fun.Args) > 0 { - origArgs := actualExpr.Args - origFunc := actualExpr.Fun - actualExpr.Args = fun.Args - - origArgs[funcIndex] = fun.Fun - call := &ast.SelectorExpr{ - Sel: ast.NewIdent("WithArguments"), - X: &ast.CallExpr{ - Fun: origFunc, - Args: origArgs, - }, - } - - actualExpr.Fun = call - actualExpr.Args = fun.Args - actualExpr = actualExpr.Fun.(*ast.SelectorExpr).X.(*ast.CallExpr) - } else { - actualExpr.Args[funcIndex] = fun.Fun - } - - reportBuilder.AddIssue(true, valueInEventually, funcName, funcName) - } - } - } - - if config.ValidateAsyncIntervals { - intervals.CheckIntervals(pass, expr, actualExpr, reportBuilder, handler, timePkg, funcIndex) +func getTimePkg(file *ast.File) string { + timePkg := "time" + for _, imp := range file.Imports { + if imp.Path.Value == `"time"` && imp.Name != nil { + timePkg = imp.Name.Name } } - handleAssertionOnly(pass, config, expr, handler, actualExpr, reportBuilder) - return true -} - -func isValidAsyncValueType(t gotypes.Type) bool { - switch t.(type) { - // allow functions that return function or channel. 
- case *gotypes.Signature, *gotypes.Chan, *gotypes.Pointer: - return true - case *gotypes.Named: - return isValidAsyncValueType(t.Underlying()) - } - - return false -} - -func startCheckComparison(exp *ast.CallExpr, handler gomegahandler.Handler) (*ast.CallExpr, bool) { - matcher, ok := exp.Args[0].(*ast.CallExpr) - if !ok { - return nil, false - } - - matcherFuncName, ok := handler.GetActualFuncName(matcher) - if !ok { - return nil, false - } - - switch matcherFuncName { - case beTrue: - case beFalse: - reverseAssertionFuncLogic(exp) - case equal: - boolean, found := matcher.Args[0].(*ast.Ident) - if !found { - return nil, false - } - - if boolean.Name == "false" { - reverseAssertionFuncLogic(exp) - } else if boolean.Name != "true" { - return nil, false - } - - case not: - reverseAssertionFuncLogic(exp) - exp.Args[0] = exp.Args[0].(*ast.CallExpr).Args[0] - return startCheckComparison(exp, handler) - - default: - return nil, false - } - - return matcher, true -} - -func checkComparison(exp *ast.CallExpr, pass *analysis.Pass, matcher *ast.CallExpr, handler gomegahandler.Handler, first ast.Expr, second ast.Expr, op token.Token, reportBuilder *reports.Builder) bool { - fun, ok := exp.Fun.(*ast.SelectorExpr) - if !ok { - return true - } - - call := handler.GetActualExpr(fun) - if call == nil { - return true - } - - switch op { - case token.EQL: - handleEqualComparison(pass, matcher, first, second, handler) - - case token.NEQ: - reverseAssertionFuncLogic(exp) - handleEqualComparison(pass, matcher, first, second, handler) - case token.GTR, token.GEQ, token.LSS, token.LEQ: - if !isNumeric(pass, first) { - return true - } - handler.ReplaceFunction(matcher, ast.NewIdent(beNumerically)) - matcher.Args = []ast.Expr{ - &ast.BasicLit{Kind: token.STRING, Value: fmt.Sprintf(`"%s"`, op.String())}, - second, - } - default: - return true - } - - call.Args = []ast.Expr{first} - reportBuilder.AddIssue(true, wrongCompareWarningTemplate) - return false -} - -func handleEqualComparison(pass *analysis.Pass, matcher *ast.CallExpr, first ast.Expr, second ast.Expr, handler gomegahandler.Handler) { - if isZero(pass, second) { - handler.ReplaceFunction(matcher, ast.NewIdent(beZero)) - matcher.Args = nil - } else { - t := pass.TypesInfo.TypeOf(first) - if gotypes.IsInterface(t) { - handler.ReplaceFunction(matcher, ast.NewIdent(beIdenticalTo)) - } else if is[*gotypes.Pointer](t) { - handler.ReplaceFunction(matcher, ast.NewIdent(beIdenticalTo)) - } else { - handler.ReplaceFunction(matcher, ast.NewIdent(equal)) - } - - matcher.Args = []ast.Expr{second} - } -} - -func handleLenComparison(pass *analysis.Pass, exp *ast.CallExpr, matcher *ast.CallExpr, first ast.Expr, second ast.Expr, op token.Token, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool { - switch op { - case token.EQL: - case token.NEQ: - reverseAssertionFuncLogic(exp) - default: - return false - } - - var eql *ast.Ident - if isZero(pass, second) { - eql = ast.NewIdent(beEmpty) - } else { - eql = ast.NewIdent(haveLen) - matcher.Args = []ast.Expr{second} - } - - handler.ReplaceFunction(matcher, eql) - firstLen, ok := first.(*ast.CallExpr) // assuming it's len() - if !ok { - return false // should never happen - } - - val := firstLen.Args[0] - fun := handler.GetActualExpr(exp.Fun.(*ast.SelectorExpr)) - fun.Args = []ast.Expr{val} - - reportBuilder.AddIssue(true, wrongLengthWarningTemplate) - return true -} - -func handleCapComparison(exp *ast.CallExpr, matcher *ast.CallExpr, first ast.Expr, second ast.Expr, op token.Token, handler 
gomegahandler.Handler, reportBuilder *reports.Builder) bool { - switch op { - case token.EQL: - case token.NEQ: - reverseAssertionFuncLogic(exp) - default: - return false - } - - eql := ast.NewIdent(haveCap) - matcher.Args = []ast.Expr{second} - - handler.ReplaceFunction(matcher, eql) - firstLen, ok := first.(*ast.CallExpr) // assuming it's len() - if !ok { - return false // should never happen - } - - val := firstLen.Args[0] - fun := handler.GetActualExpr(exp.Fun.(*ast.SelectorExpr)) - fun.Args = []ast.Expr{val} - - reportBuilder.AddIssue(true, wrongCapWarningTemplate) - return true -} - -// Check if the "actual" argument is a call to the golang built-in len() function -func isActualIsLenFunc(actualArg ast.Expr) bool { - return checkActualFuncName(actualArg, "len") -} - -// Check if the "actual" argument is a call to the golang built-in len() function -func isActualIsCapFunc(actualArg ast.Expr) bool { - return checkActualFuncName(actualArg, "cap") -} - -func checkActualFuncName(actualArg ast.Expr, name string) bool { - lenArgExp, ok := actualArg.(*ast.CallExpr) - if !ok { - return false - } - - lenFunc, ok := lenArgExp.Fun.(*ast.Ident) - return ok && lenFunc.Name == name -} - -// Check if matcher function is in one of the patterns we want to avoid -func checkLengthMatcher(exp *ast.CallExpr, pass *analysis.Pass, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool { - matcher, ok := exp.Args[0].(*ast.CallExpr) - if !ok { - return true - } - - matcherFuncName, ok := handler.GetActualFuncName(matcher) - if !ok { - return true - } - - switch matcherFuncName { - case equal: - handleEqualLenMatcher(matcher, pass, exp, handler, reportBuilder) - return false - - case beZero: - handleBeZero(exp, handler, reportBuilder) - return false - - case beNumerically: - return handleBeNumerically(matcher, pass, exp, handler, reportBuilder) - - case not: - reverseAssertionFuncLogic(exp) - exp.Args[0] = exp.Args[0].(*ast.CallExpr).Args[0] - return checkLengthMatcher(exp, pass, handler, reportBuilder) - - default: - return true - } -} - -// Check if matcher function is in one of the patterns we want to avoid -func checkCapMatcher(exp *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool { - matcher, ok := exp.Args[0].(*ast.CallExpr) - if !ok { - return true - } - - matcherFuncName, ok := handler.GetActualFuncName(matcher) - if !ok { - return true - } - - switch matcherFuncName { - case equal: - handleEqualCapMatcher(matcher, exp, handler, reportBuilder) - return false - - case beZero: - handleCapBeZero(exp, handler, reportBuilder) - return false - - case beNumerically: - return handleCapBeNumerically(matcher, exp, handler, reportBuilder) - - case not: - reverseAssertionFuncLogic(exp) - exp.Args[0] = exp.Args[0].(*ast.CallExpr).Args[0] - return checkCapMatcher(exp, handler, reportBuilder) - - default: - return true - } -} - -// Check if matcher function is in one of the patterns we want to avoid -func checkNilMatcher(exp *ast.CallExpr, pass *analysis.Pass, nilable ast.Expr, handler gomegahandler.Handler, notEqual bool, reportBuilder *reports.Builder) bool { - matcher, ok := exp.Args[0].(*ast.CallExpr) - if !ok { - return true - } - - matcherFuncName, ok := handler.GetActualFuncName(matcher) - if !ok { - return true - } - - switch matcherFuncName { - case equal: - handleEqualNilMatcher(matcher, pass, exp, handler, nilable, notEqual, reportBuilder) - - case beTrue: - handleNilBeBoolMatcher(pass, exp, handler, nilable, notEqual, reportBuilder) - - case beFalse: - 
reverseAssertionFuncLogic(exp) - handleNilBeBoolMatcher(pass, exp, handler, nilable, notEqual, reportBuilder) - - case not: - reverseAssertionFuncLogic(exp) - exp.Args[0] = exp.Args[0].(*ast.CallExpr).Args[0] - return checkNilMatcher(exp, pass, nilable, handler, notEqual, reportBuilder) - - default: - return true - } - return false -} - -func checkNilError(pass *analysis.Pass, assertionExp *ast.CallExpr, handler gomegahandler.Handler, actualArg ast.Expr, reportBuilder *reports.Builder) bool { - if len(assertionExp.Args) == 0 { - return true - } - - equalFuncExpr, ok := assertionExp.Args[0].(*ast.CallExpr) - if !ok { - return true - } - - funcName, ok := handler.GetActualFuncName(equalFuncExpr) - if !ok { - return true - } - - switch funcName { - case beNil: // no additional processing needed. - case equal: - - if len(equalFuncExpr.Args) == 0 { - return true - } - - nilable, ok := equalFuncExpr.Args[0].(*ast.Ident) - if !ok || nilable.Name != "nil" { - return true - } - - case not: - reverseAssertionFuncLogic(assertionExp) - assertionExp.Args[0] = assertionExp.Args[0].(*ast.CallExpr).Args[0] - return checkNilError(pass, assertionExp, handler, actualArg, reportBuilder) - default: - return true - } - - var newFuncName string - if is[*ast.CallExpr](actualArg) { - newFuncName = succeed - } else { - reverseAssertionFuncLogic(assertionExp) - newFuncName = haveOccurred - } - - handler.ReplaceFunction(equalFuncExpr, ast.NewIdent(newFuncName)) - equalFuncExpr.Args = nil - - reportBuilder.AddIssue(true, wrongErrWarningTemplate) - return false -} - -// handleAssertionOnly checks use-cases when the actual value is valid, but only the assertion should be fixed -// it handles: -// -// Equal(nil) => BeNil() -// Equal(true) => BeTrue() -// Equal(false) => BeFalse() -// HaveLen(0) => BeEmpty() -func handleAssertionOnly(pass *analysis.Pass, config types.Config, expr *ast.CallExpr, handler gomegahandler.Handler, actualArg ast.Expr, reportBuilder *reports.Builder) bool { - if len(expr.Args) == 0 { - return true - } - - equalFuncExpr, ok := expr.Args[0].(*ast.CallExpr) - if !ok { - return true - } - - funcName, ok := handler.GetActualFuncName(equalFuncExpr) - if !ok { - return true - } - - switch funcName { - case equal: - if len(equalFuncExpr.Args) == 0 { - return true - } - - tkn, ok := equalFuncExpr.Args[0].(*ast.Ident) - if !ok { - return true - } - - var replacement string - var template string - switch tkn.Name { - case "nil": - if config.SuppressNil { - return true - } - replacement = beNil - template = wrongNilWarningTemplate - case "true": - replacement = beTrue - template = wrongBoolWarningTemplate - case "false": - if isNegativeAssertion(expr) { - reverseAssertionFuncLogic(expr) - replacement = beTrue - } else { - replacement = beFalse - } - template = wrongBoolWarningTemplate - default: - return true - } - - handler.ReplaceFunction(equalFuncExpr, ast.NewIdent(replacement)) - equalFuncExpr.Args = nil - - reportBuilder.AddIssue(true, template) - return false - - case beFalse: - if isNegativeAssertion(expr) { - reverseAssertionFuncLogic(expr) - handler.ReplaceFunction(equalFuncExpr, ast.NewIdent(beTrue)) - reportBuilder.AddIssue(true, doubleNegativeWarningTemplate) - return false - } - return false - - case haveLen: - if config.AllowHaveLen0 { - return true - } - - if len(equalFuncExpr.Args) > 0 { - if isZero(pass, equalFuncExpr.Args[0]) { - handler.ReplaceFunction(equalFuncExpr, ast.NewIdent(beEmpty)) - equalFuncExpr.Args = nil - reportBuilder.AddIssue(true, wrongLengthWarningTemplate) - return false 
- } - } - - return true - - case not: - reverseAssertionFuncLogic(expr) - expr.Args[0] = expr.Args[0].(*ast.CallExpr).Args[0] - return handleAssertionOnly(pass, config, expr, handler, actualArg, reportBuilder) - default: - return true - } -} - -func isZero(pass *analysis.Pass, arg ast.Expr) bool { - if val, ok := arg.(*ast.BasicLit); ok && val.Kind == token.INT && val.Value == "0" { - return true - } - info, ok := pass.TypesInfo.Types[arg] - if ok { - if t, ok := info.Type.(*gotypes.Basic); ok && t.Kind() == gotypes.Int && info.Value != nil { - if i, ok := constant.Int64Val(info.Value); ok && i == 0 { - return true - } - } - } else if val, ok := arg.(*ast.Ident); ok && val.Obj != nil && val.Obj.Kind == ast.Con { - if spec, ok := val.Obj.Decl.(*ast.ValueSpec); ok { - if len(spec.Values) == 1 { - if value, ok := spec.Values[0].(*ast.BasicLit); ok && value.Kind == token.INT && value.Value == "0" { - return true - } - } - } - } - - return false -} - -// getActualArg checks that the function is an assertion's actual function and return the "actual" parameter. If the -// function is not assertion's actual function, return nil. -func getActualArg(actualExpr *ast.CallExpr, handler gomegahandler.Handler) ast.Expr { - funcName, ok := handler.GetActualFuncName(actualExpr) - if !ok { - return nil - } - - switch funcName { - case expect, omega: - return actualExpr.Args[0] - case expectWithOffset: - return actualExpr.Args[1] - default: - return nil - } -} - -// Replace the len function call by its parameter, to create a fix suggestion -func replaceLenActualArg(actualExpr *ast.CallExpr, handler gomegahandler.Handler) { - name, ok := handler.GetActualFuncName(actualExpr) - if !ok { - return - } - - switch name { - case expect, omega: - arg := actualExpr.Args[0] - if isActualIsLenFunc(arg) || isActualIsCapFunc(arg) { - // replace the len function call by its parameter, to create a fix suggestion - actualExpr.Args[0] = arg.(*ast.CallExpr).Args[0] - } - case expectWithOffset: - arg := actualExpr.Args[1] - if isActualIsLenFunc(arg) || isActualIsCapFunc(arg) { - // replace the len function call by its parameter, to create a fix suggestion - actualExpr.Args[1] = arg.(*ast.CallExpr).Args[0] - } - } -} - -// Replace the nil comparison with the compared object, to create a fix suggestion -func replaceNilActualArg(actualExpr *ast.CallExpr, handler gomegahandler.Handler, nilable ast.Expr) bool { - actualFuncName, ok := handler.GetActualFuncName(actualExpr) - if !ok { - return false - } - - switch actualFuncName { - case expect, omega: - actualExpr.Args[0] = nilable - return true - - case expectWithOffset: - actualExpr.Args[1] = nilable - return true - - default: - return false - } -} - -// For the BeNumerically matcher, we want to avoid the assertion of length to be > 0 or >= 1, or just == number -func handleBeNumerically(matcher *ast.CallExpr, pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool { - opExp, ok1 := matcher.Args[0].(*ast.BasicLit) - valExp, ok2 := matcher.Args[1].(*ast.BasicLit) - - if ok1 && ok2 { - op := opExp.Value - val := valExp.Value - - if (op == `">"` && val == "0") || (op == `">="` && val == "1") { - reverseAssertionFuncLogic(exp) - handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(beEmpty)) - exp.Args[0].(*ast.CallExpr).Args = nil - } else if op == `"=="` { - chooseNumericMatcher(pass, exp, handler, valExp) - } else if op == `"!="` { - reverseAssertionFuncLogic(exp) - chooseNumericMatcher(pass, exp, handler, valExp) - } 
else { - return true - } - - reportLengthAssertion(exp, handler, reportBuilder) - return false - } - return true -} - -// For the BeNumerically matcher, we want to avoid the assertion of length to be > 0 or >= 1, or just == number -func handleCapBeNumerically(matcher *ast.CallExpr, exp *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool { - opExp, ok1 := matcher.Args[0].(*ast.BasicLit) - valExp, ok2 := matcher.Args[1].(*ast.BasicLit) - - if ok1 && ok2 { - op := opExp.Value - val := valExp.Value - - if (op == `">"` && val == "0") || (op == `">="` && val == "1") { - reverseAssertionFuncLogic(exp) - handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(haveCap)) - exp.Args[0].(*ast.CallExpr).Args = []ast.Expr{&ast.BasicLit{Kind: token.INT, Value: "0"}} - } else if op == `"=="` { - replaceNumericCapMatcher(exp, handler, valExp) - } else if op == `"!="` { - reverseAssertionFuncLogic(exp) - replaceNumericCapMatcher(exp, handler, valExp) - } else { - return true - } - - reportCapAssertion(exp, handler, reportBuilder) - return false - } - return true -} - -func chooseNumericMatcher(pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, valExp ast.Expr) { - caller := exp.Args[0].(*ast.CallExpr) - if isZero(pass, valExp) { - handler.ReplaceFunction(caller, ast.NewIdent(beEmpty)) - exp.Args[0].(*ast.CallExpr).Args = nil - } else { - handler.ReplaceFunction(caller, ast.NewIdent(haveLen)) - exp.Args[0].(*ast.CallExpr).Args = []ast.Expr{valExp} - } -} - -func replaceNumericCapMatcher(exp *ast.CallExpr, handler gomegahandler.Handler, valExp ast.Expr) { - caller := exp.Args[0].(*ast.CallExpr) - handler.ReplaceFunction(caller, ast.NewIdent(haveCap)) - exp.Args[0].(*ast.CallExpr).Args = []ast.Expr{valExp} -} - -func reverseAssertionFuncLogic(exp *ast.CallExpr) { - assertionFunc := exp.Fun.(*ast.SelectorExpr).Sel - assertionFunc.Name = reverseassertion.ChangeAssertionLogic(assertionFunc.Name) -} - -func isNegativeAssertion(exp *ast.CallExpr) bool { - assertionFunc := exp.Fun.(*ast.SelectorExpr).Sel - return reverseassertion.IsNegativeLogic(assertionFunc.Name) -} - -func handleEqualLenMatcher(matcher *ast.CallExpr, pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) { - equalTo, ok := matcher.Args[0].(*ast.BasicLit) - if ok { - chooseNumericMatcher(pass, exp, handler, equalTo) - } else { - handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(haveLen)) - exp.Args[0].(*ast.CallExpr).Args = []ast.Expr{matcher.Args[0]} - } - reportLengthAssertion(exp, handler, reportBuilder) -} - -func handleEqualCapMatcher(matcher *ast.CallExpr, exp *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) { - handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(haveCap)) - exp.Args[0].(*ast.CallExpr).Args = []ast.Expr{matcher.Args[0]} - reportCapAssertion(exp, handler, reportBuilder) -} - -func handleBeZero(exp *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) { - exp.Args[0].(*ast.CallExpr).Args = nil - handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(beEmpty)) - reportLengthAssertion(exp, handler, reportBuilder) -} - -func handleCapBeZero(exp *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) { - exp.Args[0].(*ast.CallExpr).Args = nil - handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(haveCap)) - exp.Args[0].(*ast.CallExpr).Args = []ast.Expr{&ast.BasicLit{Kind: token.INT, Value: "0"}} 
- reportCapAssertion(exp, handler, reportBuilder) -} - -func handleEqualNilMatcher(matcher *ast.CallExpr, pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, nilable ast.Expr, notEqual bool, reportBuilder *reports.Builder) { - equalTo, ok := matcher.Args[0].(*ast.Ident) - if !ok { - return - } - - if equalTo.Name == "false" { - reverseAssertionFuncLogic(exp) - } else if equalTo.Name != "true" { - return - } - - newFuncName, isItError := handleNilComparisonErr(pass, exp, nilable) - - handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(newFuncName)) - exp.Args[0].(*ast.CallExpr).Args = nil - - reportNilAssertion(exp, handler, nilable, notEqual, isItError, reportBuilder) -} - -func handleNilBeBoolMatcher(pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, nilable ast.Expr, notEqual bool, reportBuilder *reports.Builder) { - newFuncName, isItError := handleNilComparisonErr(pass, exp, nilable) - handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(newFuncName)) - exp.Args[0].(*ast.CallExpr).Args = nil - - reportNilAssertion(exp, handler, nilable, notEqual, isItError, reportBuilder) -} - -func handleNilComparisonErr(pass *analysis.Pass, exp *ast.CallExpr, nilable ast.Expr) (string, bool) { - newFuncName := beNil - isItError := isExprError(pass, nilable) - if isItError { - if is[*ast.CallExpr](nilable) { - newFuncName = succeed - } else { - reverseAssertionFuncLogic(exp) - newFuncName = haveOccurred - } - } - - return newFuncName, isItError -} - -func isAssertionFunc(name string) bool { - switch name { - case "To", "ToNot", "NotTo", "Should", "ShouldNot": - return true - } - return false -} - -func reportLengthAssertion(expr *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) { - actualExpr := handler.GetActualExpr(expr.Fun.(*ast.SelectorExpr)) - replaceLenActualArg(actualExpr, handler) - - reportBuilder.AddIssue(true, wrongLengthWarningTemplate) -} - -func reportCapAssertion(expr *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) { - actualExpr := handler.GetActualExpr(expr.Fun.(*ast.SelectorExpr)) - replaceLenActualArg(actualExpr, handler) - - reportBuilder.AddIssue(true, wrongCapWarningTemplate) -} - -func reportNilAssertion(expr *ast.CallExpr, handler gomegahandler.Handler, nilable ast.Expr, notEqual bool, isItError bool, reportBuilder *reports.Builder) { - actualExpr := handler.GetActualExpr(expr.Fun.(*ast.SelectorExpr)) - changed := replaceNilActualArg(actualExpr, handler, nilable) - if !changed { - return - } - - if notEqual { - reverseAssertionFuncLogic(expr) - } - template := wrongNilWarningTemplate - if isItError { - template = wrongErrWarningTemplate - } - - reportBuilder.AddIssue(true, template) -} - -func reportNewName(pass *analysis.Pass, id *ast.Ident, newName string, messageTemplate, oldExpr string) { - pass.Report(analysis.Diagnostic{ - Pos: id.Pos(), - Message: fmt.Sprintf(messageTemplate, newName), - SuggestedFixes: []analysis.SuggestedFix{ - { - Message: fmt.Sprintf("should replace %s with %s", oldExpr, newName), - TextEdits: []analysis.TextEdit{ - { - Pos: id.Pos(), - End: id.End(), - NewText: []byte(newName), - }, - }, - }, - }, - }) -} - -func reportNoFix(pass *analysis.Pass, pos token.Pos, message string, args ...any) { - if len(args) > 0 { - message = fmt.Sprintf(message, args...) 
- } - - pass.Report(analysis.Diagnostic{ - Pos: pos, - Message: message, - }) -} - -func getNilableFromComparison(actualArg ast.Expr) (ast.Expr, token.Token) { - bin, ok := actualArg.(*ast.BinaryExpr) - if !ok { - return nil, token.ILLEGAL - } - - if bin.Op == token.EQL || bin.Op == token.NEQ { - if isNil(bin.Y) { - return bin.X, bin.Op - } else if isNil(bin.X) { - return bin.Y, bin.Op - } - } - - return nil, token.ILLEGAL -} - -func isNil(expr ast.Expr) bool { - nilObject, ok := expr.(*ast.Ident) - return ok && nilObject.Name == "nil" && nilObject.Obj == nil -} - -func isComparison(pass *analysis.Pass, actualArg ast.Expr) (ast.Expr, ast.Expr, token.Token, bool) { - bin, ok := actualArg.(*ast.BinaryExpr) - if !ok { - return nil, nil, token.ILLEGAL, false - } - - first, second, op := bin.X, bin.Y, bin.Op - replace := false - switch realFirst := first.(type) { - case *ast.Ident: // check if const - info, ok := pass.TypesInfo.Types[realFirst] - if ok { - if is[*gotypes.Basic](info.Type) && info.Value != nil { - replace = true - } - } - - case *ast.BasicLit: - replace = true - } - - if replace { - first, second = second, first - } - - switch op { - case token.EQL: - case token.NEQ: - case token.GTR, token.GEQ, token.LSS, token.LEQ: - if replace { - op = reverseassertion.ChangeCompareOperator(op) - } - default: - return nil, nil, token.ILLEGAL, false - } - return first, second, op, true -} - -func goFmt(fset *token.FileSet, x ast.Expr) string { - var b bytes.Buffer - _ = printer.Fprint(&b, fset, x) - return b.String() -} - -func isExprError(pass *analysis.Pass, expr ast.Expr) bool { - actualArgType := pass.TypesInfo.TypeOf(expr) - switch t := actualArgType.(type) { - case *gotypes.Named: - if interfaces.ImplementsError(actualArgType) { - return true - } - case *gotypes.Tuple: - if t.Len() > 0 { - switch t0 := t.At(0).Type().(type) { - case *gotypes.Named, *gotypes.Pointer: - if interfaces.ImplementsError(t0) { - return true - } - } - } - } - return false -} - -func isPointer(pass *analysis.Pass, expr ast.Expr) bool { - t := pass.TypesInfo.TypeOf(expr) - return is[*gotypes.Pointer](t) -} - -func isInterface(pass *analysis.Pass, expr ast.Expr) bool { - t := pass.TypesInfo.TypeOf(expr) - return gotypes.IsInterface(t) -} - -func isNumeric(pass *analysis.Pass, node ast.Expr) bool { - t := pass.TypesInfo.TypeOf(node) - - switch t.String() { - case "int", "uint", "int8", "uint8", "int16", "uint16", "int32", "uint32", "int64", "uint64", "float32", "float64": - return true - } - return false -} - -func checkNoAssertion(pass *analysis.Pass, expr *ast.CallExpr, handler gomegahandler.Handler) { - funcName, ok := handler.GetActualFuncName(expr) - if ok { - var allowedFunction string - switch funcName { - case expect, expectWithOffset: - allowedFunction = `"To()", "ToNot()" or "NotTo()"` - case eventually, eventuallyWithOffset, consistently, consistentlyWithOffset: - allowedFunction = `"Should()" or "ShouldNot()"` - case omega: - allowedFunction = `"Should()", "To()", "ShouldNot()", "ToNot()" or "NotTo()"` - default: - return - } - reportNoFix(pass, expr.Pos(), missingAssertionMessage, funcName, allowedFunction) - } -} - -func is[T any](x any) bool { - _, matchType := x.(T) - return matchType + return timePkg } diff --git a/vendor/github.com/nunnatsa/ginkgolinter/types/config.go b/vendor/github.com/nunnatsa/ginkgolinter/types/config.go index b6838e5244..0aadd3416f 100644 --- a/vendor/github.com/nunnatsa/ginkgolinter/types/config.go +++ b/vendor/github.com/nunnatsa/ginkgolinter/types/config.go @@ -28,6 +28,7 
@@ type Config struct { ForceExpectTo Boolean ValidateAsyncIntervals Boolean ForbidSpecPollution Boolean + ForceSucceedForFuncs Boolean } func (s *Config) AllTrue() bool { @@ -47,6 +48,7 @@ func (s *Config) Clone() Config { ForceExpectTo: s.ForceExpectTo, ValidateAsyncIntervals: s.ValidateAsyncIntervals, ForbidSpecPollution: s.ForbidSpecPollution, + ForceSucceedForFuncs: s.ForceSucceedForFuncs, } } diff --git a/vendor/github.com/raeperd/recvcheck/.gitignore b/vendor/github.com/raeperd/recvcheck/.gitignore new file mode 100644 index 0000000000..035dc07e37 --- /dev/null +++ b/vendor/github.com/raeperd/recvcheck/.gitignore @@ -0,0 +1,2 @@ +coverage.txt +cmd/recvcheck/recvcheck diff --git a/vendor/github.com/lufeee/execinquery/LICENSE b/vendor/github.com/raeperd/recvcheck/LICENSE similarity index 97% rename from vendor/github.com/lufeee/execinquery/LICENSE rename to vendor/github.com/raeperd/recvcheck/LICENSE index b6ab14aec3..a46db59be3 100644 --- a/vendor/github.com/lufeee/execinquery/LICENSE +++ b/vendor/github.com/raeperd/recvcheck/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2022 lufe +Copyright (c) 2024 raeperd Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/vendor/github.com/raeperd/recvcheck/Makefile b/vendor/github.com/raeperd/recvcheck/Makefile new file mode 100644 index 0000000000..45ca47d9b6 --- /dev/null +++ b/vendor/github.com/raeperd/recvcheck/Makefile @@ -0,0 +1,14 @@ +all: build test lint + +download: + go mod download + +build: download + go build -C cmd/recvcheck + +test: + go test -race -coverprofile=coverage.txt . + +lint: + golangci-lint run + diff --git a/vendor/github.com/raeperd/recvcheck/README.md b/vendor/github.com/raeperd/recvcheck/README.md new file mode 100644 index 0000000000..db84fe38e2 --- /dev/null +++ b/vendor/github.com/raeperd/recvcheck/README.md @@ -0,0 +1,52 @@ +# recvcheck +[![.github/workflows/build.yaml](https://github.com/raeperd/recvcheck/actions/workflows/build.yaml/badge.svg)](https://github.com/raeperd/recvcheck/actions/workflows/build.yaml) [![Go Report Card](https://goreportcard.com/badge/github.com/raeperd/recvcheck)](https://goreportcard.com/report/github.com/raeperd/recvcheck) [![codecov](https://codecov.io/gh/raeperd/recvcheck/graph/badge.svg?token=fPYgEHlq1e)](https://codecov.io/gh/raeperd/recvcheck) +Golang linter for checking receiver type consistency in methods + +## Motivation +From [Go Wiki: Go Code Review Comments - The Go Programming Language](https://go.dev/wiki/CodeReviewComments#receiver-type) +> Don’t mix receiver types. Choose either pointers or struct types for all available methods + +The following code from [Dave Cheney](https://dave.cheney.net/2015/11/18/wednesday-pop-quiz-spot-the-race) causes a data race. Could you find it? +This linter does it for you.
+ +```go +package main + +import ( + "fmt" + "time" +) + +type RPC struct { + result int + done chan struct{} +} + +func (rpc *RPC) compute() { + time.Sleep(time.Second) // strenuous computation intensifies + rpc.result = 42 + close(rpc.done) +} + +func (RPC) version() int { + return 1 // never going to need to change this +} + +func main() { + rpc := &RPC{done: make(chan struct{})} + + go rpc.compute() // kick off computation in the background + version := rpc.version() // grab some other information while we're waiting + <-rpc.done // wait for computation to finish + result := rpc.result + + fmt.Printf("RPC computation complete, result: %d, version: %d\n", result, version) +} +``` + +## References +- [Is there a way to detect following data race code using golangci-lint or other linter?? · golangci/golangci-lint · Discussion #5006](https://github.com/golangci/golangci-lint/discussions/5006) + - [Wednesday pop quiz: spot the race | Dave Cheney](https://dave.cheney.net/2015/11/18/wednesday-pop-quiz-spot-the-race) + + + diff --git a/vendor/github.com/raeperd/recvcheck/analyzer.go b/vendor/github.com/raeperd/recvcheck/analyzer.go new file mode 100644 index 0000000000..e80dfc5776 --- /dev/null +++ b/vendor/github.com/raeperd/recvcheck/analyzer.go @@ -0,0 +1,69 @@ +package recvcheck + +import ( + "go/ast" + + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +var Analyzer = &analysis.Analyzer{ + Name: "recvcheck", + Doc: "checks for receiver type consistency", + Run: run, + Requires: []*analysis.Analyzer{inspect.Analyzer}, +} + +func run(pass *analysis.Pass) (any, error) { + inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + structs := map[string]*structType{} + inspector.Preorder([]ast.Node{(*ast.FuncDecl)(nil)}, func(n ast.Node) { + funcDecl, ok := n.(*ast.FuncDecl) + if !ok || funcDecl.Recv == nil || len(funcDecl.Recv.List) != 1 { + return + } + + var recv *ast.Ident + var isStar bool + switch recvType := funcDecl.Recv.List[0].Type.(type) { + case *ast.StarExpr: + isStar = true + if recv, ok = recvType.X.(*ast.Ident); !ok { + return + } + case *ast.Ident: + recv = recvType + default: + return + } + + var st *structType + st, ok = structs[recv.Name] + if !ok { + structs[recv.Name] = &structType{recv: recv.Name} + st = structs[recv.Name] + } + + if isStar { + st.numStarMethod++ + } else { + st.numTypeMethod++ + } + }) + + for _, st := range structs { + if st.numStarMethod > 0 && st.numTypeMethod > 0 { + pass.Reportf(pass.Pkg.Scope().Lookup(st.recv).Pos(), "the methods of %q use pointer receiver and non-pointer receiver.", st.recv) + } + } + + return nil, nil +} + +type structType struct { + recv string + numStarMethod int + numTypeMethod int +} diff --git a/vendor/github.com/rivo/uniseg/README.md b/vendor/github.com/rivo/uniseg/README.md index 25e9346874..a8191b8154 100644 --- a/vendor/github.com/rivo/uniseg/README.md +++ b/vendor/github.com/rivo/uniseg/README.md @@ -3,7 +3,7 @@ [![Go Reference](https://pkg.go.dev/badge/github.com/rivo/uniseg.svg)](https://pkg.go.dev/github.com/rivo/uniseg) [![Go Report](https://img.shields.io/badge/go%20report-A%2B-brightgreen.svg)](https://goreportcard.com/report/github.com/rivo/uniseg) -This Go package implements Unicode Text Segmentation according to [Unicode Standard Annex #29](https://unicode.org/reports/tr29/), Unicode Line Breaking according to [Unicode Standard Annex #14](https://unicode.org/reports/tr14/) (Unicode version 14.0.0), and monospace font 
string width calculation similar to [wcwidth](https://man7.org/linux/man-pages/man3/wcwidth.3.html). +This Go package implements Unicode Text Segmentation according to [Unicode Standard Annex #29](https://unicode.org/reports/tr29/), Unicode Line Breaking according to [Unicode Standard Annex #14](https://unicode.org/reports/tr14/) (Unicode version 15.0.0), and monospace font string width calculation similar to [wcwidth](https://man7.org/linux/man-pages/man3/wcwidth.3.html). ## Background @@ -73,7 +73,7 @@ for gr.Next() { ### Using the [`Step`](https://pkg.go.dev/github.com/rivo/uniseg#Step) or [`StepString`](https://pkg.go.dev/github.com/rivo/uniseg#StepString) Function -This is orders of magnitude faster than the `Graphemes` class, but it requires the handling of states and boundaries: +This avoids allocating a new `Graphemes` object but it requires the handling of states and boundaries: ```go str := "🇩🇪🏳️‍🌈" @@ -88,29 +88,7 @@ for len(str) > 0 { ### Advanced Examples -Breaking into grapheme clusters and evaluating line breaks: - -```go -str := "First line.\nSecond line." -state := -1 -var ( - c string - boundaries int -) -for len(str) > 0 { - c, str, boundaries, state = uniseg.StepString(str, state) - fmt.Print(c) - if boundaries&uniseg.MaskLine == uniseg.LineCanBreak { - fmt.Print("|") - } else if boundaries&uniseg.MaskLine == uniseg.LineMustBreak { - fmt.Print("‖") - } -} -// First |line. -// ‖Second |line.‖ -``` - -If you're only interested in word segmentation, use [`FirstWord`](https://pkg.go.dev/github.com/rivo/uniseg#FirstWord) or [`FirstWordInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstWordInString): +The [`Graphemes`](https://pkg.go.dev/github.com/rivo/uniseg#Graphemes) class offers the most convenient way to access all functionality of this package. But in some cases, it may be better to use the specialized functions directly. For example, if you're only interested in word segmentation, use [`FirstWord`](https://pkg.go.dev/github.com/rivo/uniseg#FirstWord) or [`FirstWordInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstWordInString): ```go str := "Hello, world!" @@ -133,6 +111,8 @@ Similarly, use - [`FirstSentence`](https://pkg.go.dev/github.com/rivo/uniseg#FirstSentence) or [`FirstSentenceInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstSentenceInString) for sentence segmentation only, and - [`FirstLineSegment`](https://pkg.go.dev/github.com/rivo/uniseg#FirstLineSegment) or [`FirstLineSegmentInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstLineSegmentInString) for line breaking / word wrapping (although using [`Step`](https://pkg.go.dev/github.com/rivo/uniseg#Step) or [`StepString`](https://pkg.go.dev/github.com/rivo/uniseg#StepString) is preferred as it will observe grapheme cluster boundaries). +If you're only interested in the width of characters, use [`FirstGraphemeCluster`](https://pkg.go.dev/github.com/rivo/uniseg#FirstGraphemeCluster) or [`FirstGraphemeClusterInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstGraphemeClusterInString). It is much faster than using [`Step`](https://pkg.go.dev/github.com/rivo/uniseg#Step), [`StepString`](https://pkg.go.dev/github.com/rivo/uniseg#StepString), or the [`Graphemes`](https://pkg.go.dev/github.com/rivo/uniseg#Graphemes) class because it does not include the logic for word / sentence / line boundaries. 
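+For example, a minimal sketch (mirroring the `StepString` loop above) that prints the monospace width of each grapheme cluster:
+
+```go
+str := "🇩🇪🏳️‍🌈"
+state := -1
+var cluster string
+var width int
+for len(str) > 0 {
+	cluster, str, width, state = uniseg.FirstGraphemeClusterInString(str, state)
+	fmt.Printf("%s has width %d\n", cluster, width)
+}
+```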
+ Finally, if you need to reverse a string while preserving grapheme clusters, use [`ReverseString`](https://pkg.go.dev/github.com/rivo/uniseg#ReverseString): ```go diff --git a/vendor/github.com/rivo/uniseg/eastasianwidth.go b/vendor/github.com/rivo/uniseg/eastasianwidth.go index 661934ac2d..5fc54d9915 100644 --- a/vendor/github.com/rivo/uniseg/eastasianwidth.go +++ b/vendor/github.com/rivo/uniseg/eastasianwidth.go @@ -1,13 +1,13 @@ -package uniseg - // Code generated via go generate from gen_properties.go. DO NOT EDIT. +package uniseg + // eastAsianWidth are taken from -// https://www.unicode.org/Public/14.0.0/ucd/EastAsianWidth.txt +// https://www.unicode.org/Public/15.0.0/ucd/EastAsianWidth.txt // and -// https://unicode.org/Public/14.0.0/ucd/emoji/emoji-data.txt +// https://unicode.org/Public/15.0.0/ucd/emoji/emoji-data.txt // ("Extended_Pictographic" only) -// on September 10, 2022. See https://www.unicode.org/license.html for the Unicode +// on September 5, 2023. See https://www.unicode.org/license.html for the Unicode // license agreement. var eastAsianWidth = [][3]int{ {0x0000, 0x001F, prN}, // Cc [32] .. @@ -504,6 +504,7 @@ var eastAsianWidth = [][3]int{ {0x0CE2, 0x0CE3, prN}, // Mn [2] KANNADA VOWEL SIGN VOCALIC L..KANNADA VOWEL SIGN VOCALIC LL {0x0CE6, 0x0CEF, prN}, // Nd [10] KANNADA DIGIT ZERO..KANNADA DIGIT NINE {0x0CF1, 0x0CF2, prN}, // Lo [2] KANNADA SIGN JIHVAMULIYA..KANNADA SIGN UPADHMANIYA + {0x0CF3, 0x0CF3, prN}, // Mc KANNADA SIGN COMBINING ANUSVARA ABOVE RIGHT {0x0D00, 0x0D01, prN}, // Mn [2] MALAYALAM SIGN COMBINING ANUSVARA ABOVE..MALAYALAM SIGN CANDRABINDU {0x0D02, 0x0D03, prN}, // Mc [2] MALAYALAM SIGN ANUSVARA..MALAYALAM SIGN VISARGA {0x0D04, 0x0D0C, prN}, // Lo [9] MALAYALAM LETTER VEDIC ANUSVARA..MALAYALAM LETTER VOCALIC L @@ -565,7 +566,7 @@ var eastAsianWidth = [][3]int{ {0x0EBD, 0x0EBD, prN}, // Lo LAO SEMIVOWEL SIGN NYO {0x0EC0, 0x0EC4, prN}, // Lo [5] LAO VOWEL SIGN E..LAO VOWEL SIGN AI {0x0EC6, 0x0EC6, prN}, // Lm LAO KO LA - {0x0EC8, 0x0ECD, prN}, // Mn [6] LAO TONE MAI EK..LAO NIGGAHITA + {0x0EC8, 0x0ECE, prN}, // Mn [7] LAO TONE MAI EK..LAO YAMAKKAN {0x0ED0, 0x0ED9, prN}, // Nd [10] LAO DIGIT ZERO..LAO DIGIT NINE {0x0EDC, 0x0EDF, prN}, // Lo [4] LAO HO NO..LAO LETTER KHMU NYO {0x0F00, 0x0F00, prN}, // Lo TIBETAN SYLLABLE OM @@ -1916,6 +1917,7 @@ var eastAsianWidth = [][3]int{ {0x10EAB, 0x10EAC, prN}, // Mn [2] YEZIDI COMBINING HAMZA MARK..YEZIDI COMBINING MADDA MARK {0x10EAD, 0x10EAD, prN}, // Pd YEZIDI HYPHENATION MARK {0x10EB0, 0x10EB1, prN}, // Lo [2] YEZIDI LETTER LAM WITH DOT ABOVE..YEZIDI LETTER YOT WITH CIRCUMFLEX ABOVE + {0x10EFD, 0x10EFF, prN}, // Mn [3] ARABIC SMALL LOW WORD SAKTA..ARABIC SMALL LOW WORD MADDA {0x10F00, 0x10F1C, prN}, // Lo [29] OLD SOGDIAN LETTER ALEPH..OLD SOGDIAN LETTER FINAL TAW WITH VERTICAL TAIL {0x10F1D, 0x10F26, prN}, // No [10] OLD SOGDIAN NUMBER ONE..OLD SOGDIAN FRACTION ONE HALF {0x10F27, 0x10F27, prN}, // Lo OLD SOGDIAN LIGATURE AYIN-DALETH @@ -1998,6 +2000,8 @@ var eastAsianWidth = [][3]int{ {0x11236, 0x11237, prN}, // Mn [2] KHOJKI SIGN NUKTA..KHOJKI SIGN SHADDA {0x11238, 0x1123D, prN}, // Po [6] KHOJKI DANDA..KHOJKI ABBREVIATION SIGN {0x1123E, 0x1123E, prN}, // Mn KHOJKI SIGN SUKUN + {0x1123F, 0x11240, prN}, // Lo [2] KHOJKI LETTER QA..KHOJKI LETTER SHORT I + {0x11241, 0x11241, prN}, // Mn KHOJKI VOWEL SIGN VOCALIC R {0x11280, 0x11286, prN}, // Lo [7] MULTANI LETTER A..MULTANI LETTER GA {0x11288, 0x11288, prN}, // Lo MULTANI LETTER GHA {0x1128A, 0x1128D, prN}, // Lo [4] MULTANI LETTER CA..MULTANI LETTER JJA @@ 
-2160,6 +2164,7 @@ var eastAsianWidth = [][3]int{ {0x11A9E, 0x11AA2, prN}, // Po [5] SOYOMBO HEAD MARK WITH MOON AND SUN AND TRIPLE FLAME..SOYOMBO TERMINAL MARK-2 {0x11AB0, 0x11ABF, prN}, // Lo [16] CANADIAN SYLLABICS NATTILIK HI..CANADIAN SYLLABICS SPA {0x11AC0, 0x11AF8, prN}, // Lo [57] PAU CIN HAU LETTER PA..PAU CIN HAU GLOTTAL STOP FINAL + {0x11B00, 0x11B09, prN}, // Po [10] DEVANAGARI HEAD MARK..DEVANAGARI SIGN MINDU {0x11C00, 0x11C08, prN}, // Lo [9] BHAIKSUKI LETTER A..BHAIKSUKI LETTER VOCALIC L {0x11C0A, 0x11C2E, prN}, // Lo [37] BHAIKSUKI LETTER E..BHAIKSUKI LETTER HA {0x11C2F, 0x11C2F, prN}, // Mc BHAIKSUKI VOWEL SIGN AA @@ -2205,6 +2210,19 @@ var eastAsianWidth = [][3]int{ {0x11EF3, 0x11EF4, prN}, // Mn [2] MAKASAR VOWEL SIGN I..MAKASAR VOWEL SIGN U {0x11EF5, 0x11EF6, prN}, // Mc [2] MAKASAR VOWEL SIGN E..MAKASAR VOWEL SIGN O {0x11EF7, 0x11EF8, prN}, // Po [2] MAKASAR PASSIMBANG..MAKASAR END OF SECTION + {0x11F00, 0x11F01, prN}, // Mn [2] KAWI SIGN CANDRABINDU..KAWI SIGN ANUSVARA + {0x11F02, 0x11F02, prN}, // Lo KAWI SIGN REPHA + {0x11F03, 0x11F03, prN}, // Mc KAWI SIGN VISARGA + {0x11F04, 0x11F10, prN}, // Lo [13] KAWI LETTER A..KAWI LETTER O + {0x11F12, 0x11F33, prN}, // Lo [34] KAWI LETTER KA..KAWI LETTER JNYA + {0x11F34, 0x11F35, prN}, // Mc [2] KAWI VOWEL SIGN AA..KAWI VOWEL SIGN ALTERNATE AA + {0x11F36, 0x11F3A, prN}, // Mn [5] KAWI VOWEL SIGN I..KAWI VOWEL SIGN VOCALIC R + {0x11F3E, 0x11F3F, prN}, // Mc [2] KAWI VOWEL SIGN E..KAWI VOWEL SIGN AI + {0x11F40, 0x11F40, prN}, // Mn KAWI VOWEL SIGN EU + {0x11F41, 0x11F41, prN}, // Mc KAWI SIGN KILLER + {0x11F42, 0x11F42, prN}, // Mn KAWI CONJOINER + {0x11F43, 0x11F4F, prN}, // Po [13] KAWI DANDA..KAWI PUNCTUATION CLOSING SPIRAL + {0x11F50, 0x11F59, prN}, // Nd [10] KAWI DIGIT ZERO..KAWI DIGIT NINE {0x11FB0, 0x11FB0, prN}, // Lo LISU LETTER YHA {0x11FC0, 0x11FD4, prN}, // No [21] TAMIL FRACTION ONE THREE-HUNDRED-AND-TWENTIETH..TAMIL FRACTION DOWNSCALING FACTOR KIIZH {0x11FD5, 0x11FDC, prN}, // So [8] TAMIL SIGN NEL..TAMIL SIGN MUKKURUNI @@ -2217,8 +2235,11 @@ var eastAsianWidth = [][3]int{ {0x12480, 0x12543, prN}, // Lo [196] CUNEIFORM SIGN AB TIMES NUN TENU..CUNEIFORM SIGN ZU5 TIMES THREE DISH TENU {0x12F90, 0x12FF0, prN}, // Lo [97] CYPRO-MINOAN SIGN CM001..CYPRO-MINOAN SIGN CM114 {0x12FF1, 0x12FF2, prN}, // Po [2] CYPRO-MINOAN SIGN CM301..CYPRO-MINOAN SIGN CM302 - {0x13000, 0x1342E, prN}, // Lo [1071] EGYPTIAN HIEROGLYPH A001..EGYPTIAN HIEROGLYPH AA032 - {0x13430, 0x13438, prN}, // Cf [9] EGYPTIAN HIEROGLYPH VERTICAL JOINER..EGYPTIAN HIEROGLYPH END SEGMENT + {0x13000, 0x1342F, prN}, // Lo [1072] EGYPTIAN HIEROGLYPH A001..EGYPTIAN HIEROGLYPH V011D + {0x13430, 0x1343F, prN}, // Cf [16] EGYPTIAN HIEROGLYPH VERTICAL JOINER..EGYPTIAN HIEROGLYPH END WALLED ENCLOSURE + {0x13440, 0x13440, prN}, // Mn EGYPTIAN HIEROGLYPH MIRROR HORIZONTALLY + {0x13441, 0x13446, prN}, // Lo [6] EGYPTIAN HIEROGLYPH FULL BLANK..EGYPTIAN HIEROGLYPH WIDE LOST SIGN + {0x13447, 0x13455, prN}, // Mn [15] EGYPTIAN HIEROGLYPH MODIFIER DAMAGED AT TOP START..EGYPTIAN HIEROGLYPH MODIFIER DAMAGED {0x14400, 0x14646, prN}, // Lo [583] ANATOLIAN HIEROGLYPH A001..ANATOLIAN HIEROGLYPH A530 {0x16800, 0x16A38, prN}, // Lo [569] BAMUM LETTER PHASE-A NGKUE MFON..BAMUM LETTER PHASE-F VUEQ {0x16A40, 0x16A5E, prN}, // Lo [31] MRO LETTER TA..MRO LETTER TEK @@ -2263,7 +2284,9 @@ var eastAsianWidth = [][3]int{ {0x1AFFD, 0x1AFFE, prW}, // Lm [2] KATAKANA LETTER MINNAN NASALIZED TONE-7..KATAKANA LETTER MINNAN NASALIZED TONE-8 {0x1B000, 0x1B0FF, prW}, // Lo [256] KATAKANA LETTER 
ARCHAIC E..HENTAIGANA LETTER RE-2 {0x1B100, 0x1B122, prW}, // Lo [35] HENTAIGANA LETTER RE-3..KATAKANA LETTER ARCHAIC WU + {0x1B132, 0x1B132, prW}, // Lo HIRAGANA LETTER SMALL KO {0x1B150, 0x1B152, prW}, // Lo [3] HIRAGANA LETTER SMALL WI..HIRAGANA LETTER SMALL WO + {0x1B155, 0x1B155, prW}, // Lo KATAKANA LETTER SMALL KO {0x1B164, 0x1B167, prW}, // Lo [4] KATAKANA LETTER SMALL WI..KATAKANA LETTER SMALL N {0x1B170, 0x1B2FB, prW}, // Lo [396] NUSHU CHARACTER-1B170..NUSHU CHARACTER-1B2FB {0x1BC00, 0x1BC6A, prN}, // Lo [107] DUPLOYAN LETTER H..DUPLOYAN LETTER VOCALIC M @@ -2294,6 +2317,7 @@ var eastAsianWidth = [][3]int{ {0x1D200, 0x1D241, prN}, // So [66] GREEK VOCAL NOTATION SYMBOL-1..GREEK INSTRUMENTAL NOTATION SYMBOL-54 {0x1D242, 0x1D244, prN}, // Mn [3] COMBINING GREEK MUSICAL TRISEME..COMBINING GREEK MUSICAL PENTASEME {0x1D245, 0x1D245, prN}, // So GREEK MUSICAL LEIMMA + {0x1D2C0, 0x1D2D3, prN}, // No [20] KAKTOVIK NUMERAL ZERO..KAKTOVIK NUMERAL NINETEEN {0x1D2E0, 0x1D2F3, prN}, // No [20] MAYAN NUMERAL ZERO..MAYAN NUMERAL NINETEEN {0x1D300, 0x1D356, prN}, // So [87] MONOGRAM FOR EARTH..TETRAGRAM FOR FOSTERING {0x1D360, 0x1D378, prN}, // No [25] COUNTING ROD UNIT DIGIT ONE..TALLY MARK FIVE @@ -2353,11 +2377,14 @@ var eastAsianWidth = [][3]int{ {0x1DF00, 0x1DF09, prN}, // Ll [10] LATIN SMALL LETTER FENG DIGRAPH WITH TRILL..LATIN SMALL LETTER T WITH HOOK AND RETROFLEX HOOK {0x1DF0A, 0x1DF0A, prN}, // Lo LATIN LETTER RETROFLEX CLICK WITH RETROFLEX HOOK {0x1DF0B, 0x1DF1E, prN}, // Ll [20] LATIN SMALL LETTER ESH WITH DOUBLE BAR..LATIN SMALL LETTER S WITH CURL + {0x1DF25, 0x1DF2A, prN}, // Ll [6] LATIN SMALL LETTER D WITH MID-HEIGHT LEFT HOOK..LATIN SMALL LETTER T WITH MID-HEIGHT LEFT HOOK {0x1E000, 0x1E006, prN}, // Mn [7] COMBINING GLAGOLITIC LETTER AZU..COMBINING GLAGOLITIC LETTER ZHIVETE {0x1E008, 0x1E018, prN}, // Mn [17] COMBINING GLAGOLITIC LETTER ZEMLJA..COMBINING GLAGOLITIC LETTER HERU {0x1E01B, 0x1E021, prN}, // Mn [7] COMBINING GLAGOLITIC LETTER SHTA..COMBINING GLAGOLITIC LETTER YATI {0x1E023, 0x1E024, prN}, // Mn [2] COMBINING GLAGOLITIC LETTER YU..COMBINING GLAGOLITIC LETTER SMALL YUS {0x1E026, 0x1E02A, prN}, // Mn [5] COMBINING GLAGOLITIC LETTER YO..COMBINING GLAGOLITIC LETTER FITA + {0x1E030, 0x1E06D, prN}, // Lm [62] MODIFIER LETTER CYRILLIC SMALL A..MODIFIER LETTER CYRILLIC SMALL STRAIGHT U WITH STROKE + {0x1E08F, 0x1E08F, prN}, // Mn COMBINING CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I {0x1E100, 0x1E12C, prN}, // Lo [45] NYIAKENG PUACHUE HMONG LETTER MA..NYIAKENG PUACHUE HMONG LETTER W {0x1E130, 0x1E136, prN}, // Mn [7] NYIAKENG PUACHUE HMONG TONE-B..NYIAKENG PUACHUE HMONG TONE-D {0x1E137, 0x1E13D, prN}, // Lm [7] NYIAKENG PUACHUE HMONG SIGN FOR PERSON..NYIAKENG PUACHUE HMONG SYLLABLE LENGTHENER @@ -2370,6 +2397,10 @@ var eastAsianWidth = [][3]int{ {0x1E2EC, 0x1E2EF, prN}, // Mn [4] WANCHO TONE TUP..WANCHO TONE KOINI {0x1E2F0, 0x1E2F9, prN}, // Nd [10] WANCHO DIGIT ZERO..WANCHO DIGIT NINE {0x1E2FF, 0x1E2FF, prN}, // Sc WANCHO NGUN SIGN + {0x1E4D0, 0x1E4EA, prN}, // Lo [27] NAG MUNDARI LETTER O..NAG MUNDARI LETTER ELL + {0x1E4EB, 0x1E4EB, prN}, // Lm NAG MUNDARI SIGN OJOD + {0x1E4EC, 0x1E4EF, prN}, // Mn [4] NAG MUNDARI SIGN MUHOR..NAG MUNDARI SIGN SUTUH + {0x1E4F0, 0x1E4F9, prN}, // Nd [10] NAG MUNDARI DIGIT ZERO..NAG MUNDARI DIGIT NINE {0x1E7E0, 0x1E7E6, prN}, // Lo [7] ETHIOPIC SYLLABLE HHYA..ETHIOPIC SYLLABLE HHYO {0x1E7E8, 0x1E7EB, prN}, // Lo [4] ETHIOPIC SYLLABLE GURAGE HHWA..ETHIOPIC SYLLABLE HHWE {0x1E7ED, 0x1E7EE, prN}, // Lo [2] ETHIOPIC SYLLABLE GURAGE 
MWI..ETHIOPIC SYLLABLE GURAGE MWEE @@ -2498,13 +2529,14 @@ var eastAsianWidth = [][3]int{ {0x1F6D0, 0x1F6D2, prW}, // So [3] PLACE OF WORSHIP..SHOPPING TROLLEY {0x1F6D3, 0x1F6D4, prN}, // So [2] STUPA..PAGODA {0x1F6D5, 0x1F6D7, prW}, // So [3] HINDU TEMPLE..ELEVATOR - {0x1F6DD, 0x1F6DF, prW}, // So [3] PLAYGROUND SLIDE..RING BUOY + {0x1F6DC, 0x1F6DF, prW}, // So [4] WIRELESS..RING BUOY {0x1F6E0, 0x1F6EA, prN}, // So [11] HAMMER AND WRENCH..NORTHEAST-POINTING AIRPLANE {0x1F6EB, 0x1F6EC, prW}, // So [2] AIRPLANE DEPARTURE..AIRPLANE ARRIVING {0x1F6F0, 0x1F6F3, prN}, // So [4] SATELLITE..PASSENGER SHIP {0x1F6F4, 0x1F6FC, prW}, // So [9] SCOOTER..ROLLER SKATE - {0x1F700, 0x1F773, prN}, // So [116] ALCHEMICAL SYMBOL FOR QUINTESSENCE..ALCHEMICAL SYMBOL FOR HALF OUNCE - {0x1F780, 0x1F7D8, prN}, // So [89] BLACK LEFT-POINTING ISOSCELES RIGHT TRIANGLE..NEGATIVE CIRCLED SQUARE + {0x1F700, 0x1F776, prN}, // So [119] ALCHEMICAL SYMBOL FOR QUINTESSENCE..LUNAR ECLIPSE + {0x1F77B, 0x1F77F, prN}, // So [5] HAUMEA..ORCUS + {0x1F780, 0x1F7D9, prN}, // So [90] BLACK LEFT-POINTING ISOSCELES RIGHT TRIANGLE..NINE POINTED WHITE STAR {0x1F7E0, 0x1F7EB, prW}, // So [12] LARGE ORANGE CIRCLE..LARGE BROWN SQUARE {0x1F7F0, 0x1F7F0, prW}, // So HEAVY EQUALS SIGN {0x1F800, 0x1F80B, prN}, // So [12] LEFTWARDS ARROW WITH SMALL TRIANGLE ARROWHEAD..DOWNWARDS ARROW WITH LARGE TRIANGLE ARROWHEAD @@ -2521,22 +2553,20 @@ var eastAsianWidth = [][3]int{ {0x1F947, 0x1F9FF, prW}, // So [185] FIRST PLACE MEDAL..NAZAR AMULET {0x1FA00, 0x1FA53, prN}, // So [84] NEUTRAL CHESS KING..BLACK CHESS KNIGHT-BISHOP {0x1FA60, 0x1FA6D, prN}, // So [14] XIANGQI RED GENERAL..XIANGQI BLACK SOLDIER - {0x1FA70, 0x1FA74, prW}, // So [5] BALLET SHOES..THONG SANDAL - {0x1FA78, 0x1FA7C, prW}, // So [5] DROP OF BLOOD..CRUTCH - {0x1FA80, 0x1FA86, prW}, // So [7] YO-YO..NESTING DOLLS - {0x1FA90, 0x1FAAC, prW}, // So [29] RINGED PLANET..HAMSA - {0x1FAB0, 0x1FABA, prW}, // So [11] FLY..NEST WITH EGGS - {0x1FAC0, 0x1FAC5, prW}, // So [6] ANATOMICAL HEART..PERSON WITH CROWN - {0x1FAD0, 0x1FAD9, prW}, // So [10] BLUEBERRIES..JAR - {0x1FAE0, 0x1FAE7, prW}, // So [8] MELTING FACE..BUBBLES - {0x1FAF0, 0x1FAF6, prW}, // So [7] HAND WITH INDEX FINGER AND THUMB CROSSED..HEART HANDS + {0x1FA70, 0x1FA7C, prW}, // So [13] BALLET SHOES..CRUTCH + {0x1FA80, 0x1FA88, prW}, // So [9] YO-YO..FLUTE + {0x1FA90, 0x1FABD, prW}, // So [46] RINGED PLANET..WING + {0x1FABF, 0x1FAC5, prW}, // So [7] GOOSE..PERSON WITH CROWN + {0x1FACE, 0x1FADB, prW}, // So [14] MOOSE..PEA POD + {0x1FAE0, 0x1FAE8, prW}, // So [9] MELTING FACE..SHAKING FACE + {0x1FAF0, 0x1FAF8, prW}, // So [9] HAND WITH INDEX FINGER AND THUMB CROSSED..RIGHTWARDS PUSHING HAND {0x1FB00, 0x1FB92, prN}, // So [147] BLOCK SEXTANT-1..UPPER HALF INVERSE MEDIUM SHADE AND LOWER HALF BLOCK {0x1FB94, 0x1FBCA, prN}, // So [55] LEFT HALF INVERSE MEDIUM SHADE AND RIGHT HALF BLOCK..WHITE UP-POINTING CHEVRON {0x1FBF0, 0x1FBF9, prN}, // Nd [10] SEGMENTED DIGIT ZERO..SEGMENTED DIGIT NINE {0x20000, 0x2A6DF, prW}, // Lo [42720] CJK UNIFIED IDEOGRAPH-20000..CJK UNIFIED IDEOGRAPH-2A6DF {0x2A6E0, 0x2A6FF, prW}, // Cn [32] .. - {0x2A700, 0x2B738, prW}, // Lo [4153] CJK UNIFIED IDEOGRAPH-2A700..CJK UNIFIED IDEOGRAPH-2B738 - {0x2B739, 0x2B73F, prW}, // Cn [7] .. + {0x2A700, 0x2B739, prW}, // Lo [4154] CJK UNIFIED IDEOGRAPH-2A700..CJK UNIFIED IDEOGRAPH-2B739 + {0x2B73A, 0x2B73F, prW}, // Cn [6] .. {0x2B740, 0x2B81D, prW}, // Lo [222] CJK UNIFIED IDEOGRAPH-2B740..CJK UNIFIED IDEOGRAPH-2B81D {0x2B81E, 0x2B81F, prW}, // Cn [2] .. 
{0x2B820, 0x2CEA1, prW}, // Lo [5762] CJK UNIFIED IDEOGRAPH-2B820..CJK UNIFIED IDEOGRAPH-2CEA1 @@ -2547,7 +2577,9 @@ var eastAsianWidth = [][3]int{ {0x2FA1E, 0x2FA1F, prW}, // Cn [2] .. {0x2FA20, 0x2FFFD, prW}, // Cn [1502] .. {0x30000, 0x3134A, prW}, // Lo [4939] CJK UNIFIED IDEOGRAPH-30000..CJK UNIFIED IDEOGRAPH-3134A - {0x3134B, 0x3FFFD, prW}, // Cn [60595] .. + {0x3134B, 0x3134F, prW}, // Cn [5] .. + {0x31350, 0x323AF, prW}, // Lo [4192] CJK UNIFIED IDEOGRAPH-31350..CJK UNIFIED IDEOGRAPH-323AF + {0x323B0, 0x3FFFD, prW}, // Cn [56398] .. {0xE0001, 0xE0001, prN}, // Cf LANGUAGE TAG {0xE0020, 0xE007F, prN}, // Cf [96] TAG SPACE..CANCEL TAG {0xE0100, 0xE01EF, prA}, // Mn [240] VARIATION SELECTOR-17..VARIATION SELECTOR-256 diff --git a/vendor/github.com/rivo/uniseg/emojipresentation.go b/vendor/github.com/rivo/uniseg/emojipresentation.go index fd0f7451af..9b5f499c4a 100644 --- a/vendor/github.com/rivo/uniseg/emojipresentation.go +++ b/vendor/github.com/rivo/uniseg/emojipresentation.go @@ -1,13 +1,13 @@ -package uniseg - // Code generated via go generate from gen_properties.go. DO NOT EDIT. +package uniseg + // emojiPresentation are taken from // // and -// https://unicode.org/Public/14.0.0/ucd/emoji/emoji-data.txt +// https://unicode.org/Public/15.0.0/ucd/emoji/emoji-data.txt // ("Extended_Pictographic" only) -// on September 10, 2022. See https://www.unicode.org/license.html for the Unicode +// on September 5, 2023. See https://www.unicode.org/license.html for the Unicode // license agreement. var emojiPresentation = [][3]int{ {0x231A, 0x231B, prEmojiPresentation}, // E0.6 [2] (⌚..⌛) watch..hourglass done @@ -211,6 +211,7 @@ var emojiPresentation = [][3]int{ {0x1F6D1, 0x1F6D2, prEmojiPresentation}, // E3.0 [2] (🛑..🛒) stop sign..shopping cart {0x1F6D5, 0x1F6D5, prEmojiPresentation}, // E12.0 [1] (🛕) hindu temple {0x1F6D6, 0x1F6D7, prEmojiPresentation}, // E13.0 [2] (🛖..🛗) hut..elevator + {0x1F6DC, 0x1F6DC, prEmojiPresentation}, // E15.0 [1] (🛜) wireless {0x1F6DD, 0x1F6DF, prEmojiPresentation}, // E14.0 [3] (🛝..🛟) playground slide..ring buoy {0x1F6EB, 0x1F6EC, prEmojiPresentation}, // E1.0 [2] (🛫..🛬) airplane departure..airplane arrival {0x1F6F4, 0x1F6F6, prEmojiPresentation}, // E3.0 [3] (🛴..🛶) kick scooter..canoe @@ -267,19 +268,28 @@ var emojiPresentation = [][3]int{ {0x1F9E7, 0x1F9FF, prEmojiPresentation}, // E11.0 [25] (🧧..🧿) red envelope..nazar amulet {0x1FA70, 0x1FA73, prEmojiPresentation}, // E12.0 [4] (🩰..🩳) ballet shoes..shorts {0x1FA74, 0x1FA74, prEmojiPresentation}, // E13.0 [1] (🩴) thong sandal + {0x1FA75, 0x1FA77, prEmojiPresentation}, // E15.0 [3] (🩵..🩷) light blue heart..pink heart {0x1FA78, 0x1FA7A, prEmojiPresentation}, // E12.0 [3] (🩸..🩺) drop of blood..stethoscope {0x1FA7B, 0x1FA7C, prEmojiPresentation}, // E14.0 [2] (🩻..🩼) x-ray..crutch {0x1FA80, 0x1FA82, prEmojiPresentation}, // E12.0 [3] (🪀..🪂) yo-yo..parachute {0x1FA83, 0x1FA86, prEmojiPresentation}, // E13.0 [4] (🪃..🪆) boomerang..nesting dolls + {0x1FA87, 0x1FA88, prEmojiPresentation}, // E15.0 [2] (🪇..🪈) maracas..flute {0x1FA90, 0x1FA95, prEmojiPresentation}, // E12.0 [6] (🪐..🪕) ringed planet..banjo {0x1FA96, 0x1FAA8, prEmojiPresentation}, // E13.0 [19] (🪖..🪨) military helmet..rock {0x1FAA9, 0x1FAAC, prEmojiPresentation}, // E14.0 [4] (🪩..🪬) mirror ball..hamsa + {0x1FAAD, 0x1FAAF, prEmojiPresentation}, // E15.0 [3] (🪭..🪯) folding hand fan..khanda {0x1FAB0, 0x1FAB6, prEmojiPresentation}, // E13.0 [7] (🪰..🪶) fly..feather {0x1FAB7, 0x1FABA, prEmojiPresentation}, // E14.0 [4] (🪷..🪺) lotus..nest with eggs + {0x1FABB, 
0x1FABD, prEmojiPresentation}, // E15.0 [3] (🪻..🪽) hyacinth..wing + {0x1FABF, 0x1FABF, prEmojiPresentation}, // E15.0 [1] (🪿) goose {0x1FAC0, 0x1FAC2, prEmojiPresentation}, // E13.0 [3] (🫀..🫂) anatomical heart..people hugging {0x1FAC3, 0x1FAC5, prEmojiPresentation}, // E14.0 [3] (🫃..🫅) pregnant man..person with crown + {0x1FACE, 0x1FACF, prEmojiPresentation}, // E15.0 [2] (🫎..🫏) moose..donkey {0x1FAD0, 0x1FAD6, prEmojiPresentation}, // E13.0 [7] (🫐..🫖) blueberries..teapot {0x1FAD7, 0x1FAD9, prEmojiPresentation}, // E14.0 [3] (🫗..🫙) pouring liquid..jar + {0x1FADA, 0x1FADB, prEmojiPresentation}, // E15.0 [2] (🫚..🫛) ginger root..pea pod {0x1FAE0, 0x1FAE7, prEmojiPresentation}, // E14.0 [8] (🫠..🫧) melting face..bubbles + {0x1FAE8, 0x1FAE8, prEmojiPresentation}, // E15.0 [1] (🫨) shaking face {0x1FAF0, 0x1FAF6, prEmojiPresentation}, // E14.0 [7] (🫰..🫶) hand with index finger and thumb crossed..heart hands + {0x1FAF7, 0x1FAF8, prEmojiPresentation}, // E15.0 [2] (🫷..🫸) leftwards pushing hand..rightwards pushing hand } diff --git a/vendor/github.com/rivo/uniseg/gen_breaktest.go b/vendor/github.com/rivo/uniseg/gen_breaktest.go index e613c4cd00..6bfbeb5e7f 100644 --- a/vendor/github.com/rivo/uniseg/gen_breaktest.go +++ b/vendor/github.com/rivo/uniseg/gen_breaktest.go @@ -32,7 +32,7 @@ import ( // We want to test against a specific version rather than the latest. When the // package is upgraded to a new version, change these to generate new tests. const ( - testCaseURL = `https://www.unicode.org/Public/14.0.0/ucd/auxiliary/%s.txt` + testCaseURL = `https://www.unicode.org/Public/15.0.0/ucd/auxiliary/%s.txt` ) func main() { @@ -76,9 +76,9 @@ func parse(url string) ([]byte, error) { buf := new(bytes.Buffer) buf.Grow(120 << 10) - buf.WriteString(`package uniseg + buf.WriteString(`// Code generated via go generate from gen_breaktest.go. DO NOT EDIT. -// Code generated via go generate from gen_breaktest.go. DO NOT EDIT. +package uniseg // ` + os.Args[3] + ` are Grapheme testcases taken from // ` + url + ` @@ -136,7 +136,9 @@ var ( // // E.g. for the input b="÷ 0020 × 0308 ÷ 1F1E6 ÷" // it will append -// "\u0020\u0308\U0001F1E6" +// +// "\u0020\u0308\U0001F1E6" +// // and "[][]rune{{0x0020,0x0308},{0x1F1E6},}" // to orig and exp respectively. // diff --git a/vendor/github.com/rivo/uniseg/gen_properties.go b/vendor/github.com/rivo/uniseg/gen_properties.go index 999d5efddf..8992d2c5f8 100644 --- a/vendor/github.com/rivo/uniseg/gen_properties.go +++ b/vendor/github.com/rivo/uniseg/gen_properties.go @@ -41,8 +41,8 @@ import ( // We want to test against a specific version rather than the latest. When the // package is upgraded to a new version, change these to generate new tests. const ( - propertyURL = `https://www.unicode.org/Public/14.0.0/ucd/%s.txt` - emojiURL = `https://unicode.org/Public/14.0.0/ucd/emoji/emoji-data.txt` + propertyURL = `https://www.unicode.org/Public/15.0.0/ucd/%s.txt` + emojiURL = `https://unicode.org/Public/15.0.0/ucd/emoji/emoji-data.txt` ) // The regular expression for a line containing a code point range property. @@ -178,6 +178,11 @@ func parse(propertyURL, emojiProperty string, includeGeneralCategory bool) (stri } } + // Avoid overflow during binary search. + if len(properties) >= 1<<31 { + return "", errors.New("too many properties") + } + // Sort properties. 
sort.Slice(properties, func(i, j int) bool { left, _ := strconv.ParseUint(properties[i][0], 16, 64) @@ -200,9 +205,9 @@ func parse(propertyURL, emojiProperty string, includeGeneralCategory bool) (stri // ` + emojiURL + ` // ("Extended_Pictographic" only)` } - buf.WriteString(`package uniseg + buf.WriteString(`// Code generated via go generate from gen_properties.go. DO NOT EDIT. -// Code generated via go generate from gen_properties.go. DO NOT EDIT. +package uniseg // ` + os.Args[3] + ` are taken from // ` + propertyURL + emojiComment + ` diff --git a/vendor/github.com/rivo/uniseg/grapheme.go b/vendor/github.com/rivo/uniseg/grapheme.go index 0086fc1b20..b12403d43c 100644 --- a/vendor/github.com/rivo/uniseg/grapheme.go +++ b/vendor/github.com/rivo/uniseg/grapheme.go @@ -13,9 +13,10 @@ import "unicode/utf8" // well as boundary information and character width is available via the various // methods (see examples below). // -// Using this class to iterate over a string is convenient but it is much slower -// than using this package's [Step] or [StepString] functions or any of the -// other specialized functions starting with "First". +// This class basically wraps the [StepString] parser and provides a convenient +// interface to it. If you are only interested in some parts of this package's +// functionality, using the specialized functions starting with "First" is +// almost always faster. type Graphemes struct { // The original string. original string @@ -222,7 +223,7 @@ func FirstGraphemeCluster(b []byte, state int) (cluster, rest []byte, width, new if len(b) <= length { // If we're already past the end, there is nothing else to parse. var prop int if state < 0 { - prop = property(graphemeCodePoints, r) + prop = propertyGraphemes(r) } else { prop = state >> shiftGraphemePropState } @@ -252,16 +253,14 @@ func FirstGraphemeCluster(b []byte, state int) (cluster, rest []byte, width, new return b[:length], b[length:], width, state | (prop << shiftGraphemePropState) } - if r == vs16 { - width = 2 - } else if firstProp != prExtendedPictographic && firstProp != prRegionalIndicator && firstProp != prL { - width += runeWidth(r, prop) - } else if firstProp == prExtendedPictographic { + if firstProp == prExtendedPictographic { if r == vs15 { width = 1 - } else { + } else if r == vs16 { width = 2 } + } else if firstProp != prRegionalIndicator && firstProp != prL { + width += runeWidth(r, prop) } length += l @@ -284,7 +283,7 @@ func FirstGraphemeClusterInString(str string, state int) (cluster, rest string, if len(str) <= length { // If we're already past the end, there is nothing else to parse. 
var prop int if state < 0 { - prop = property(graphemeCodePoints, r) + prop = propertyGraphemes(r) } else { prop = state >> shiftGraphemePropState } @@ -314,16 +313,14 @@ func FirstGraphemeClusterInString(str string, state int) (cluster, rest string, return str[:length], str[length:], width, state | (prop << shiftGraphemePropState) } - if r == vs16 { - width = 2 - } else if firstProp != prExtendedPictographic && firstProp != prRegionalIndicator && firstProp != prL { - width += runeWidth(r, prop) - } else if firstProp == prExtendedPictographic { + if firstProp == prExtendedPictographic { if r == vs15 { width = 1 - } else { + } else if r == vs16 { width = 2 } + } else if firstProp != prRegionalIndicator && firstProp != prL { + width += runeWidth(r, prop) } length += l diff --git a/vendor/github.com/rivo/uniseg/graphemeproperties.go b/vendor/github.com/rivo/uniseg/graphemeproperties.go index a87d140bf2..0aff4a619a 100644 --- a/vendor/github.com/rivo/uniseg/graphemeproperties.go +++ b/vendor/github.com/rivo/uniseg/graphemeproperties.go @@ -1,13 +1,13 @@ -package uniseg - // Code generated via go generate from gen_properties.go. DO NOT EDIT. +package uniseg + // graphemeCodePoints are taken from -// https://www.unicode.org/Public/14.0.0/ucd/auxiliary/GraphemeBreakProperty.txt +// https://www.unicode.org/Public/15.0.0/ucd/auxiliary/GraphemeBreakProperty.txt // and -// https://unicode.org/Public/14.0.0/ucd/emoji/emoji-data.txt +// https://unicode.org/Public/15.0.0/ucd/emoji/emoji-data.txt // ("Extended_Pictographic" only) -// on September 10, 2022. See https://www.unicode.org/license.html for the Unicode +// on September 5, 2023. See https://www.unicode.org/license.html for the Unicode // license agreement. var graphemeCodePoints = [][3]int{ {0x0000, 0x0009, prControl}, // Cc [10] .. 
@@ -143,6 +143,7 @@ var graphemeCodePoints = [][3]int{ {0x0CCC, 0x0CCD, prExtend}, // Mn [2] KANNADA VOWEL SIGN AU..KANNADA SIGN VIRAMA {0x0CD5, 0x0CD6, prExtend}, // Mc [2] KANNADA LENGTH MARK..KANNADA AI LENGTH MARK {0x0CE2, 0x0CE3, prExtend}, // Mn [2] KANNADA VOWEL SIGN VOCALIC L..KANNADA VOWEL SIGN VOCALIC LL + {0x0CF3, 0x0CF3, prSpacingMark}, // Mc KANNADA SIGN COMBINING ANUSVARA ABOVE RIGHT {0x0D00, 0x0D01, prExtend}, // Mn [2] MALAYALAM SIGN COMBINING ANUSVARA ABOVE..MALAYALAM SIGN CANDRABINDU {0x0D02, 0x0D03, prSpacingMark}, // Mc [2] MALAYALAM SIGN ANUSVARA..MALAYALAM SIGN VISARGA {0x0D3B, 0x0D3C, prExtend}, // Mn [2] MALAYALAM SIGN VERTICAL BAR VIRAMA..MALAYALAM SIGN CIRCULAR VIRAMA @@ -172,7 +173,7 @@ var graphemeCodePoints = [][3]int{ {0x0EB1, 0x0EB1, prExtend}, // Mn LAO VOWEL SIGN MAI KAN {0x0EB3, 0x0EB3, prSpacingMark}, // Lo LAO VOWEL SIGN AM {0x0EB4, 0x0EBC, prExtend}, // Mn [9] LAO VOWEL SIGN I..LAO SEMIVOWEL SIGN LO - {0x0EC8, 0x0ECD, prExtend}, // Mn [6] LAO TONE MAI EK..LAO NIGGAHITA + {0x0EC8, 0x0ECE, prExtend}, // Mn [7] LAO TONE MAI EK..LAO YAMAKKAN {0x0F18, 0x0F19, prExtend}, // Mn [2] TIBETAN ASTROLOGICAL SIGN -KHYUD PA..TIBETAN ASTROLOGICAL SIGN SDONG TSHUGS {0x0F35, 0x0F35, prExtend}, // Mn TIBETAN MARK NGAS BZUNG NYI ZLA {0x0F37, 0x0F37, prExtend}, // Mn TIBETAN MARK NGAS BZUNG SGOR RTAGS @@ -1336,6 +1337,7 @@ var graphemeCodePoints = [][3]int{ {0x10AE5, 0x10AE6, prExtend}, // Mn [2] MANICHAEAN ABBREVIATION MARK ABOVE..MANICHAEAN ABBREVIATION MARK BELOW {0x10D24, 0x10D27, prExtend}, // Mn [4] HANIFI ROHINGYA SIGN HARBAHAY..HANIFI ROHINGYA SIGN TASSI {0x10EAB, 0x10EAC, prExtend}, // Mn [2] YEZIDI COMBINING HAMZA MARK..YEZIDI COMBINING MADDA MARK + {0x10EFD, 0x10EFF, prExtend}, // Mn [3] ARABIC SMALL LOW WORD SAKTA..ARABIC SMALL LOW WORD MADDA {0x10F46, 0x10F50, prExtend}, // Mn [11] SOGDIAN COMBINING DOT BELOW..SOGDIAN COMBINING STROKE BELOW {0x10F82, 0x10F85, prExtend}, // Mn [4] OLD UYGHUR COMBINING DOT ABOVE..OLD UYGHUR COMBINING TWO DOTS BELOW {0x11000, 0x11000, prSpacingMark}, // Mc BRAHMI SIGN CANDRABINDU @@ -1375,6 +1377,7 @@ var graphemeCodePoints = [][3]int{ {0x11235, 0x11235, prSpacingMark}, // Mc KHOJKI SIGN VIRAMA {0x11236, 0x11237, prExtend}, // Mn [2] KHOJKI SIGN NUKTA..KHOJKI SIGN SHADDA {0x1123E, 0x1123E, prExtend}, // Mn KHOJKI SIGN SUKUN + {0x11241, 0x11241, prExtend}, // Mn KHOJKI VOWEL SIGN VOCALIC R {0x112DF, 0x112DF, prExtend}, // Mn KHUDAWADI SIGN ANUSVARA {0x112E0, 0x112E2, prSpacingMark}, // Mc [3] KHUDAWADI VOWEL SIGN AA..KHUDAWADI VOWEL SIGN II {0x112E3, 0x112EA, prExtend}, // Mn [8] KHUDAWADI VOWEL SIGN U..KHUDAWADI SIGN VIRAMA @@ -1494,7 +1497,18 @@ var graphemeCodePoints = [][3]int{ {0x11D97, 0x11D97, prExtend}, // Mn GUNJALA GONDI VIRAMA {0x11EF3, 0x11EF4, prExtend}, // Mn [2] MAKASAR VOWEL SIGN I..MAKASAR VOWEL SIGN U {0x11EF5, 0x11EF6, prSpacingMark}, // Mc [2] MAKASAR VOWEL SIGN E..MAKASAR VOWEL SIGN O - {0x13430, 0x13438, prControl}, // Cf [9] EGYPTIAN HIEROGLYPH VERTICAL JOINER..EGYPTIAN HIEROGLYPH END SEGMENT + {0x11F00, 0x11F01, prExtend}, // Mn [2] KAWI SIGN CANDRABINDU..KAWI SIGN ANUSVARA + {0x11F02, 0x11F02, prPrepend}, // Lo KAWI SIGN REPHA + {0x11F03, 0x11F03, prSpacingMark}, // Mc KAWI SIGN VISARGA + {0x11F34, 0x11F35, prSpacingMark}, // Mc [2] KAWI VOWEL SIGN AA..KAWI VOWEL SIGN ALTERNATE AA + {0x11F36, 0x11F3A, prExtend}, // Mn [5] KAWI VOWEL SIGN I..KAWI VOWEL SIGN VOCALIC R + {0x11F3E, 0x11F3F, prSpacingMark}, // Mc [2] KAWI VOWEL SIGN E..KAWI VOWEL SIGN AI + {0x11F40, 0x11F40, prExtend}, // Mn KAWI VOWEL SIGN EU 
+ {0x11F41, 0x11F41, prSpacingMark}, // Mc KAWI SIGN KILLER + {0x11F42, 0x11F42, prExtend}, // Mn KAWI CONJOINER + {0x13430, 0x1343F, prControl}, // Cf [16] EGYPTIAN HIEROGLYPH VERTICAL JOINER..EGYPTIAN HIEROGLYPH END WALLED ENCLOSURE + {0x13440, 0x13440, prExtend}, // Mn EGYPTIAN HIEROGLYPH MIRROR HORIZONTALLY + {0x13447, 0x13455, prExtend}, // Mn [15] EGYPTIAN HIEROGLYPH MODIFIER DAMAGED AT TOP START..EGYPTIAN HIEROGLYPH MODIFIER DAMAGED {0x16AF0, 0x16AF4, prExtend}, // Mn [5] BASSA VAH COMBINING HIGH TONE..BASSA VAH COMBINING HIGH-LOW TONE {0x16B30, 0x16B36, prExtend}, // Mn [7] PAHAWH HMONG MARK CIM TUB..PAHAWH HMONG MARK CIM TAUM {0x16F4F, 0x16F4F, prExtend}, // Mn MIAO SIGN CONSONANT MODIFIER BAR @@ -1527,9 +1541,11 @@ var graphemeCodePoints = [][3]int{ {0x1E01B, 0x1E021, prExtend}, // Mn [7] COMBINING GLAGOLITIC LETTER SHTA..COMBINING GLAGOLITIC LETTER YATI {0x1E023, 0x1E024, prExtend}, // Mn [2] COMBINING GLAGOLITIC LETTER YU..COMBINING GLAGOLITIC LETTER SMALL YUS {0x1E026, 0x1E02A, prExtend}, // Mn [5] COMBINING GLAGOLITIC LETTER YO..COMBINING GLAGOLITIC LETTER FITA + {0x1E08F, 0x1E08F, prExtend}, // Mn COMBINING CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I {0x1E130, 0x1E136, prExtend}, // Mn [7] NYIAKENG PUACHUE HMONG TONE-B..NYIAKENG PUACHUE HMONG TONE-D {0x1E2AE, 0x1E2AE, prExtend}, // Mn TOTO SIGN RISING TONE {0x1E2EC, 0x1E2EF, prExtend}, // Mn [4] WANCHO TONE TUP..WANCHO TONE KOINI + {0x1E4EC, 0x1E4EF, prExtend}, // Mn [4] NAG MUNDARI SIGN MUHOR..NAG MUNDARI SIGN SUTUH {0x1E8D0, 0x1E8D6, prExtend}, // Mn [7] MENDE KIKAKUI COMBINING NUMBER TEENS..MENDE KIKAKUI COMBINING NUMBER MILLIONS {0x1E944, 0x1E94A, prExtend}, // Mn [7] ADLAM ALIF LENGTHENER..ADLAM NUKTA {0x1F000, 0x1F003, prExtendedPictographic}, // E0.0 [4] (🀀..🀃) MAHJONG TILE EAST WIND..MAHJONG TILE NORTH WIND @@ -1780,7 +1796,8 @@ var graphemeCodePoints = [][3]int{ {0x1F6D3, 0x1F6D4, prExtendedPictographic}, // E0.0 [2] (🛓..🛔) STUPA..PAGODA {0x1F6D5, 0x1F6D5, prExtendedPictographic}, // E12.0 [1] (🛕) hindu temple {0x1F6D6, 0x1F6D7, prExtendedPictographic}, // E13.0 [2] (🛖..🛗) hut..elevator - {0x1F6D8, 0x1F6DC, prExtendedPictographic}, // E0.0 [5] (🛘..🛜) .. + {0x1F6D8, 0x1F6DB, prExtendedPictographic}, // E0.0 [4] (🛘..🛛) .. + {0x1F6DC, 0x1F6DC, prExtendedPictographic}, // E15.0 [1] (🛜) wireless {0x1F6DD, 0x1F6DF, prExtendedPictographic}, // E14.0 [3] (🛝..🛟) playground slide..ring buoy {0x1F6E0, 0x1F6E5, prExtendedPictographic}, // E0.7 [6] (🛠️..🛥️) hammer and wrench..motor boat {0x1F6E6, 0x1F6E8, prExtendedPictographic}, // E0.0 [3] (🛦..🛨) UP-POINTING MILITARY AIRPLANE..UP-POINTING SMALL AIRPLANE @@ -1797,7 +1814,7 @@ var graphemeCodePoints = [][3]int{ {0x1F6FA, 0x1F6FA, prExtendedPictographic}, // E12.0 [1] (🛺) auto rickshaw {0x1F6FB, 0x1F6FC, prExtendedPictographic}, // E13.0 [2] (🛻..🛼) pickup truck..roller skate {0x1F6FD, 0x1F6FF, prExtendedPictographic}, // E0.0 [3] (🛽..🛿) .. - {0x1F774, 0x1F77F, prExtendedPictographic}, // E0.0 [12] (🝴..🝿) .. + {0x1F774, 0x1F77F, prExtendedPictographic}, // E0.0 [12] (🝴..🝿) LOT OF FORTUNE..ORCUS {0x1F7D5, 0x1F7DF, prExtendedPictographic}, // E0.0 [11] (🟕..🟟) CIRCLED TRIANGLE.. {0x1F7E0, 0x1F7EB, prExtendedPictographic}, // E12.0 [12] (🟠..🟫) orange circle..brown square {0x1F7EC, 0x1F7EF, prExtendedPictographic}, // E0.0 [4] (🟬..🟯) .. @@ -1856,30 +1873,37 @@ var graphemeCodePoints = [][3]int{ {0x1FA00, 0x1FA6F, prExtendedPictographic}, // E0.0 [112] (🨀..🩯) NEUTRAL CHESS KING.. 
{0x1FA70, 0x1FA73, prExtendedPictographic}, // E12.0 [4] (🩰..🩳) ballet shoes..shorts {0x1FA74, 0x1FA74, prExtendedPictographic}, // E13.0 [1] (🩴) thong sandal - {0x1FA75, 0x1FA77, prExtendedPictographic}, // E0.0 [3] (🩵..🩷) .. + {0x1FA75, 0x1FA77, prExtendedPictographic}, // E15.0 [3] (🩵..🩷) light blue heart..pink heart {0x1FA78, 0x1FA7A, prExtendedPictographic}, // E12.0 [3] (🩸..🩺) drop of blood..stethoscope {0x1FA7B, 0x1FA7C, prExtendedPictographic}, // E14.0 [2] (🩻..🩼) x-ray..crutch {0x1FA7D, 0x1FA7F, prExtendedPictographic}, // E0.0 [3] (🩽..🩿) .. {0x1FA80, 0x1FA82, prExtendedPictographic}, // E12.0 [3] (🪀..🪂) yo-yo..parachute {0x1FA83, 0x1FA86, prExtendedPictographic}, // E13.0 [4] (🪃..🪆) boomerang..nesting dolls - {0x1FA87, 0x1FA8F, prExtendedPictographic}, // E0.0 [9] (🪇..🪏) .. + {0x1FA87, 0x1FA88, prExtendedPictographic}, // E15.0 [2] (🪇..🪈) maracas..flute + {0x1FA89, 0x1FA8F, prExtendedPictographic}, // E0.0 [7] (🪉..🪏) .. {0x1FA90, 0x1FA95, prExtendedPictographic}, // E12.0 [6] (🪐..🪕) ringed planet..banjo {0x1FA96, 0x1FAA8, prExtendedPictographic}, // E13.0 [19] (🪖..🪨) military helmet..rock {0x1FAA9, 0x1FAAC, prExtendedPictographic}, // E14.0 [4] (🪩..🪬) mirror ball..hamsa - {0x1FAAD, 0x1FAAF, prExtendedPictographic}, // E0.0 [3] (🪭..🪯) .. + {0x1FAAD, 0x1FAAF, prExtendedPictographic}, // E15.0 [3] (🪭..🪯) folding hand fan..khanda {0x1FAB0, 0x1FAB6, prExtendedPictographic}, // E13.0 [7] (🪰..🪶) fly..feather {0x1FAB7, 0x1FABA, prExtendedPictographic}, // E14.0 [4] (🪷..🪺) lotus..nest with eggs - {0x1FABB, 0x1FABF, prExtendedPictographic}, // E0.0 [5] (🪻..🪿) .. + {0x1FABB, 0x1FABD, prExtendedPictographic}, // E15.0 [3] (🪻..🪽) hyacinth..wing + {0x1FABE, 0x1FABE, prExtendedPictographic}, // E0.0 [1] (🪾) + {0x1FABF, 0x1FABF, prExtendedPictographic}, // E15.0 [1] (🪿) goose {0x1FAC0, 0x1FAC2, prExtendedPictographic}, // E13.0 [3] (🫀..🫂) anatomical heart..people hugging {0x1FAC3, 0x1FAC5, prExtendedPictographic}, // E14.0 [3] (🫃..🫅) pregnant man..person with crown - {0x1FAC6, 0x1FACF, prExtendedPictographic}, // E0.0 [10] (🫆..🫏) .. + {0x1FAC6, 0x1FACD, prExtendedPictographic}, // E0.0 [8] (🫆..🫍) .. + {0x1FACE, 0x1FACF, prExtendedPictographic}, // E15.0 [2] (🫎..🫏) moose..donkey {0x1FAD0, 0x1FAD6, prExtendedPictographic}, // E13.0 [7] (🫐..🫖) blueberries..teapot {0x1FAD7, 0x1FAD9, prExtendedPictographic}, // E14.0 [3] (🫗..🫙) pouring liquid..jar - {0x1FADA, 0x1FADF, prExtendedPictographic}, // E0.0 [6] (🫚..🫟) .. + {0x1FADA, 0x1FADB, prExtendedPictographic}, // E15.0 [2] (🫚..🫛) ginger root..pea pod + {0x1FADC, 0x1FADF, prExtendedPictographic}, // E0.0 [4] (🫜..🫟) .. {0x1FAE0, 0x1FAE7, prExtendedPictographic}, // E14.0 [8] (🫠..🫧) melting face..bubbles - {0x1FAE8, 0x1FAEF, prExtendedPictographic}, // E0.0 [8] (🫨..🫯) .. + {0x1FAE8, 0x1FAE8, prExtendedPictographic}, // E15.0 [1] (🫨) shaking face + {0x1FAE9, 0x1FAEF, prExtendedPictographic}, // E0.0 [7] (🫩..🫯) .. {0x1FAF0, 0x1FAF6, prExtendedPictographic}, // E14.0 [7] (🫰..🫶) hand with index finger and thumb crossed..heart hands - {0x1FAF7, 0x1FAFF, prExtendedPictographic}, // E0.0 [9] (🫷..🫿) .. + {0x1FAF7, 0x1FAF8, prExtendedPictographic}, // E15.0 [2] (🫷..🫸) leftwards pushing hand..rightwards pushing hand + {0x1FAF9, 0x1FAFF, prExtendedPictographic}, // E0.0 [7] (🫹..🫿) .. {0x1FC00, 0x1FFFD, prExtendedPictographic}, // E0.0[1022] (🰀..🿽) .. 
{0xE0000, 0xE0000, prControl}, // Cn {0xE0001, 0xE0001, prControl}, // Cf LANGUAGE TAG diff --git a/vendor/github.com/rivo/uniseg/graphemerules.go b/vendor/github.com/rivo/uniseg/graphemerules.go index 9f46b575bb..5d399d29c8 100644 --- a/vendor/github.com/rivo/uniseg/graphemerules.go +++ b/vendor/github.com/rivo/uniseg/graphemerules.go @@ -21,11 +21,12 @@ const ( grBoundary ) -// The grapheme cluster parser's state transitions. Maps (state, property) to -// (new state, breaking instruction, rule number). The breaking instruction -// always refers to the boundary between the last and next code point. +// grTransitions implements the grapheme cluster parser's state transitions. +// Maps state and property to a new state, a breaking instruction, and rule +// number. The breaking instruction always refers to the boundary between the +// last and next code point. Returns negative values if no transition is found. // -// This map is queried as follows: +// This function is used as follows: // // 1. Find specific state + specific property. Stop if found. // 2. Find specific state + any property. @@ -36,59 +37,96 @@ const ( // are equal. Stop. // 6. Assume grAny and grBoundary. // -// Unicode version 14.0.0. -var grTransitions = map[[2]int][3]int{ +// Unicode version 15.0.0. +func grTransitions(state, prop int) (newState int, newProp int, boundary int) { + // It turns out that using a big switch statement is much faster than using + // a map. + + switch uint64(state) | uint64(prop)<<32 { // GB5 - {grAny, prCR}: {grCR, grBoundary, 50}, - {grAny, prLF}: {grControlLF, grBoundary, 50}, - {grAny, prControl}: {grControlLF, grBoundary, 50}, + case grAny | prCR<<32: + return grCR, grBoundary, 50 + case grAny | prLF<<32: + return grControlLF, grBoundary, 50 + case grAny | prControl<<32: + return grControlLF, grBoundary, 50 // GB4 - {grCR, prAny}: {grAny, grBoundary, 40}, - {grControlLF, prAny}: {grAny, grBoundary, 40}, - - // GB3. - {grCR, prLF}: {grControlLF, grNoBoundary, 30}, - - // GB6. - {grAny, prL}: {grL, grBoundary, 9990}, - {grL, prL}: {grL, grNoBoundary, 60}, - {grL, prV}: {grLVV, grNoBoundary, 60}, - {grL, prLV}: {grLVV, grNoBoundary, 60}, - {grL, prLVT}: {grLVTT, grNoBoundary, 60}, - - // GB7. - {grAny, prLV}: {grLVV, grBoundary, 9990}, - {grAny, prV}: {grLVV, grBoundary, 9990}, - {grLVV, prV}: {grLVV, grNoBoundary, 70}, - {grLVV, prT}: {grLVTT, grNoBoundary, 70}, - - // GB8. - {grAny, prLVT}: {grLVTT, grBoundary, 9990}, - {grAny, prT}: {grLVTT, grBoundary, 9990}, - {grLVTT, prT}: {grLVTT, grNoBoundary, 80}, - - // GB9. - {grAny, prExtend}: {grAny, grNoBoundary, 90}, - {grAny, prZWJ}: {grAny, grNoBoundary, 90}, - - // GB9a. - {grAny, prSpacingMark}: {grAny, grNoBoundary, 91}, - - // GB9b. - {grAny, prPrepend}: {grPrepend, grBoundary, 9990}, - {grPrepend, prAny}: {grAny, grNoBoundary, 92}, - - // GB11. - {grAny, prExtendedPictographic}: {grExtendedPictographic, grBoundary, 9990}, - {grExtendedPictographic, prExtend}: {grExtendedPictographic, grNoBoundary, 110}, - {grExtendedPictographic, prZWJ}: {grExtendedPictographicZWJ, grNoBoundary, 110}, - {grExtendedPictographicZWJ, prExtendedPictographic}: {grExtendedPictographic, grNoBoundary, 110}, - - // GB12 / GB13. 
- {grAny, prRegionalIndicator}: {grRIOdd, grBoundary, 9990}, - {grRIOdd, prRegionalIndicator}: {grRIEven, grNoBoundary, 120}, - {grRIEven, prRegionalIndicator}: {grRIOdd, grBoundary, 120}, + case grCR | prAny<<32: + return grAny, grBoundary, 40 + case grControlLF | prAny<<32: + return grAny, grBoundary, 40 + + // GB3 + case grCR | prLF<<32: + return grControlLF, grNoBoundary, 30 + + // GB6 + case grAny | prL<<32: + return grL, grBoundary, 9990 + case grL | prL<<32: + return grL, grNoBoundary, 60 + case grL | prV<<32: + return grLVV, grNoBoundary, 60 + case grL | prLV<<32: + return grLVV, grNoBoundary, 60 + case grL | prLVT<<32: + return grLVTT, grNoBoundary, 60 + + // GB7 + case grAny | prLV<<32: + return grLVV, grBoundary, 9990 + case grAny | prV<<32: + return grLVV, grBoundary, 9990 + case grLVV | prV<<32: + return grLVV, grNoBoundary, 70 + case grLVV | prT<<32: + return grLVTT, grNoBoundary, 70 + + // GB8 + case grAny | prLVT<<32: + return grLVTT, grBoundary, 9990 + case grAny | prT<<32: + return grLVTT, grBoundary, 9990 + case grLVTT | prT<<32: + return grLVTT, grNoBoundary, 80 + + // GB9 + case grAny | prExtend<<32: + return grAny, grNoBoundary, 90 + case grAny | prZWJ<<32: + return grAny, grNoBoundary, 90 + + // GB9a + case grAny | prSpacingMark<<32: + return grAny, grNoBoundary, 91 + + // GB9b + case grAny | prPrepend<<32: + return grPrepend, grBoundary, 9990 + case grPrepend | prAny<<32: + return grAny, grNoBoundary, 92 + + // GB11 + case grAny | prExtendedPictographic<<32: + return grExtendedPictographic, grBoundary, 9990 + case grExtendedPictographic | prExtend<<32: + return grExtendedPictographic, grNoBoundary, 110 + case grExtendedPictographic | prZWJ<<32: + return grExtendedPictographicZWJ, grNoBoundary, 110 + case grExtendedPictographicZWJ | prExtendedPictographic<<32: + return grExtendedPictographic, grNoBoundary, 110 + + // GB12 / GB13 + case grAny | prRegionalIndicator<<32: + return grRIOdd, grBoundary, 9990 + case grRIOdd | prRegionalIndicator<<32: + return grRIEven, grNoBoundary, 120 + case grRIEven | prRegionalIndicator<<32: + return grRIOdd, grBoundary, 120 + default: + return -1, -1, -1 + } } // transitionGraphemeState determines the new state of the grapheme cluster @@ -97,40 +135,40 @@ var grTransitions = map[[2]int][3]int{ // table) and whether a cluster boundary was detected. func transitionGraphemeState(state int, r rune) (newState, prop int, boundary bool) { // Determine the property of the next character. - prop = property(graphemeCodePoints, r) + prop = propertyGraphemes(r) // Find the applicable transition. - transition, ok := grTransitions[[2]int{state, prop}] - if ok { + nextState, nextProp, _ := grTransitions(state, prop) + if nextState >= 0 { // We have a specific transition. We'll use it. - return transition[0], prop, transition[1] == grBoundary + return nextState, prop, nextProp == grBoundary } // No specific transition found. Try the less specific ones. - transAnyProp, okAnyProp := grTransitions[[2]int{state, prAny}] - transAnyState, okAnyState := grTransitions[[2]int{grAny, prop}] - if okAnyProp && okAnyState { + anyPropState, anyPropProp, anyPropRule := grTransitions(state, prAny) + anyStateState, anyStateProp, anyStateRule := grTransitions(grAny, prop) + if anyPropState >= 0 && anyStateState >= 0 { // Both apply. We'll use a mix (see comments for grTransitions). 
- newState = transAnyState[0] - boundary = transAnyState[1] == grBoundary - if transAnyProp[2] < transAnyState[2] { - boundary = transAnyProp[1] == grBoundary + newState = anyStateState + boundary = anyStateProp == grBoundary + if anyPropRule < anyStateRule { + boundary = anyPropProp == grBoundary } return } - if okAnyProp { + if anyPropState >= 0 { // We only have a specific state. - return transAnyProp[0], prop, transAnyProp[1] == grBoundary + return anyPropState, prop, anyPropProp == grBoundary // This branch will probably never be reached because okAnyState will // always be true given the current transition map. But we keep it here // for future modifications to the transition map where this may not be // true anymore. } - if okAnyState { + if anyStateState >= 0 { // We only have a specific property. - return transAnyState[0], prop, transAnyState[1] == grBoundary + return anyStateState, prop, anyStateProp == grBoundary } // No known transition. GB999: Any ÷ Any. diff --git a/vendor/github.com/rivo/uniseg/line.go b/vendor/github.com/rivo/uniseg/line.go index 87f28503f4..7a46318d93 100644 --- a/vendor/github.com/rivo/uniseg/line.go +++ b/vendor/github.com/rivo/uniseg/line.go @@ -80,7 +80,7 @@ func FirstLineSegment(b []byte, state int) (segment, rest []byte, mustBreak bool } } -// FirstLineSegmentInString is like FirstLineSegment() but its input and outputs +// FirstLineSegmentInString is like [FirstLineSegment] but its input and outputs // are strings. func FirstLineSegmentInString(str string, state int) (segment, rest string, mustBreak bool, newState int) { // An empty byte slice returns nothing. @@ -122,13 +122,13 @@ func FirstLineSegmentInString(str string, state int) (segment, rest string, must // [UAX #14]: https://www.unicode.org/reports/tr14/#Algorithm func HasTrailingLineBreak(b []byte) bool { r, _ := utf8.DecodeLastRune(b) - property, _ := propertyWithGenCat(lineBreakCodePoints, r) - return property == lbBK || property == lbCR || property == lbLF || property == lbNL + property, _ := propertyLineBreak(r) + return property == prBK || property == prCR || property == prLF || property == prNL } // HasTrailingLineBreakInString is like [HasTrailingLineBreak] but for a string. func HasTrailingLineBreakInString(str string) bool { r, _ := utf8.DecodeLastRuneInString(str) - property, _ := propertyWithGenCat(lineBreakCodePoints, r) - return property == lbBK || property == lbCR || property == lbLF || property == lbNL + property, _ := propertyLineBreak(r) + return property == prBK || property == prCR || property == prLF || property == prNL } diff --git a/vendor/github.com/rivo/uniseg/lineproperties.go b/vendor/github.com/rivo/uniseg/lineproperties.go index 32169306e8..ac7fac4c05 100644 --- a/vendor/github.com/rivo/uniseg/lineproperties.go +++ b/vendor/github.com/rivo/uniseg/lineproperties.go @@ -1,13 +1,13 @@ -package uniseg - // Code generated via go generate from gen_properties.go. DO NOT EDIT. +package uniseg + // lineBreakCodePoints are taken from -// https://www.unicode.org/Public/14.0.0/ucd/LineBreak.txt +// https://www.unicode.org/Public/15.0.0/ucd/LineBreak.txt // and -// https://unicode.org/Public/14.0.0/ucd/emoji/emoji-data.txt +// https://unicode.org/Public/15.0.0/ucd/emoji/emoji-data.txt // ("Extended_Pictographic" only) -// on September 10, 2022. See https://www.unicode.org/license.html for the Unicode +// on September 5, 2023. See https://www.unicode.org/license.html for the Unicode // license agreement. 
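For context on the map-to-switch refactor in graphemerules.go above: the updated grTransitions (and lbTransitions further down) pack the (state, property) pair into a single uint64 and dispatch with a switch instead of looking up a map[[2]int] key, which the upstream comment notes is significantly faster. The following is a minimal standalone sketch of that packed-key pattern using made-up states, properties, and rule numbers; it is illustrative only and is not the vendored code.

package main

import "fmt"

// Illustrative states and properties (not the uniseg constants).
const (
	stAny = iota
	stCR
)

const (
	propAny = iota
	propCR
	propLF
)

// transition packs (state, prop) into one uint64 and dispatches with a
// switch, avoiding the hashing and allocation cost of a map[[2]int] lookup.
// It returns -1 sentinels when no specific transition exists, mirroring the
// convention of the refactored grTransitions/lbTransitions.
func transition(state, prop int) (newState int, boundary bool, rule int) {
	switch uint64(state) | uint64(prop)<<32 {
	case stAny | propCR<<32:
		return stCR, true, 50
	case stCR | propLF<<32:
		return stAny, false, 30
	default:
		return -1, false, -1
	}
}

func main() {
	s, b, r := transition(stAny, propCR)
	fmt.Println(s, b, r) // 1 true 50
}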
var lineBreakCodePoints = [][4]int{ {0x0000, 0x0008, prCM, gcCc}, // [9] .. @@ -439,6 +439,7 @@ var lineBreakCodePoints = [][4]int{ {0x0CE2, 0x0CE3, prCM, gcMn}, // [2] KANNADA VOWEL SIGN VOCALIC L..KANNADA VOWEL SIGN VOCALIC LL {0x0CE6, 0x0CEF, prNU, gcNd}, // [10] KANNADA DIGIT ZERO..KANNADA DIGIT NINE {0x0CF1, 0x0CF2, prAL, gcLo}, // [2] KANNADA SIGN JIHVAMULIYA..KANNADA SIGN UPADHMANIYA + {0x0CF3, 0x0CF3, prCM, gcMc}, // KANNADA SIGN COMBINING ANUSVARA ABOVE RIGHT {0x0D00, 0x0D01, prCM, gcMn}, // [2] MALAYALAM SIGN COMBINING ANUSVARA ABOVE..MALAYALAM SIGN CANDRABINDU {0x0D02, 0x0D03, prCM, gcMc}, // [2] MALAYALAM SIGN ANUSVARA..MALAYALAM SIGN VISARGA {0x0D04, 0x0D0C, prAL, gcLo}, // [9] MALAYALAM LETTER VEDIC ANUSVARA..MALAYALAM LETTER VOCALIC L @@ -500,7 +501,7 @@ var lineBreakCodePoints = [][4]int{ {0x0EBD, 0x0EBD, prSA, gcLo}, // LAO SEMIVOWEL SIGN NYO {0x0EC0, 0x0EC4, prSA, gcLo}, // [5] LAO VOWEL SIGN E..LAO VOWEL SIGN AI {0x0EC6, 0x0EC6, prSA, gcLm}, // LAO KO LA - {0x0EC8, 0x0ECD, prSA, gcMn}, // [6] LAO TONE MAI EK..LAO NIGGAHITA + {0x0EC8, 0x0ECE, prSA, gcMn}, // [7] LAO TONE MAI EK..LAO YAMAKKAN {0x0ED0, 0x0ED9, prNU, gcNd}, // [10] LAO DIGIT ZERO..LAO DIGIT NINE {0x0EDC, 0x0EDF, prSA, gcLo}, // [4] LAO HO NO..LAO LETTER KHMU NYO {0x0F00, 0x0F00, prAL, gcLo}, // TIBETAN SYLLABLE OM @@ -813,7 +814,11 @@ var lineBreakCodePoints = [][4]int{ {0x1D79, 0x1D7F, prAL, gcLl}, // [7] LATIN SMALL LETTER INSULAR G..LATIN SMALL LETTER UPSILON WITH STROKE {0x1D80, 0x1D9A, prAL, gcLl}, // [27] LATIN SMALL LETTER B WITH PALATAL HOOK..LATIN SMALL LETTER EZH WITH RETROFLEX HOOK {0x1D9B, 0x1DBF, prAL, gcLm}, // [37] MODIFIER LETTER SMALL TURNED ALPHA..MODIFIER LETTER SMALL THETA - {0x1DC0, 0x1DFF, prCM, gcMn}, // [64] COMBINING DOTTED GRAVE ACCENT..COMBINING RIGHT ARROWHEAD AND DOWN ARROWHEAD BELOW + {0x1DC0, 0x1DCC, prCM, gcMn}, // [13] COMBINING DOTTED GRAVE ACCENT..COMBINING MACRON-BREVE + {0x1DCD, 0x1DCD, prGL, gcMn}, // COMBINING DOUBLE CIRCUMFLEX ABOVE + {0x1DCE, 0x1DFB, prCM, gcMn}, // [46] COMBINING OGONEK ABOVE..COMBINING DELETION MARK + {0x1DFC, 0x1DFC, prGL, gcMn}, // COMBINING DOUBLE INVERTED BREVE BELOW + {0x1DFD, 0x1DFF, prCM, gcMn}, // [3] COMBINING ALMOST EQUAL TO BELOW..COMBINING RIGHT ARROWHEAD AND DOWN ARROWHEAD BELOW {0x1E00, 0x1EFF, prAL, gcLC}, // [256] LATIN CAPITAL LETTER A WITH RING BELOW..LATIN SMALL LETTER Y WITH LOOP {0x1F00, 0x1F15, prAL, gcLC}, // [22] GREEK SMALL LETTER ALPHA WITH PSILI..GREEK SMALL LETTER EPSILON WITH DASIA AND OXIA {0x1F18, 0x1F1D, prAL, gcLu}, // [6] GREEK CAPITAL LETTER EPSILON WITH PSILI..GREEK CAPITAL LETTER EPSILON WITH DASIA AND OXIA @@ -889,7 +894,7 @@ var lineBreakCodePoints = [][4]int{ {0x2054, 0x2054, prAL, gcPc}, // INVERTED UNDERTIE {0x2055, 0x2055, prAL, gcPo}, // FLOWER PUNCTUATION MARK {0x2056, 0x2056, prBA, gcPo}, // THREE DOT PUNCTUATION - {0x2057, 0x2057, prAL, gcPo}, // QUADRUPLE PRIME + {0x2057, 0x2057, prPO, gcPo}, // QUADRUPLE PRIME {0x2058, 0x205B, prBA, gcPo}, // [4] FOUR DOT PUNCTUATION..FOUR DOT MARK {0x205C, 0x205C, prAL, gcPo}, // DOTTED CROSS {0x205D, 0x205E, prBA, gcPo}, // [2] TRICOLON..VERTICAL FOUR DOTS @@ -2751,6 +2756,7 @@ var lineBreakCodePoints = [][4]int{ {0x10EAB, 0x10EAC, prCM, gcMn}, // [2] YEZIDI COMBINING HAMZA MARK..YEZIDI COMBINING MADDA MARK {0x10EAD, 0x10EAD, prBA, gcPd}, // YEZIDI HYPHENATION MARK {0x10EB0, 0x10EB1, prAL, gcLo}, // [2] YEZIDI LETTER LAM WITH DOT ABOVE..YEZIDI LETTER YOT WITH CIRCUMFLEX ABOVE + {0x10EFD, 0x10EFF, prCM, gcMn}, // [3] ARABIC SMALL LOW WORD SAKTA..ARABIC SMALL LOW WORD 
MADDA {0x10F00, 0x10F1C, prAL, gcLo}, // [29] OLD SOGDIAN LETTER ALEPH..OLD SOGDIAN LETTER FINAL TAW WITH VERTICAL TAIL {0x10F1D, 0x10F26, prAL, gcNo}, // [10] OLD SOGDIAN NUMBER ONE..OLD SOGDIAN FRACTION ONE HALF {0x10F27, 0x10F27, prAL, gcLo}, // OLD SOGDIAN LIGATURE AYIN-DALETH @@ -2840,6 +2846,8 @@ var lineBreakCodePoints = [][4]int{ {0x1123B, 0x1123C, prBA, gcPo}, // [2] KHOJKI SECTION MARK..KHOJKI DOUBLE SECTION MARK {0x1123D, 0x1123D, prAL, gcPo}, // KHOJKI ABBREVIATION SIGN {0x1123E, 0x1123E, prCM, gcMn}, // KHOJKI SIGN SUKUN + {0x1123F, 0x11240, prAL, gcLo}, // [2] KHOJKI LETTER QA..KHOJKI LETTER SHORT I + {0x11241, 0x11241, prCM, gcMn}, // KHOJKI VOWEL SIGN VOCALIC R {0x11280, 0x11286, prAL, gcLo}, // [7] MULTANI LETTER A..MULTANI LETTER GA {0x11288, 0x11288, prAL, gcLo}, // MULTANI LETTER GHA {0x1128A, 0x1128D, prAL, gcLo}, // [4] MULTANI LETTER CA..MULTANI LETTER JJA @@ -3013,6 +3021,7 @@ var lineBreakCodePoints = [][4]int{ {0x11AA1, 0x11AA2, prBA, gcPo}, // [2] SOYOMBO TERMINAL MARK-1..SOYOMBO TERMINAL MARK-2 {0x11AB0, 0x11ABF, prAL, gcLo}, // [16] CANADIAN SYLLABICS NATTILIK HI..CANADIAN SYLLABICS SPA {0x11AC0, 0x11AF8, prAL, gcLo}, // [57] PAU CIN HAU LETTER PA..PAU CIN HAU GLOTTAL STOP FINAL + {0x11B00, 0x11B09, prBB, gcPo}, // [10] DEVANAGARI HEAD MARK..DEVANAGARI SIGN MINDU {0x11C00, 0x11C08, prAL, gcLo}, // [9] BHAIKSUKI LETTER A..BHAIKSUKI LETTER VOCALIC L {0x11C0A, 0x11C2E, prAL, gcLo}, // [37] BHAIKSUKI LETTER E..BHAIKSUKI LETTER HA {0x11C2F, 0x11C2F, prCM, gcMc}, // BHAIKSUKI VOWEL SIGN AA @@ -3059,6 +3068,20 @@ var lineBreakCodePoints = [][4]int{ {0x11EF3, 0x11EF4, prCM, gcMn}, // [2] MAKASAR VOWEL SIGN I..MAKASAR VOWEL SIGN U {0x11EF5, 0x11EF6, prCM, gcMc}, // [2] MAKASAR VOWEL SIGN E..MAKASAR VOWEL SIGN O {0x11EF7, 0x11EF8, prAL, gcPo}, // [2] MAKASAR PASSIMBANG..MAKASAR END OF SECTION + {0x11F00, 0x11F01, prCM, gcMn}, // [2] KAWI SIGN CANDRABINDU..KAWI SIGN ANUSVARA + {0x11F02, 0x11F02, prAL, gcLo}, // KAWI SIGN REPHA + {0x11F03, 0x11F03, prCM, gcMc}, // KAWI SIGN VISARGA + {0x11F04, 0x11F10, prAL, gcLo}, // [13] KAWI LETTER A..KAWI LETTER O + {0x11F12, 0x11F33, prAL, gcLo}, // [34] KAWI LETTER KA..KAWI LETTER JNYA + {0x11F34, 0x11F35, prCM, gcMc}, // [2] KAWI VOWEL SIGN AA..KAWI VOWEL SIGN ALTERNATE AA + {0x11F36, 0x11F3A, prCM, gcMn}, // [5] KAWI VOWEL SIGN I..KAWI VOWEL SIGN VOCALIC R + {0x11F3E, 0x11F3F, prCM, gcMc}, // [2] KAWI VOWEL SIGN E..KAWI VOWEL SIGN AI + {0x11F40, 0x11F40, prCM, gcMn}, // KAWI VOWEL SIGN EU + {0x11F41, 0x11F41, prCM, gcMc}, // KAWI SIGN KILLER + {0x11F42, 0x11F42, prCM, gcMn}, // KAWI CONJOINER + {0x11F43, 0x11F44, prBA, gcPo}, // [2] KAWI DANDA..KAWI DOUBLE DANDA + {0x11F45, 0x11F4F, prID, gcPo}, // [11] KAWI PUNCTUATION SECTION MARKER..KAWI PUNCTUATION CLOSING SPIRAL + {0x11F50, 0x11F59, prNU, gcNd}, // [10] KAWI DIGIT ZERO..KAWI DIGIT NINE {0x11FB0, 0x11FB0, prAL, gcLo}, // LISU LETTER YHA {0x11FC0, 0x11FD4, prAL, gcNo}, // [21] TAMIL FRACTION ONE THREE-HUNDRED-AND-TWENTIETH..TAMIL FRACTION DOWNSCALING FACTOR KIIZH {0x11FD5, 0x11FDC, prAL, gcSo}, // [8] TAMIL SIGN NEL..TAMIL SIGN MUKKURUNI @@ -3084,10 +3107,18 @@ var lineBreakCodePoints = [][4]int{ {0x1328A, 0x13378, prAL, gcLo}, // [239] EGYPTIAN HIEROGLYPH O037..EGYPTIAN HIEROGLYPH V011 {0x13379, 0x13379, prOP, gcLo}, // EGYPTIAN HIEROGLYPH V011A {0x1337A, 0x1337B, prCL, gcLo}, // [2] EGYPTIAN HIEROGLYPH V011B..EGYPTIAN HIEROGLYPH V011C - {0x1337C, 0x1342E, prAL, gcLo}, // [179] EGYPTIAN HIEROGLYPH V012..EGYPTIAN HIEROGLYPH AA032 + {0x1337C, 0x1342F, prAL, gcLo}, // [180] 
EGYPTIAN HIEROGLYPH V012..EGYPTIAN HIEROGLYPH V011D {0x13430, 0x13436, prGL, gcCf}, // [7] EGYPTIAN HIEROGLYPH VERTICAL JOINER..EGYPTIAN HIEROGLYPH OVERLAY MIDDLE {0x13437, 0x13437, prOP, gcCf}, // EGYPTIAN HIEROGLYPH BEGIN SEGMENT {0x13438, 0x13438, prCL, gcCf}, // EGYPTIAN HIEROGLYPH END SEGMENT + {0x13439, 0x1343B, prGL, gcCf}, // [3] EGYPTIAN HIEROGLYPH INSERT AT MIDDLE..EGYPTIAN HIEROGLYPH INSERT AT BOTTOM + {0x1343C, 0x1343C, prOP, gcCf}, // EGYPTIAN HIEROGLYPH BEGIN ENCLOSURE + {0x1343D, 0x1343D, prCL, gcCf}, // EGYPTIAN HIEROGLYPH END ENCLOSURE + {0x1343E, 0x1343E, prOP, gcCf}, // EGYPTIAN HIEROGLYPH BEGIN WALLED ENCLOSURE + {0x1343F, 0x1343F, prCL, gcCf}, // EGYPTIAN HIEROGLYPH END WALLED ENCLOSURE + {0x13440, 0x13440, prCM, gcMn}, // EGYPTIAN HIEROGLYPH MIRROR HORIZONTALLY + {0x13441, 0x13446, prAL, gcLo}, // [6] EGYPTIAN HIEROGLYPH FULL BLANK..EGYPTIAN HIEROGLYPH WIDE LOST SIGN + {0x13447, 0x13455, prCM, gcMn}, // [15] EGYPTIAN HIEROGLYPH MODIFIER DAMAGED AT TOP START..EGYPTIAN HIEROGLYPH MODIFIER DAMAGED {0x14400, 0x145CD, prAL, gcLo}, // [462] ANATOLIAN HIEROGLYPH A001..ANATOLIAN HIEROGLYPH A409 {0x145CE, 0x145CE, prOP, gcLo}, // ANATOLIAN HIEROGLYPH A410 BEGIN LOGOGRAM MARK {0x145CF, 0x145CF, prCL, gcLo}, // ANATOLIAN HIEROGLYPH A410A END LOGOGRAM MARK @@ -3137,7 +3168,9 @@ var lineBreakCodePoints = [][4]int{ {0x1AFFD, 0x1AFFE, prAL, gcLm}, // [2] KATAKANA LETTER MINNAN NASALIZED TONE-7..KATAKANA LETTER MINNAN NASALIZED TONE-8 {0x1B000, 0x1B0FF, prID, gcLo}, // [256] KATAKANA LETTER ARCHAIC E..HENTAIGANA LETTER RE-2 {0x1B100, 0x1B122, prID, gcLo}, // [35] HENTAIGANA LETTER RE-3..KATAKANA LETTER ARCHAIC WU + {0x1B132, 0x1B132, prCJ, gcLo}, // HIRAGANA LETTER SMALL KO {0x1B150, 0x1B152, prCJ, gcLo}, // [3] HIRAGANA LETTER SMALL WI..HIRAGANA LETTER SMALL WO + {0x1B155, 0x1B155, prCJ, gcLo}, // KATAKANA LETTER SMALL KO {0x1B164, 0x1B167, prCJ, gcLo}, // [4] KATAKANA LETTER SMALL WI..KATAKANA LETTER SMALL N {0x1B170, 0x1B2FB, prID, gcLo}, // [396] NUSHU CHARACTER-1B170..NUSHU CHARACTER-1B2FB {0x1BC00, 0x1BC6A, prAL, gcLo}, // [107] DUPLOYAN LETTER H..DUPLOYAN LETTER VOCALIC M @@ -3168,6 +3201,7 @@ var lineBreakCodePoints = [][4]int{ {0x1D200, 0x1D241, prAL, gcSo}, // [66] GREEK VOCAL NOTATION SYMBOL-1..GREEK INSTRUMENTAL NOTATION SYMBOL-54 {0x1D242, 0x1D244, prCM, gcMn}, // [3] COMBINING GREEK MUSICAL TRISEME..COMBINING GREEK MUSICAL PENTASEME {0x1D245, 0x1D245, prAL, gcSo}, // GREEK MUSICAL LEIMMA + {0x1D2C0, 0x1D2D3, prAL, gcNo}, // [20] KAKTOVIK NUMERAL ZERO..KAKTOVIK NUMERAL NINETEEN {0x1D2E0, 0x1D2F3, prAL, gcNo}, // [20] MAYAN NUMERAL ZERO..MAYAN NUMERAL NINETEEN {0x1D300, 0x1D356, prAL, gcSo}, // [87] MONOGRAM FOR EARTH..TETRAGRAM FOR FOSTERING {0x1D360, 0x1D378, prAL, gcNo}, // [25] COUNTING ROD UNIT DIGIT ONE..TALLY MARK FIVE @@ -3228,11 +3262,14 @@ var lineBreakCodePoints = [][4]int{ {0x1DF00, 0x1DF09, prAL, gcLl}, // [10] LATIN SMALL LETTER FENG DIGRAPH WITH TRILL..LATIN SMALL LETTER T WITH HOOK AND RETROFLEX HOOK {0x1DF0A, 0x1DF0A, prAL, gcLo}, // LATIN LETTER RETROFLEX CLICK WITH RETROFLEX HOOK {0x1DF0B, 0x1DF1E, prAL, gcLl}, // [20] LATIN SMALL LETTER ESH WITH DOUBLE BAR..LATIN SMALL LETTER S WITH CURL + {0x1DF25, 0x1DF2A, prAL, gcLl}, // [6] LATIN SMALL LETTER D WITH MID-HEIGHT LEFT HOOK..LATIN SMALL LETTER T WITH MID-HEIGHT LEFT HOOK {0x1E000, 0x1E006, prCM, gcMn}, // [7] COMBINING GLAGOLITIC LETTER AZU..COMBINING GLAGOLITIC LETTER ZHIVETE {0x1E008, 0x1E018, prCM, gcMn}, // [17] COMBINING GLAGOLITIC LETTER ZEMLJA..COMBINING GLAGOLITIC LETTER HERU {0x1E01B, 0x1E021, 
prCM, gcMn}, // [7] COMBINING GLAGOLITIC LETTER SHTA..COMBINING GLAGOLITIC LETTER YATI {0x1E023, 0x1E024, prCM, gcMn}, // [2] COMBINING GLAGOLITIC LETTER YU..COMBINING GLAGOLITIC LETTER SMALL YUS {0x1E026, 0x1E02A, prCM, gcMn}, // [5] COMBINING GLAGOLITIC LETTER YO..COMBINING GLAGOLITIC LETTER FITA + {0x1E030, 0x1E06D, prAL, gcLm}, // [62] MODIFIER LETTER CYRILLIC SMALL A..MODIFIER LETTER CYRILLIC SMALL STRAIGHT U WITH STROKE + {0x1E08F, 0x1E08F, prCM, gcMn}, // COMBINING CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I {0x1E100, 0x1E12C, prAL, gcLo}, // [45] NYIAKENG PUACHUE HMONG LETTER MA..NYIAKENG PUACHUE HMONG LETTER W {0x1E130, 0x1E136, prCM, gcMn}, // [7] NYIAKENG PUACHUE HMONG TONE-B..NYIAKENG PUACHUE HMONG TONE-D {0x1E137, 0x1E13D, prAL, gcLm}, // [7] NYIAKENG PUACHUE HMONG SIGN FOR PERSON..NYIAKENG PUACHUE HMONG SYLLABLE LENGTHENER @@ -3245,6 +3282,10 @@ var lineBreakCodePoints = [][4]int{ {0x1E2EC, 0x1E2EF, prCM, gcMn}, // [4] WANCHO TONE TUP..WANCHO TONE KOINI {0x1E2F0, 0x1E2F9, prNU, gcNd}, // [10] WANCHO DIGIT ZERO..WANCHO DIGIT NINE {0x1E2FF, 0x1E2FF, prPR, gcSc}, // WANCHO NGUN SIGN + {0x1E4D0, 0x1E4EA, prAL, gcLo}, // [27] NAG MUNDARI LETTER O..NAG MUNDARI LETTER ELL + {0x1E4EB, 0x1E4EB, prAL, gcLm}, // NAG MUNDARI SIGN OJOD + {0x1E4EC, 0x1E4EF, prCM, gcMn}, // [4] NAG MUNDARI SIGN MUHOR..NAG MUNDARI SIGN SUTUH + {0x1E4F0, 0x1E4F9, prNU, gcNd}, // [10] NAG MUNDARI DIGIT ZERO..NAG MUNDARI DIGIT NINE {0x1E7E0, 0x1E7E6, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE HHYA..ETHIOPIC SYLLABLE HHYO {0x1E7E8, 0x1E7EB, prAL, gcLo}, // [4] ETHIOPIC SYLLABLE GURAGE HHWA..ETHIOPIC SYLLABLE HHWE {0x1E7ED, 0x1E7EE, prAL, gcLo}, // [2] ETHIOPIC SYLLABLE GURAGE MWI..ETHIOPIC SYLLABLE GURAGE MWEE @@ -3412,16 +3453,18 @@ var lineBreakCodePoints = [][4]int{ {0x1F6C1, 0x1F6CB, prID, gcSo}, // [11] BATHTUB..COUCH AND LAMP {0x1F6CC, 0x1F6CC, prEB, gcSo}, // SLEEPING ACCOMMODATION {0x1F6CD, 0x1F6D7, prID, gcSo}, // [11] SHOPPING BAGS..ELEVATOR - {0x1F6D8, 0x1F6DC, prID, gcCn}, // [5] .. - {0x1F6DD, 0x1F6EC, prID, gcSo}, // [16] PLAYGROUND SLIDE..AIRPLANE ARRIVING + {0x1F6D8, 0x1F6DB, prID, gcCn}, // [4] .. + {0x1F6DC, 0x1F6EC, prID, gcSo}, // [17] WIRELESS..AIRPLANE ARRIVING {0x1F6ED, 0x1F6EF, prID, gcCn}, // [3] .. {0x1F6F0, 0x1F6FC, prID, gcSo}, // [13] SATELLITE..ROLLER SKATE {0x1F6FD, 0x1F6FF, prID, gcCn}, // [3] .. {0x1F700, 0x1F773, prAL, gcSo}, // [116] ALCHEMICAL SYMBOL FOR QUINTESSENCE..ALCHEMICAL SYMBOL FOR HALF OUNCE - {0x1F774, 0x1F77F, prID, gcCn}, // [12] .. + {0x1F774, 0x1F776, prID, gcSo}, // [3] LOT OF FORTUNE..LUNAR ECLIPSE + {0x1F777, 0x1F77A, prID, gcCn}, // [4] .. + {0x1F77B, 0x1F77F, prID, gcSo}, // [5] HAUMEA..ORCUS {0x1F780, 0x1F7D4, prAL, gcSo}, // [85] BLACK LEFT-POINTING ISOSCELES RIGHT TRIANGLE..HEAVY TWELVE POINTED PINWHEEL STAR - {0x1F7D5, 0x1F7D8, prID, gcSo}, // [4] CIRCLED TRIANGLE..NEGATIVE CIRCLED SQUARE - {0x1F7D9, 0x1F7DF, prID, gcCn}, // [7] .. + {0x1F7D5, 0x1F7D9, prID, gcSo}, // [5] CIRCLED TRIANGLE..NINE POINTED WHITE STAR + {0x1F7DA, 0x1F7DF, prID, gcCn}, // [6] .. {0x1F7E0, 0x1F7EB, prID, gcSo}, // [12] LARGE ORANGE CIRCLE..LARGE BROWN SQUARE {0x1F7EC, 0x1F7EF, prID, gcCn}, // [4] .. {0x1F7F0, 0x1F7F0, prID, gcSo}, // HEAVY EQUALS SIGN @@ -3467,33 +3510,29 @@ var lineBreakCodePoints = [][4]int{ {0x1FA54, 0x1FA5F, prID, gcCn}, // [12] .. {0x1FA60, 0x1FA6D, prID, gcSo}, // [14] XIANGQI RED GENERAL..XIANGQI BLACK SOLDIER {0x1FA6E, 0x1FA6F, prID, gcCn}, // [2] .. 
- {0x1FA70, 0x1FA74, prID, gcSo}, // [5] BALLET SHOES..THONG SANDAL - {0x1FA75, 0x1FA77, prID, gcCn}, // [3] .. - {0x1FA78, 0x1FA7C, prID, gcSo}, // [5] DROP OF BLOOD..CRUTCH + {0x1FA70, 0x1FA7C, prID, gcSo}, // [13] BALLET SHOES..CRUTCH {0x1FA7D, 0x1FA7F, prID, gcCn}, // [3] .. - {0x1FA80, 0x1FA86, prID, gcSo}, // [7] YO-YO..NESTING DOLLS - {0x1FA87, 0x1FA8F, prID, gcCn}, // [9] .. - {0x1FA90, 0x1FAAC, prID, gcSo}, // [29] RINGED PLANET..HAMSA - {0x1FAAD, 0x1FAAF, prID, gcCn}, // [3] .. - {0x1FAB0, 0x1FABA, prID, gcSo}, // [11] FLY..NEST WITH EGGS - {0x1FABB, 0x1FABF, prID, gcCn}, // [5] .. - {0x1FAC0, 0x1FAC2, prID, gcSo}, // [3] ANATOMICAL HEART..PEOPLE HUGGING + {0x1FA80, 0x1FA88, prID, gcSo}, // [9] YO-YO..FLUTE + {0x1FA89, 0x1FA8F, prID, gcCn}, // [7] .. + {0x1FA90, 0x1FABD, prID, gcSo}, // [46] RINGED PLANET..WING + {0x1FABE, 0x1FABE, prID, gcCn}, // + {0x1FABF, 0x1FAC2, prID, gcSo}, // [4] GOOSE..PEOPLE HUGGING {0x1FAC3, 0x1FAC5, prEB, gcSo}, // [3] PREGNANT MAN..PERSON WITH CROWN - {0x1FAC6, 0x1FACF, prID, gcCn}, // [10] .. - {0x1FAD0, 0x1FAD9, prID, gcSo}, // [10] BLUEBERRIES..JAR - {0x1FADA, 0x1FADF, prID, gcCn}, // [6] .. - {0x1FAE0, 0x1FAE7, prID, gcSo}, // [8] MELTING FACE..BUBBLES - {0x1FAE8, 0x1FAEF, prID, gcCn}, // [8] .. - {0x1FAF0, 0x1FAF6, prEB, gcSo}, // [7] HAND WITH INDEX FINGER AND THUMB CROSSED..HEART HANDS - {0x1FAF7, 0x1FAFF, prID, gcCn}, // [9] .. + {0x1FAC6, 0x1FACD, prID, gcCn}, // [8] .. + {0x1FACE, 0x1FADB, prID, gcSo}, // [14] MOOSE..PEA POD + {0x1FADC, 0x1FADF, prID, gcCn}, // [4] .. + {0x1FAE0, 0x1FAE8, prID, gcSo}, // [9] MELTING FACE..SHAKING FACE + {0x1FAE9, 0x1FAEF, prID, gcCn}, // [7] .. + {0x1FAF0, 0x1FAF8, prEB, gcSo}, // [9] HAND WITH INDEX FINGER AND THUMB CROSSED..RIGHTWARDS PUSHING HAND + {0x1FAF9, 0x1FAFF, prID, gcCn}, // [7] .. {0x1FB00, 0x1FB92, prAL, gcSo}, // [147] BLOCK SEXTANT-1..UPPER HALF INVERSE MEDIUM SHADE AND LOWER HALF BLOCK {0x1FB94, 0x1FBCA, prAL, gcSo}, // [55] LEFT HALF INVERSE MEDIUM SHADE AND RIGHT HALF BLOCK..WHITE UP-POINTING CHEVRON {0x1FBF0, 0x1FBF9, prNU, gcNd}, // [10] SEGMENTED DIGIT ZERO..SEGMENTED DIGIT NINE {0x1FC00, 0x1FFFD, prID, gcCn}, // [1022] .. {0x20000, 0x2A6DF, prID, gcLo}, // [42720] CJK UNIFIED IDEOGRAPH-20000..CJK UNIFIED IDEOGRAPH-2A6DF {0x2A6E0, 0x2A6FF, prID, gcCn}, // [32] .. - {0x2A700, 0x2B738, prID, gcLo}, // [4153] CJK UNIFIED IDEOGRAPH-2A700..CJK UNIFIED IDEOGRAPH-2B738 - {0x2B739, 0x2B73F, prID, gcCn}, // [7] .. + {0x2A700, 0x2B739, prID, gcLo}, // [4154] CJK UNIFIED IDEOGRAPH-2A700..CJK UNIFIED IDEOGRAPH-2B739 + {0x2B73A, 0x2B73F, prID, gcCn}, // [6] .. {0x2B740, 0x2B81D, prID, gcLo}, // [222] CJK UNIFIED IDEOGRAPH-2B740..CJK UNIFIED IDEOGRAPH-2B81D {0x2B81E, 0x2B81F, prID, gcCn}, // [2] .. {0x2B820, 0x2CEA1, prID, gcLo}, // [5762] CJK UNIFIED IDEOGRAPH-2B820..CJK UNIFIED IDEOGRAPH-2CEA1 @@ -3504,7 +3543,9 @@ var lineBreakCodePoints = [][4]int{ {0x2FA1E, 0x2FA1F, prID, gcCn}, // [2] .. {0x2FA20, 0x2FFFD, prID, gcCn}, // [1502] .. {0x30000, 0x3134A, prID, gcLo}, // [4939] CJK UNIFIED IDEOGRAPH-30000..CJK UNIFIED IDEOGRAPH-3134A - {0x3134B, 0x3FFFD, prID, gcCn}, // [60595] .. + {0x3134B, 0x3134F, prID, gcCn}, // [5] .. + {0x31350, 0x323AF, prID, gcLo}, // [4192] CJK UNIFIED IDEOGRAPH-31350..CJK UNIFIED IDEOGRAPH-323AF + {0x323B0, 0x3FFFD, prID, gcCn}, // [56398] .. 
{0xE0001, 0xE0001, prCM, gcCf}, // LANGUAGE TAG {0xE0020, 0xE007F, prCM, gcCf}, // [96] TAG SPACE..CANCEL TAG {0xE0100, 0xE01EF, prCM, gcMn}, // [240] VARIATION SELECTOR-17..VARIATION SELECTOR-256 diff --git a/vendor/github.com/rivo/uniseg/linerules.go b/vendor/github.com/rivo/uniseg/linerules.go index d2ad51680e..7708ae0fbe 100644 --- a/vendor/github.com/rivo/uniseg/linerules.go +++ b/vendor/github.com/rivo/uniseg/linerules.go @@ -64,222 +64,381 @@ const ( LineMustBreak // You must break the line here. ) -// The line break parser's state transitions. It's anologous to grTransitions, -// see comments there for details. Unicode version 14.0.0. -var lbTransitions = map[[2]int][3]int{ +// lbTransitions implements the line break parser's state transitions. It's +// anologous to [grTransitions], see comments there for details. +// +// Unicode version 15.0.0. +func lbTransitions(state, prop int) (newState, lineBreak, rule int) { + switch uint64(state) | uint64(prop)<<32 { // LB4. - {lbAny, prBK}: {lbBK, LineCanBreak, 310}, - {lbBK, prAny}: {lbAny, LineMustBreak, 40}, + case lbBK | prAny<<32: + return lbAny, LineMustBreak, 40 // LB5. - {lbAny, prCR}: {lbCR, LineCanBreak, 310}, - {lbAny, prLF}: {lbLF, LineCanBreak, 310}, - {lbAny, prNL}: {lbNL, LineCanBreak, 310}, - {lbCR, prLF}: {lbLF, LineDontBreak, 50}, - {lbCR, prAny}: {lbAny, LineMustBreak, 50}, - {lbLF, prAny}: {lbAny, LineMustBreak, 50}, - {lbNL, prAny}: {lbAny, LineMustBreak, 50}, + case lbCR | prLF<<32: + return lbLF, LineDontBreak, 50 + case lbCR | prAny<<32: + return lbAny, LineMustBreak, 50 + case lbLF | prAny<<32: + return lbAny, LineMustBreak, 50 + case lbNL | prAny<<32: + return lbAny, LineMustBreak, 50 // LB6. - {lbAny, prBK}: {lbBK, LineDontBreak, 60}, - {lbAny, prCR}: {lbCR, LineDontBreak, 60}, - {lbAny, prLF}: {lbLF, LineDontBreak, 60}, - {lbAny, prNL}: {lbNL, LineDontBreak, 60}, + case lbAny | prBK<<32: + return lbBK, LineDontBreak, 60 + case lbAny | prCR<<32: + return lbCR, LineDontBreak, 60 + case lbAny | prLF<<32: + return lbLF, LineDontBreak, 60 + case lbAny | prNL<<32: + return lbNL, LineDontBreak, 60 // LB7. - {lbAny, prSP}: {lbSP, LineDontBreak, 70}, - {lbAny, prZW}: {lbZW, LineDontBreak, 70}, + case lbAny | prSP<<32: + return lbSP, LineDontBreak, 70 + case lbAny | prZW<<32: + return lbZW, LineDontBreak, 70 // LB8. - {lbZW, prSP}: {lbZW, LineDontBreak, 70}, - {lbZW, prAny}: {lbAny, LineCanBreak, 80}, + case lbZW | prSP<<32: + return lbZW, LineDontBreak, 70 + case lbZW | prAny<<32: + return lbAny, LineCanBreak, 80 // LB11. - {lbAny, prWJ}: {lbWJ, LineDontBreak, 110}, - {lbWJ, prAny}: {lbAny, LineDontBreak, 110}, + case lbAny | prWJ<<32: + return lbWJ, LineDontBreak, 110 + case lbWJ | prAny<<32: + return lbAny, LineDontBreak, 110 // LB12. - {lbAny, prGL}: {lbGL, LineCanBreak, 310}, - {lbGL, prAny}: {lbAny, LineDontBreak, 120}, + case lbAny | prGL<<32: + return lbGL, LineCanBreak, 310 + case lbGL | prAny<<32: + return lbAny, LineDontBreak, 120 // LB13 (simple transitions). - {lbAny, prCL}: {lbCL, LineCanBreak, 310}, - {lbAny, prCP}: {lbCP, LineCanBreak, 310}, - {lbAny, prEX}: {lbEX, LineDontBreak, 130}, - {lbAny, prIS}: {lbIS, LineCanBreak, 310}, - {lbAny, prSY}: {lbSY, LineCanBreak, 310}, + case lbAny | prCL<<32: + return lbCL, LineCanBreak, 310 + case lbAny | prCP<<32: + return lbCP, LineCanBreak, 310 + case lbAny | prEX<<32: + return lbEX, LineDontBreak, 130 + case lbAny | prIS<<32: + return lbIS, LineCanBreak, 310 + case lbAny | prSY<<32: + return lbSY, LineCanBreak, 310 // LB14. 
- {lbAny, prOP}: {lbOP, LineCanBreak, 310}, - {lbOP, prSP}: {lbOP, LineDontBreak, 70}, - {lbOP, prAny}: {lbAny, LineDontBreak, 140}, + case lbAny | prOP<<32: + return lbOP, LineCanBreak, 310 + case lbOP | prSP<<32: + return lbOP, LineDontBreak, 70 + case lbOP | prAny<<32: + return lbAny, LineDontBreak, 140 // LB15. - {lbQU, prSP}: {lbQUSP, LineDontBreak, 70}, - {lbQU, prOP}: {lbOP, LineDontBreak, 150}, - {lbQUSP, prOP}: {lbOP, LineDontBreak, 150}, + case lbQU | prSP<<32: + return lbQUSP, LineDontBreak, 70 + case lbQU | prOP<<32: + return lbOP, LineDontBreak, 150 + case lbQUSP | prOP<<32: + return lbOP, LineDontBreak, 150 // LB16. - {lbCL, prSP}: {lbCLCPSP, LineDontBreak, 70}, - {lbNUCL, prSP}: {lbCLCPSP, LineDontBreak, 70}, - {lbCP, prSP}: {lbCLCPSP, LineDontBreak, 70}, - {lbNUCP, prSP}: {lbCLCPSP, LineDontBreak, 70}, - {lbCL, prNS}: {lbNS, LineDontBreak, 160}, - {lbNUCL, prNS}: {lbNS, LineDontBreak, 160}, - {lbCP, prNS}: {lbNS, LineDontBreak, 160}, - {lbNUCP, prNS}: {lbNS, LineDontBreak, 160}, - {lbCLCPSP, prNS}: {lbNS, LineDontBreak, 160}, + case lbCL | prSP<<32: + return lbCLCPSP, LineDontBreak, 70 + case lbNUCL | prSP<<32: + return lbCLCPSP, LineDontBreak, 70 + case lbCP | prSP<<32: + return lbCLCPSP, LineDontBreak, 70 + case lbNUCP | prSP<<32: + return lbCLCPSP, LineDontBreak, 70 + case lbCL | prNS<<32: + return lbNS, LineDontBreak, 160 + case lbNUCL | prNS<<32: + return lbNS, LineDontBreak, 160 + case lbCP | prNS<<32: + return lbNS, LineDontBreak, 160 + case lbNUCP | prNS<<32: + return lbNS, LineDontBreak, 160 + case lbCLCPSP | prNS<<32: + return lbNS, LineDontBreak, 160 // LB17. - {lbAny, prB2}: {lbB2, LineCanBreak, 310}, - {lbB2, prSP}: {lbB2SP, LineDontBreak, 70}, - {lbB2, prB2}: {lbB2, LineDontBreak, 170}, - {lbB2SP, prB2}: {lbB2, LineDontBreak, 170}, + case lbAny | prB2<<32: + return lbB2, LineCanBreak, 310 + case lbB2 | prSP<<32: + return lbB2SP, LineDontBreak, 70 + case lbB2 | prB2<<32: + return lbB2, LineDontBreak, 170 + case lbB2SP | prB2<<32: + return lbB2, LineDontBreak, 170 // LB18. - {lbSP, prAny}: {lbAny, LineCanBreak, 180}, - {lbQUSP, prAny}: {lbAny, LineCanBreak, 180}, - {lbCLCPSP, prAny}: {lbAny, LineCanBreak, 180}, - {lbB2SP, prAny}: {lbAny, LineCanBreak, 180}, + case lbSP | prAny<<32: + return lbAny, LineCanBreak, 180 + case lbQUSP | prAny<<32: + return lbAny, LineCanBreak, 180 + case lbCLCPSP | prAny<<32: + return lbAny, LineCanBreak, 180 + case lbB2SP | prAny<<32: + return lbAny, LineCanBreak, 180 // LB19. - {lbAny, prQU}: {lbQU, LineDontBreak, 190}, - {lbQU, prAny}: {lbAny, LineDontBreak, 190}, + case lbAny | prQU<<32: + return lbQU, LineDontBreak, 190 + case lbQU | prAny<<32: + return lbAny, LineDontBreak, 190 // LB20. - {lbAny, prCB}: {lbCB, LineCanBreak, 200}, - {lbCB, prAny}: {lbAny, LineCanBreak, 200}, + case lbAny | prCB<<32: + return lbCB, LineCanBreak, 200 + case lbCB | prAny<<32: + return lbAny, LineCanBreak, 200 // LB21. - {lbAny, prBA}: {lbBA, LineDontBreak, 210}, - {lbAny, prHY}: {lbHY, LineDontBreak, 210}, - {lbAny, prNS}: {lbNS, LineDontBreak, 210}, - {lbAny, prBB}: {lbBB, LineCanBreak, 310}, - {lbBB, prAny}: {lbAny, LineDontBreak, 210}, + case lbAny | prBA<<32: + return lbBA, LineDontBreak, 210 + case lbAny | prHY<<32: + return lbHY, LineDontBreak, 210 + case lbAny | prNS<<32: + return lbNS, LineDontBreak, 210 + case lbAny | prBB<<32: + return lbBB, LineCanBreak, 310 + case lbBB | prAny<<32: + return lbAny, LineDontBreak, 210 // LB21a. 
- {lbAny, prHL}: {lbHL, LineCanBreak, 310}, - {lbHL, prHY}: {lbLB21a, LineDontBreak, 210}, - {lbHL, prBA}: {lbLB21a, LineDontBreak, 210}, - {lbLB21a, prAny}: {lbAny, LineDontBreak, 211}, + case lbAny | prHL<<32: + return lbHL, LineCanBreak, 310 + case lbHL | prHY<<32: + return lbLB21a, LineDontBreak, 210 + case lbHL | prBA<<32: + return lbLB21a, LineDontBreak, 210 + case lbLB21a | prAny<<32: + return lbAny, LineDontBreak, 211 // LB21b. - {lbSY, prHL}: {lbHL, LineDontBreak, 212}, - {lbNUSY, prHL}: {lbHL, LineDontBreak, 212}, + case lbSY | prHL<<32: + return lbHL, LineDontBreak, 212 + case lbNUSY | prHL<<32: + return lbHL, LineDontBreak, 212 // LB22. - {lbAny, prIN}: {lbAny, LineDontBreak, 220}, + case lbAny | prIN<<32: + return lbAny, LineDontBreak, 220 // LB23. - {lbAny, prAL}: {lbAL, LineCanBreak, 310}, - {lbAny, prNU}: {lbNU, LineCanBreak, 310}, - {lbAL, prNU}: {lbNU, LineDontBreak, 230}, - {lbHL, prNU}: {lbNU, LineDontBreak, 230}, - {lbNU, prAL}: {lbAL, LineDontBreak, 230}, - {lbNU, prHL}: {lbHL, LineDontBreak, 230}, - {lbNUNU, prAL}: {lbAL, LineDontBreak, 230}, - {lbNUNU, prHL}: {lbHL, LineDontBreak, 230}, + case lbAny | prAL<<32: + return lbAL, LineCanBreak, 310 + case lbAny | prNU<<32: + return lbNU, LineCanBreak, 310 + case lbAL | prNU<<32: + return lbNU, LineDontBreak, 230 + case lbHL | prNU<<32: + return lbNU, LineDontBreak, 230 + case lbNU | prAL<<32: + return lbAL, LineDontBreak, 230 + case lbNU | prHL<<32: + return lbHL, LineDontBreak, 230 + case lbNUNU | prAL<<32: + return lbAL, LineDontBreak, 230 + case lbNUNU | prHL<<32: + return lbHL, LineDontBreak, 230 // LB23a. - {lbAny, prPR}: {lbPR, LineCanBreak, 310}, - {lbAny, prID}: {lbIDEM, LineCanBreak, 310}, - {lbAny, prEB}: {lbEB, LineCanBreak, 310}, - {lbAny, prEM}: {lbIDEM, LineCanBreak, 310}, - {lbPR, prID}: {lbIDEM, LineDontBreak, 231}, - {lbPR, prEB}: {lbEB, LineDontBreak, 231}, - {lbPR, prEM}: {lbIDEM, LineDontBreak, 231}, - {lbIDEM, prPO}: {lbPO, LineDontBreak, 231}, - {lbEB, prPO}: {lbPO, LineDontBreak, 231}, + case lbAny | prPR<<32: + return lbPR, LineCanBreak, 310 + case lbAny | prID<<32: + return lbIDEM, LineCanBreak, 310 + case lbAny | prEB<<32: + return lbEB, LineCanBreak, 310 + case lbAny | prEM<<32: + return lbIDEM, LineCanBreak, 310 + case lbPR | prID<<32: + return lbIDEM, LineDontBreak, 231 + case lbPR | prEB<<32: + return lbEB, LineDontBreak, 231 + case lbPR | prEM<<32: + return lbIDEM, LineDontBreak, 231 + case lbIDEM | prPO<<32: + return lbPO, LineDontBreak, 231 + case lbEB | prPO<<32: + return lbPO, LineDontBreak, 231 // LB24. - {lbAny, prPO}: {lbPO, LineCanBreak, 310}, - {lbPR, prAL}: {lbAL, LineDontBreak, 240}, - {lbPR, prHL}: {lbHL, LineDontBreak, 240}, - {lbPO, prAL}: {lbAL, LineDontBreak, 240}, - {lbPO, prHL}: {lbHL, LineDontBreak, 240}, - {lbAL, prPR}: {lbPR, LineDontBreak, 240}, - {lbAL, prPO}: {lbPO, LineDontBreak, 240}, - {lbHL, prPR}: {lbPR, LineDontBreak, 240}, - {lbHL, prPO}: {lbPO, LineDontBreak, 240}, + case lbAny | prPO<<32: + return lbPO, LineCanBreak, 310 + case lbPR | prAL<<32: + return lbAL, LineDontBreak, 240 + case lbPR | prHL<<32: + return lbHL, LineDontBreak, 240 + case lbPO | prAL<<32: + return lbAL, LineDontBreak, 240 + case lbPO | prHL<<32: + return lbHL, LineDontBreak, 240 + case lbAL | prPR<<32: + return lbPR, LineDontBreak, 240 + case lbAL | prPO<<32: + return lbPO, LineDontBreak, 240 + case lbHL | prPR<<32: + return lbPR, LineDontBreak, 240 + case lbHL | prPO<<32: + return lbPO, LineDontBreak, 240 // LB25 (simple transitions). 
- {lbPR, prNU}: {lbNU, LineDontBreak, 250}, - {lbPO, prNU}: {lbNU, LineDontBreak, 250}, - {lbOP, prNU}: {lbNU, LineDontBreak, 250}, - {lbHY, prNU}: {lbNU, LineDontBreak, 250}, - {lbNU, prNU}: {lbNUNU, LineDontBreak, 250}, - {lbNU, prSY}: {lbNUSY, LineDontBreak, 250}, - {lbNU, prIS}: {lbNUIS, LineDontBreak, 250}, - {lbNUNU, prNU}: {lbNUNU, LineDontBreak, 250}, - {lbNUNU, prSY}: {lbNUSY, LineDontBreak, 250}, - {lbNUNU, prIS}: {lbNUIS, LineDontBreak, 250}, - {lbNUSY, prNU}: {lbNUNU, LineDontBreak, 250}, - {lbNUSY, prSY}: {lbNUSY, LineDontBreak, 250}, - {lbNUSY, prIS}: {lbNUIS, LineDontBreak, 250}, - {lbNUIS, prNU}: {lbNUNU, LineDontBreak, 250}, - {lbNUIS, prSY}: {lbNUSY, LineDontBreak, 250}, - {lbNUIS, prIS}: {lbNUIS, LineDontBreak, 250}, - {lbNU, prCL}: {lbNUCL, LineDontBreak, 250}, - {lbNU, prCP}: {lbNUCP, LineDontBreak, 250}, - {lbNUNU, prCL}: {lbNUCL, LineDontBreak, 250}, - {lbNUNU, prCP}: {lbNUCP, LineDontBreak, 250}, - {lbNUSY, prCL}: {lbNUCL, LineDontBreak, 250}, - {lbNUSY, prCP}: {lbNUCP, LineDontBreak, 250}, - {lbNUIS, prCL}: {lbNUCL, LineDontBreak, 250}, - {lbNUIS, prCP}: {lbNUCP, LineDontBreak, 250}, - {lbNU, prPO}: {lbPO, LineDontBreak, 250}, - {lbNUNU, prPO}: {lbPO, LineDontBreak, 250}, - {lbNUSY, prPO}: {lbPO, LineDontBreak, 250}, - {lbNUIS, prPO}: {lbPO, LineDontBreak, 250}, - {lbNUCL, prPO}: {lbPO, LineDontBreak, 250}, - {lbNUCP, prPO}: {lbPO, LineDontBreak, 250}, - {lbNU, prPR}: {lbPR, LineDontBreak, 250}, - {lbNUNU, prPR}: {lbPR, LineDontBreak, 250}, - {lbNUSY, prPR}: {lbPR, LineDontBreak, 250}, - {lbNUIS, prPR}: {lbPR, LineDontBreak, 250}, - {lbNUCL, prPR}: {lbPR, LineDontBreak, 250}, - {lbNUCP, prPR}: {lbPR, LineDontBreak, 250}, + case lbPR | prNU<<32: + return lbNU, LineDontBreak, 250 + case lbPO | prNU<<32: + return lbNU, LineDontBreak, 250 + case lbOP | prNU<<32: + return lbNU, LineDontBreak, 250 + case lbHY | prNU<<32: + return lbNU, LineDontBreak, 250 + case lbNU | prNU<<32: + return lbNUNU, LineDontBreak, 250 + case lbNU | prSY<<32: + return lbNUSY, LineDontBreak, 250 + case lbNU | prIS<<32: + return lbNUIS, LineDontBreak, 250 + case lbNUNU | prNU<<32: + return lbNUNU, LineDontBreak, 250 + case lbNUNU | prSY<<32: + return lbNUSY, LineDontBreak, 250 + case lbNUNU | prIS<<32: + return lbNUIS, LineDontBreak, 250 + case lbNUSY | prNU<<32: + return lbNUNU, LineDontBreak, 250 + case lbNUSY | prSY<<32: + return lbNUSY, LineDontBreak, 250 + case lbNUSY | prIS<<32: + return lbNUIS, LineDontBreak, 250 + case lbNUIS | prNU<<32: + return lbNUNU, LineDontBreak, 250 + case lbNUIS | prSY<<32: + return lbNUSY, LineDontBreak, 250 + case lbNUIS | prIS<<32: + return lbNUIS, LineDontBreak, 250 + case lbNU | prCL<<32: + return lbNUCL, LineDontBreak, 250 + case lbNU | prCP<<32: + return lbNUCP, LineDontBreak, 250 + case lbNUNU | prCL<<32: + return lbNUCL, LineDontBreak, 250 + case lbNUNU | prCP<<32: + return lbNUCP, LineDontBreak, 250 + case lbNUSY | prCL<<32: + return lbNUCL, LineDontBreak, 250 + case lbNUSY | prCP<<32: + return lbNUCP, LineDontBreak, 250 + case lbNUIS | prCL<<32: + return lbNUCL, LineDontBreak, 250 + case lbNUIS | prCP<<32: + return lbNUCP, LineDontBreak, 250 + case lbNU | prPO<<32: + return lbPO, LineDontBreak, 250 + case lbNUNU | prPO<<32: + return lbPO, LineDontBreak, 250 + case lbNUSY | prPO<<32: + return lbPO, LineDontBreak, 250 + case lbNUIS | prPO<<32: + return lbPO, LineDontBreak, 250 + case lbNUCL | prPO<<32: + return lbPO, LineDontBreak, 250 + case lbNUCP | prPO<<32: + return lbPO, LineDontBreak, 250 + case lbNU | prPR<<32: + return lbPR, LineDontBreak, 250 + 
case lbNUNU | prPR<<32: + return lbPR, LineDontBreak, 250 + case lbNUSY | prPR<<32: + return lbPR, LineDontBreak, 250 + case lbNUIS | prPR<<32: + return lbPR, LineDontBreak, 250 + case lbNUCL | prPR<<32: + return lbPR, LineDontBreak, 250 + case lbNUCP | prPR<<32: + return lbPR, LineDontBreak, 250 // LB26. - {lbAny, prJL}: {lbJL, LineCanBreak, 310}, - {lbAny, prJV}: {lbJV, LineCanBreak, 310}, - {lbAny, prJT}: {lbJT, LineCanBreak, 310}, - {lbAny, prH2}: {lbH2, LineCanBreak, 310}, - {lbAny, prH3}: {lbH3, LineCanBreak, 310}, - {lbJL, prJL}: {lbJL, LineDontBreak, 260}, - {lbJL, prJV}: {lbJV, LineDontBreak, 260}, - {lbJL, prH2}: {lbH2, LineDontBreak, 260}, - {lbJL, prH3}: {lbH3, LineDontBreak, 260}, - {lbJV, prJV}: {lbJV, LineDontBreak, 260}, - {lbJV, prJT}: {lbJT, LineDontBreak, 260}, - {lbH2, prJV}: {lbJV, LineDontBreak, 260}, - {lbH2, prJT}: {lbJT, LineDontBreak, 260}, - {lbJT, prJT}: {lbJT, LineDontBreak, 260}, - {lbH3, prJT}: {lbJT, LineDontBreak, 260}, + case lbAny | prJL<<32: + return lbJL, LineCanBreak, 310 + case lbAny | prJV<<32: + return lbJV, LineCanBreak, 310 + case lbAny | prJT<<32: + return lbJT, LineCanBreak, 310 + case lbAny | prH2<<32: + return lbH2, LineCanBreak, 310 + case lbAny | prH3<<32: + return lbH3, LineCanBreak, 310 + case lbJL | prJL<<32: + return lbJL, LineDontBreak, 260 + case lbJL | prJV<<32: + return lbJV, LineDontBreak, 260 + case lbJL | prH2<<32: + return lbH2, LineDontBreak, 260 + case lbJL | prH3<<32: + return lbH3, LineDontBreak, 260 + case lbJV | prJV<<32: + return lbJV, LineDontBreak, 260 + case lbJV | prJT<<32: + return lbJT, LineDontBreak, 260 + case lbH2 | prJV<<32: + return lbJV, LineDontBreak, 260 + case lbH2 | prJT<<32: + return lbJT, LineDontBreak, 260 + case lbJT | prJT<<32: + return lbJT, LineDontBreak, 260 + case lbH3 | prJT<<32: + return lbJT, LineDontBreak, 260 // LB27. - {lbJL, prPO}: {lbPO, LineDontBreak, 270}, - {lbJV, prPO}: {lbPO, LineDontBreak, 270}, - {lbJT, prPO}: {lbPO, LineDontBreak, 270}, - {lbH2, prPO}: {lbPO, LineDontBreak, 270}, - {lbH3, prPO}: {lbPO, LineDontBreak, 270}, - {lbPR, prJL}: {lbJL, LineDontBreak, 270}, - {lbPR, prJV}: {lbJV, LineDontBreak, 270}, - {lbPR, prJT}: {lbJT, LineDontBreak, 270}, - {lbPR, prH2}: {lbH2, LineDontBreak, 270}, - {lbPR, prH3}: {lbH3, LineDontBreak, 270}, + case lbJL | prPO<<32: + return lbPO, LineDontBreak, 270 + case lbJV | prPO<<32: + return lbPO, LineDontBreak, 270 + case lbJT | prPO<<32: + return lbPO, LineDontBreak, 270 + case lbH2 | prPO<<32: + return lbPO, LineDontBreak, 270 + case lbH3 | prPO<<32: + return lbPO, LineDontBreak, 270 + case lbPR | prJL<<32: + return lbJL, LineDontBreak, 270 + case lbPR | prJV<<32: + return lbJV, LineDontBreak, 270 + case lbPR | prJT<<32: + return lbJT, LineDontBreak, 270 + case lbPR | prH2<<32: + return lbH2, LineDontBreak, 270 + case lbPR | prH3<<32: + return lbH3, LineDontBreak, 270 // LB28. - {lbAL, prAL}: {lbAL, LineDontBreak, 280}, - {lbAL, prHL}: {lbHL, LineDontBreak, 280}, - {lbHL, prAL}: {lbAL, LineDontBreak, 280}, - {lbHL, prHL}: {lbHL, LineDontBreak, 280}, + case lbAL | prAL<<32: + return lbAL, LineDontBreak, 280 + case lbAL | prHL<<32: + return lbHL, LineDontBreak, 280 + case lbHL | prAL<<32: + return lbAL, LineDontBreak, 280 + case lbHL | prHL<<32: + return lbHL, LineDontBreak, 280 // LB29. 
- {lbIS, prAL}: {lbAL, LineDontBreak, 290}, - {lbIS, prHL}: {lbHL, LineDontBreak, 290}, - {lbNUIS, prAL}: {lbAL, LineDontBreak, 290}, - {lbNUIS, prHL}: {lbHL, LineDontBreak, 290}, + case lbIS | prAL<<32: + return lbAL, LineDontBreak, 290 + case lbIS | prHL<<32: + return lbHL, LineDontBreak, 290 + case lbNUIS | prAL<<32: + return lbAL, LineDontBreak, 290 + case lbNUIS | prHL<<32: + return lbHL, LineDontBreak, 290 + + default: + return -1, -1, -1 + } } // transitionLineBreakState determines the new state of the line break parser @@ -290,7 +449,7 @@ var lbTransitions = map[[2]int][3]int{ // further lookups. func transitionLineBreakState(state int, r rune, b []byte, str string) (newState int, lineBreak int) { // Determine the property of the next character. - nextProperty, generalCategory := propertyWithGenCat(lineBreakCodePoints, r) + nextProperty, generalCategory := propertyLineBreak(r) // Prepare. var forceNoBreak, isCPeaFWH bool @@ -306,7 +465,7 @@ func transitionLineBreakState(state int, r rune, b []byte, str string) (newState defer func() { // Transition into LB30. if newState == lbCP || newState == lbNUCP { - ea := property(eastAsianWidth, r) + ea := propertyEastAsianWidth(r) if ea != prF && ea != prW && ea != prH { newState |= lbCPeaFWHBit } @@ -352,30 +511,27 @@ func transitionLineBreakState(state int, r rune, b []byte, str string) (newState // Find the applicable transition in the table. var rule int - transition, ok := lbTransitions[[2]int{state, nextProperty}] - if ok { - // We have a specific transition. We'll use it. - newState, lineBreak, rule = transition[0], transition[1], transition[2] - } else { + newState, lineBreak, rule = lbTransitions(state, nextProperty) + if newState < 0 { // No specific transition found. Try the less specific ones. - transAnyProp, okAnyProp := lbTransitions[[2]int{state, prAny}] - transAnyState, okAnyState := lbTransitions[[2]int{lbAny, nextProperty}] - if okAnyProp && okAnyState { + anyPropProp, anyPropLineBreak, anyPropRule := lbTransitions(state, prAny) + anyStateProp, anyStateLineBreak, anyStateRule := lbTransitions(lbAny, nextProperty) + if anyPropProp >= 0 && anyStateProp >= 0 { // Both apply. We'll use a mix (see comments for grTransitions). - newState, lineBreak, rule = transAnyState[0], transAnyState[1], transAnyState[2] - if transAnyProp[2] < transAnyState[2] { - lineBreak, rule = transAnyProp[1], transAnyProp[2] + newState, lineBreak, rule = anyStateProp, anyStateLineBreak, anyStateRule + if anyPropRule < anyStateRule { + lineBreak, rule = anyPropLineBreak, anyPropRule } - } else if okAnyProp { + } else if anyPropProp >= 0 { // We only have a specific state. - newState, lineBreak, rule = transAnyProp[0], transAnyProp[1], transAnyProp[2] + newState, lineBreak, rule = anyPropProp, anyPropLineBreak, anyPropRule // This branch will probably never be reached because okAnyState will // always be true given the current transition map. But we keep it here // for future modifications to the transition map where this may not be // true anymore. - } else if okAnyState { + } else if anyStateProp >= 0 { // We only have a specific property. - newState, lineBreak, rule = transAnyState[0], transAnyState[1], transAnyState[2] + newState, lineBreak, rule = anyStateProp, anyStateLineBreak, anyStateRule } else { // No known transition. LB31: ALL ÷ ALL. 
newState, lineBreak, rule = lbAny, LineCanBreak, 310 @@ -414,7 +570,7 @@ func transitionLineBreakState(state int, r rune, b []byte, str string) (newState r, _ = utf8.DecodeRuneInString(str) } if r != utf8.RuneError { - pr, _ := propertyWithGenCat(lineBreakCodePoints, r) + pr, _ := propertyLineBreak(r) if pr == prNU { return lbNU, LineDontBreak } @@ -424,7 +580,7 @@ func transitionLineBreakState(state int, r rune, b []byte, str string) (newState // LB30 (part one). if rule > 300 { if (state == lbAL || state == lbHL || state == lbNU || state == lbNUNU) && nextProperty == prOP { - ea := property(eastAsianWidth, r) + ea := propertyEastAsianWidth(r) if ea != prF && ea != prW && ea != prH { return lbOP, LineDontBreak } @@ -460,7 +616,7 @@ func transitionLineBreakState(state int, r rune, b []byte, str string) (newState return prAny, LineDontBreak } } - graphemeProperty := property(graphemeCodePoints, r) + graphemeProperty := propertyGraphemes(r) if graphemeProperty == prExtendedPictographic && generalCategory == gcCn { return lbExtPicCn, LineCanBreak } diff --git a/vendor/github.com/rivo/uniseg/properties.go b/vendor/github.com/rivo/uniseg/properties.go index bc3c7bcf34..6290e6810f 100644 --- a/vendor/github.com/rivo/uniseg/properties.go +++ b/vendor/github.com/rivo/uniseg/properties.go @@ -160,9 +160,49 @@ func property(dictionary [][3]int, r rune) int { return propertySearch(dictionary, r)[2] } -// propertyWithGenCat returns the Unicode property value and General Category -// (see constants above) of the given code point. -func propertyWithGenCat(dictionary [][4]int, r rune) (property, generalCategory int) { - entry := propertySearch(dictionary, r) +// propertyLineBreak returns the Unicode property value and General Category +// (see constants above) of the given code point, as listed in the line break +// code points table, while fast tracking ASCII digits and letters. +func propertyLineBreak(r rune) (property, generalCategory int) { + if r >= 'a' && r <= 'z' { + return prAL, gcLl + } + if r >= 'A' && r <= 'Z' { + return prAL, gcLu + } + if r >= '0' && r <= '9' { + return prNU, gcNd + } + entry := propertySearch(lineBreakCodePoints, r) return entry[2], entry[3] } + +// propertyGraphemes returns the Unicode grapheme cluster property value of the +// given code point while fast tracking ASCII characters. +func propertyGraphemes(r rune) int { + if r >= 0x20 && r <= 0x7e { + return prAny + } + if r == 0x0a { + return prLF + } + if r == 0x0d { + return prCR + } + if r >= 0 && r <= 0x1f || r == 0x7f { + return prControl + } + return property(graphemeCodePoints, r) +} + +// propertyEastAsianWidth returns the Unicode East Asian Width property value of +// the given code point while fast tracking ASCII characters. +func propertyEastAsianWidth(r rune) int { + if r >= 0x20 && r <= 0x7e { + return prNa + } + if r >= 0 && r <= 0x1f || r == 0x7f { + return prN + } + return property(eastAsianWidth, r) +} diff --git a/vendor/github.com/rivo/uniseg/sentenceproperties.go b/vendor/github.com/rivo/uniseg/sentenceproperties.go index ba0cf2de11..67717ec1f3 100644 --- a/vendor/github.com/rivo/uniseg/sentenceproperties.go +++ b/vendor/github.com/rivo/uniseg/sentenceproperties.go @@ -1,13 +1,13 @@ -package uniseg - // Code generated via go generate from gen_properties.go. DO NOT EDIT. 
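The new propertyLineBreak, propertyGraphemes, and propertyEastAsianWidth helpers in properties.go above short-circuit ASCII before falling back to the binary search over the generated range tables. A rough standalone sketch of that fast-path-then-table-lookup idea, using a toy range table and illustrative property constants rather than the real generated uniseg data:

package main

import (
	"fmt"
	"sort"
)

// Toy property values; the real code uses the generated pr* constants.
const (
	propOther = iota
	propLetter
	propDigit
)

// A tiny sorted range table: {lo, hi, property}. The real tables are
// generated from the Unicode data files and are far larger.
var ranges = [][3]int{
	{0x00C0, 0x00FF, propLetter}, // roughly, Latin-1 letters
	{0x0660, 0x0669, propDigit},  // Arabic-Indic digits
}

// lookup returns the property of r, fast-tracking ASCII so the common case
// never touches the binary search, mirroring the idea behind
// propertyLineBreak and friends.
func lookup(r rune) int {
	if r >= 'a' && r <= 'z' || r >= 'A' && r <= 'Z' {
		return propLetter
	}
	if r >= '0' && r <= '9' {
		return propDigit
	}
	// Binary search over the sorted, non-overlapping ranges.
	i := sort.Search(len(ranges), func(i int) bool { return ranges[i][1] >= int(r) })
	if i < len(ranges) && int(r) >= ranges[i][0] {
		return ranges[i][2]
	}
	return propOther
}

func main() {
	fmt.Println(lookup('x'), lookup('٣'), lookup('€')) // 1 2 0
}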
+package uniseg + // sentenceBreakCodePoints are taken from -// https://www.unicode.org/Public/14.0.0/ucd/auxiliary/SentenceBreakProperty.txt +// https://www.unicode.org/Public/15.0.0/ucd/auxiliary/SentenceBreakProperty.txt // and -// https://unicode.org/Public/14.0.0/ucd/emoji/emoji-data.txt +// https://unicode.org/Public/15.0.0/ucd/emoji/emoji-data.txt // ("Extended_Pictographic" only) -// on September 10, 2022. See https://www.unicode.org/license.html for the Unicode +// on September 5, 2023. See https://www.unicode.org/license.html for the Unicode // license agreement. var sentenceBreakCodePoints = [][3]int{ {0x0009, 0x0009, prSp}, // Cc @@ -843,6 +843,7 @@ var sentenceBreakCodePoints = [][3]int{ {0x0CE2, 0x0CE3, prExtend}, // Mn [2] KANNADA VOWEL SIGN VOCALIC L..KANNADA VOWEL SIGN VOCALIC LL {0x0CE6, 0x0CEF, prNumeric}, // Nd [10] KANNADA DIGIT ZERO..KANNADA DIGIT NINE {0x0CF1, 0x0CF2, prOLetter}, // Lo [2] KANNADA SIGN JIHVAMULIYA..KANNADA SIGN UPADHMANIYA + {0x0CF3, 0x0CF3, prExtend}, // Mc KANNADA SIGN COMBINING ANUSVARA ABOVE RIGHT {0x0D00, 0x0D01, prExtend}, // Mn [2] MALAYALAM SIGN COMBINING ANUSVARA ABOVE..MALAYALAM SIGN CANDRABINDU {0x0D02, 0x0D03, prExtend}, // Mc [2] MALAYALAM SIGN ANUSVARA..MALAYALAM SIGN VISARGA {0x0D04, 0x0D0C, prOLetter}, // Lo [9] MALAYALAM LETTER VEDIC ANUSVARA..MALAYALAM LETTER VOCALIC L @@ -896,7 +897,7 @@ var sentenceBreakCodePoints = [][3]int{ {0x0EBD, 0x0EBD, prOLetter}, // Lo LAO SEMIVOWEL SIGN NYO {0x0EC0, 0x0EC4, prOLetter}, // Lo [5] LAO VOWEL SIGN E..LAO VOWEL SIGN AI {0x0EC6, 0x0EC6, prOLetter}, // Lm LAO KO LA - {0x0EC8, 0x0ECD, prExtend}, // Mn [6] LAO TONE MAI EK..LAO NIGGAHITA + {0x0EC8, 0x0ECE, prExtend}, // Mn [7] LAO TONE MAI EK..LAO YAMAKKAN {0x0ED0, 0x0ED9, prNumeric}, // Nd [10] LAO DIGIT ZERO..LAO DIGIT NINE {0x0EDC, 0x0EDF, prOLetter}, // Lo [4] LAO HO NO..LAO LETTER KHMU NYO {0x0F00, 0x0F00, prOLetter}, // Lo TIBETAN SYLLABLE OM @@ -958,7 +959,7 @@ var sentenceBreakCodePoints = [][3]int{ {0x10C7, 0x10C7, prUpper}, // L& GEORGIAN CAPITAL LETTER YN {0x10CD, 0x10CD, prUpper}, // L& GEORGIAN CAPITAL LETTER AEN {0x10D0, 0x10FA, prOLetter}, // L& [43] GEORGIAN LETTER AN..GEORGIAN LETTER AIN - {0x10FC, 0x10FC, prOLetter}, // Lm MODIFIER LETTER GEORGIAN NAR + {0x10FC, 0x10FC, prLower}, // Lm MODIFIER LETTER GEORGIAN NAR {0x10FD, 0x10FF, prOLetter}, // L& [3] GEORGIAN LETTER AEN..GEORGIAN LETTER LABIAL SIGN {0x1100, 0x1248, prOLetter}, // Lo [329] HANGUL CHOSEONG KIYEOK..ETHIOPIC SYLLABLE QWA {0x124A, 0x124D, prOLetter}, // Lo [4] ETHIOPIC SYLLABLE QWI..ETHIOPIC SYLLABLE QWE @@ -2034,7 +2035,7 @@ var sentenceBreakCodePoints = [][3]int{ {0xA7D7, 0xA7D7, prLower}, // L& LATIN SMALL LETTER MIDDLE SCOTS S {0xA7D8, 0xA7D8, prUpper}, // L& LATIN CAPITAL LETTER SIGMOID S {0xA7D9, 0xA7D9, prLower}, // L& LATIN SMALL LETTER SIGMOID S - {0xA7F2, 0xA7F4, prOLetter}, // Lm [3] MODIFIER LETTER CAPITAL C..MODIFIER LETTER CAPITAL Q + {0xA7F2, 0xA7F4, prLower}, // Lm [3] MODIFIER LETTER CAPITAL C..MODIFIER LETTER CAPITAL Q {0xA7F5, 0xA7F5, prUpper}, // L& LATIN CAPITAL LETTER REVERSED HALF H {0xA7F6, 0xA7F6, prLower}, // L& LATIN SMALL LETTER REVERSED HALF H {0xA7F7, 0xA7F7, prOLetter}, // Lo LATIN EPIGRAPHIC LETTER SIDEWAYS I @@ -2140,7 +2141,7 @@ var sentenceBreakCodePoints = [][3]int{ {0xAB30, 0xAB5A, prLower}, // L& [43] LATIN SMALL LETTER BARRED ALPHA..LATIN SMALL LETTER Y WITH SHORT RIGHT LEG {0xAB5C, 0xAB5F, prLower}, // Lm [4] MODIFIER LETTER SMALL HENG..MODIFIER LETTER SMALL U WITH LEFT HOOK {0xAB60, 0xAB68, prLower}, // L& [9] LATIN SMALL 
LETTER SAKHA YAT..LATIN SMALL LETTER TURNED R WITH MIDDLE TILDE - {0xAB69, 0xAB69, prOLetter}, // Lm MODIFIER LETTER SMALL TURNED W + {0xAB69, 0xAB69, prLower}, // Lm MODIFIER LETTER SMALL TURNED W {0xAB70, 0xABBF, prLower}, // L& [80] CHEROKEE SMALL LETTER A..CHEROKEE SMALL LETTER YA {0xABC0, 0xABE2, prOLetter}, // Lo [35] MEETEI MAYEK LETTER KOK..MEETEI MAYEK LETTER I LONSUM {0xABE3, 0xABE4, prExtend}, // Mc [2] MEETEI MAYEK VOWEL SIGN ONAP..MEETEI MAYEK VOWEL SIGN INAP @@ -2334,6 +2335,7 @@ var sentenceBreakCodePoints = [][3]int{ {0x10E80, 0x10EA9, prOLetter}, // Lo [42] YEZIDI LETTER ELIF..YEZIDI LETTER ET {0x10EAB, 0x10EAC, prExtend}, // Mn [2] YEZIDI COMBINING HAMZA MARK..YEZIDI COMBINING MADDA MARK {0x10EB0, 0x10EB1, prOLetter}, // Lo [2] YEZIDI LETTER LAM WITH DOT ABOVE..YEZIDI LETTER YOT WITH CIRCUMFLEX ABOVE + {0x10EFD, 0x10EFF, prExtend}, // Mn [3] ARABIC SMALL LOW WORD SAKTA..ARABIC SMALL LOW WORD MADDA {0x10F00, 0x10F1C, prOLetter}, // Lo [29] OLD SOGDIAN LETTER ALEPH..OLD SOGDIAN LETTER FINAL TAW WITH VERTICAL TAIL {0x10F27, 0x10F27, prOLetter}, // Lo OLD SOGDIAN LIGATURE AYIN-DALETH {0x10F30, 0x10F45, prOLetter}, // Lo [22] SOGDIAN LETTER ALEPH..SOGDIAN INDEPENDENT SHIN @@ -2408,6 +2410,8 @@ var sentenceBreakCodePoints = [][3]int{ {0x11238, 0x11239, prSTerm}, // Po [2] KHOJKI DANDA..KHOJKI DOUBLE DANDA {0x1123B, 0x1123C, prSTerm}, // Po [2] KHOJKI SECTION MARK..KHOJKI DOUBLE SECTION MARK {0x1123E, 0x1123E, prExtend}, // Mn KHOJKI SIGN SUKUN + {0x1123F, 0x11240, prOLetter}, // Lo [2] KHOJKI LETTER QA..KHOJKI LETTER SHORT I + {0x11241, 0x11241, prExtend}, // Mn KHOJKI VOWEL SIGN VOCALIC R {0x11280, 0x11286, prOLetter}, // Lo [7] MULTANI LETTER A..MULTANI LETTER GA {0x11288, 0x11288, prOLetter}, // Lo MULTANI LETTER GHA {0x1128A, 0x1128D, prOLetter}, // Lo [4] MULTANI LETTER CA..MULTANI LETTER JJA @@ -2603,13 +2607,29 @@ var sentenceBreakCodePoints = [][3]int{ {0x11EF3, 0x11EF4, prExtend}, // Mn [2] MAKASAR VOWEL SIGN I..MAKASAR VOWEL SIGN U {0x11EF5, 0x11EF6, prExtend}, // Mc [2] MAKASAR VOWEL SIGN E..MAKASAR VOWEL SIGN O {0x11EF7, 0x11EF8, prSTerm}, // Po [2] MAKASAR PASSIMBANG..MAKASAR END OF SECTION + {0x11F00, 0x11F01, prExtend}, // Mn [2] KAWI SIGN CANDRABINDU..KAWI SIGN ANUSVARA + {0x11F02, 0x11F02, prOLetter}, // Lo KAWI SIGN REPHA + {0x11F03, 0x11F03, prExtend}, // Mc KAWI SIGN VISARGA + {0x11F04, 0x11F10, prOLetter}, // Lo [13] KAWI LETTER A..KAWI LETTER O + {0x11F12, 0x11F33, prOLetter}, // Lo [34] KAWI LETTER KA..KAWI LETTER JNYA + {0x11F34, 0x11F35, prExtend}, // Mc [2] KAWI VOWEL SIGN AA..KAWI VOWEL SIGN ALTERNATE AA + {0x11F36, 0x11F3A, prExtend}, // Mn [5] KAWI VOWEL SIGN I..KAWI VOWEL SIGN VOCALIC R + {0x11F3E, 0x11F3F, prExtend}, // Mc [2] KAWI VOWEL SIGN E..KAWI VOWEL SIGN AI + {0x11F40, 0x11F40, prExtend}, // Mn KAWI VOWEL SIGN EU + {0x11F41, 0x11F41, prExtend}, // Mc KAWI SIGN KILLER + {0x11F42, 0x11F42, prExtend}, // Mn KAWI CONJOINER + {0x11F43, 0x11F44, prSTerm}, // Po [2] KAWI DANDA..KAWI DOUBLE DANDA + {0x11F50, 0x11F59, prNumeric}, // Nd [10] KAWI DIGIT ZERO..KAWI DIGIT NINE {0x11FB0, 0x11FB0, prOLetter}, // Lo LISU LETTER YHA {0x12000, 0x12399, prOLetter}, // Lo [922] CUNEIFORM SIGN A..CUNEIFORM SIGN U U {0x12400, 0x1246E, prOLetter}, // Nl [111] CUNEIFORM NUMERIC SIGN TWO ASH..CUNEIFORM NUMERIC SIGN NINE U VARIANT FORM {0x12480, 0x12543, prOLetter}, // Lo [196] CUNEIFORM SIGN AB TIMES NUN TENU..CUNEIFORM SIGN ZU5 TIMES THREE DISH TENU {0x12F90, 0x12FF0, prOLetter}, // Lo [97] CYPRO-MINOAN SIGN CM001..CYPRO-MINOAN SIGN CM114 - {0x13000, 0x1342E, 
prOLetter}, // Lo [1071] EGYPTIAN HIEROGLYPH A001..EGYPTIAN HIEROGLYPH AA032 - {0x13430, 0x13438, prFormat}, // Cf [9] EGYPTIAN HIEROGLYPH VERTICAL JOINER..EGYPTIAN HIEROGLYPH END SEGMENT + {0x13000, 0x1342F, prOLetter}, // Lo [1072] EGYPTIAN HIEROGLYPH A001..EGYPTIAN HIEROGLYPH V011D + {0x13430, 0x1343F, prFormat}, // Cf [16] EGYPTIAN HIEROGLYPH VERTICAL JOINER..EGYPTIAN HIEROGLYPH END WALLED ENCLOSURE + {0x13440, 0x13440, prExtend}, // Mn EGYPTIAN HIEROGLYPH MIRROR HORIZONTALLY + {0x13441, 0x13446, prOLetter}, // Lo [6] EGYPTIAN HIEROGLYPH FULL BLANK..EGYPTIAN HIEROGLYPH WIDE LOST SIGN + {0x13447, 0x13455, prExtend}, // Mn [15] EGYPTIAN HIEROGLYPH MODIFIER DAMAGED AT TOP START..EGYPTIAN HIEROGLYPH MODIFIER DAMAGED {0x14400, 0x14646, prOLetter}, // Lo [583] ANATOLIAN HIEROGLYPH A001..ANATOLIAN HIEROGLYPH A530 {0x16800, 0x16A38, prOLetter}, // Lo [569] BAMUM LETTER PHASE-A NGKUE MFON..BAMUM LETTER PHASE-F VUEQ {0x16A40, 0x16A5E, prOLetter}, // Lo [31] MRO LETTER TA..MRO LETTER TEK @@ -2648,7 +2668,9 @@ var sentenceBreakCodePoints = [][3]int{ {0x1AFF5, 0x1AFFB, prOLetter}, // Lm [7] KATAKANA LETTER MINNAN TONE-7..KATAKANA LETTER MINNAN NASALIZED TONE-5 {0x1AFFD, 0x1AFFE, prOLetter}, // Lm [2] KATAKANA LETTER MINNAN NASALIZED TONE-7..KATAKANA LETTER MINNAN NASALIZED TONE-8 {0x1B000, 0x1B122, prOLetter}, // Lo [291] KATAKANA LETTER ARCHAIC E..KATAKANA LETTER ARCHAIC WU + {0x1B132, 0x1B132, prOLetter}, // Lo HIRAGANA LETTER SMALL KO {0x1B150, 0x1B152, prOLetter}, // Lo [3] HIRAGANA LETTER SMALL WI..HIRAGANA LETTER SMALL WO + {0x1B155, 0x1B155, prOLetter}, // Lo KATAKANA LETTER SMALL KO {0x1B164, 0x1B167, prOLetter}, // Lo [4] KATAKANA LETTER SMALL WI..KATAKANA LETTER SMALL N {0x1B170, 0x1B2FB, prOLetter}, // Lo [396] NUSHU CHARACTER-1B170..NUSHU CHARACTER-1B2FB {0x1BC00, 0x1BC6A, prOLetter}, // Lo [107] DUPLOYAN LETTER H..DUPLOYAN LETTER VOCALIC M @@ -2738,11 +2760,14 @@ var sentenceBreakCodePoints = [][3]int{ {0x1DF00, 0x1DF09, prLower}, // L& [10] LATIN SMALL LETTER FENG DIGRAPH WITH TRILL..LATIN SMALL LETTER T WITH HOOK AND RETROFLEX HOOK {0x1DF0A, 0x1DF0A, prOLetter}, // Lo LATIN LETTER RETROFLEX CLICK WITH RETROFLEX HOOK {0x1DF0B, 0x1DF1E, prLower}, // L& [20] LATIN SMALL LETTER ESH WITH DOUBLE BAR..LATIN SMALL LETTER S WITH CURL + {0x1DF25, 0x1DF2A, prLower}, // L& [6] LATIN SMALL LETTER D WITH MID-HEIGHT LEFT HOOK..LATIN SMALL LETTER T WITH MID-HEIGHT LEFT HOOK {0x1E000, 0x1E006, prExtend}, // Mn [7] COMBINING GLAGOLITIC LETTER AZU..COMBINING GLAGOLITIC LETTER ZHIVETE {0x1E008, 0x1E018, prExtend}, // Mn [17] COMBINING GLAGOLITIC LETTER ZEMLJA..COMBINING GLAGOLITIC LETTER HERU {0x1E01B, 0x1E021, prExtend}, // Mn [7] COMBINING GLAGOLITIC LETTER SHTA..COMBINING GLAGOLITIC LETTER YATI {0x1E023, 0x1E024, prExtend}, // Mn [2] COMBINING GLAGOLITIC LETTER YU..COMBINING GLAGOLITIC LETTER SMALL YUS {0x1E026, 0x1E02A, prExtend}, // Mn [5] COMBINING GLAGOLITIC LETTER YO..COMBINING GLAGOLITIC LETTER FITA + {0x1E030, 0x1E06D, prLower}, // Lm [62] MODIFIER LETTER CYRILLIC SMALL A..MODIFIER LETTER CYRILLIC SMALL STRAIGHT U WITH STROKE + {0x1E08F, 0x1E08F, prExtend}, // Mn COMBINING CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I {0x1E100, 0x1E12C, prOLetter}, // Lo [45] NYIAKENG PUACHUE HMONG LETTER MA..NYIAKENG PUACHUE HMONG LETTER W {0x1E130, 0x1E136, prExtend}, // Mn [7] NYIAKENG PUACHUE HMONG TONE-B..NYIAKENG PUACHUE HMONG TONE-D {0x1E137, 0x1E13D, prOLetter}, // Lm [7] NYIAKENG PUACHUE HMONG SIGN FOR PERSON..NYIAKENG PUACHUE HMONG SYLLABLE LENGTHENER @@ -2753,6 +2778,10 @@ var 
sentenceBreakCodePoints = [][3]int{ {0x1E2C0, 0x1E2EB, prOLetter}, // Lo [44] WANCHO LETTER AA..WANCHO LETTER YIH {0x1E2EC, 0x1E2EF, prExtend}, // Mn [4] WANCHO TONE TUP..WANCHO TONE KOINI {0x1E2F0, 0x1E2F9, prNumeric}, // Nd [10] WANCHO DIGIT ZERO..WANCHO DIGIT NINE + {0x1E4D0, 0x1E4EA, prOLetter}, // Lo [27] NAG MUNDARI LETTER O..NAG MUNDARI LETTER ELL + {0x1E4EB, 0x1E4EB, prOLetter}, // Lm NAG MUNDARI SIGN OJOD + {0x1E4EC, 0x1E4EF, prExtend}, // Mn [4] NAG MUNDARI SIGN MUHOR..NAG MUNDARI SIGN SUTUH + {0x1E4F0, 0x1E4F9, prNumeric}, // Nd [10] NAG MUNDARI DIGIT ZERO..NAG MUNDARI DIGIT NINE {0x1E7E0, 0x1E7E6, prOLetter}, // Lo [7] ETHIOPIC SYLLABLE HHYA..ETHIOPIC SYLLABLE HHYO {0x1E7E8, 0x1E7EB, prOLetter}, // Lo [4] ETHIOPIC SYLLABLE GURAGE HHWA..ETHIOPIC SYLLABLE HHWE {0x1E7ED, 0x1E7EE, prOLetter}, // Lo [2] ETHIOPIC SYLLABLE GURAGE MWI..ETHIOPIC SYLLABLE GURAGE MWEE @@ -2803,12 +2832,13 @@ var sentenceBreakCodePoints = [][3]int{ {0x1F676, 0x1F678, prClose}, // So [3] SANS-SERIF HEAVY DOUBLE TURNED COMMA QUOTATION MARK ORNAMENT..SANS-SERIF HEAVY LOW DOUBLE COMMA QUOTATION MARK ORNAMENT {0x1FBF0, 0x1FBF9, prNumeric}, // Nd [10] SEGMENTED DIGIT ZERO..SEGMENTED DIGIT NINE {0x20000, 0x2A6DF, prOLetter}, // Lo [42720] CJK UNIFIED IDEOGRAPH-20000..CJK UNIFIED IDEOGRAPH-2A6DF - {0x2A700, 0x2B738, prOLetter}, // Lo [4153] CJK UNIFIED IDEOGRAPH-2A700..CJK UNIFIED IDEOGRAPH-2B738 + {0x2A700, 0x2B739, prOLetter}, // Lo [4154] CJK UNIFIED IDEOGRAPH-2A700..CJK UNIFIED IDEOGRAPH-2B739 {0x2B740, 0x2B81D, prOLetter}, // Lo [222] CJK UNIFIED IDEOGRAPH-2B740..CJK UNIFIED IDEOGRAPH-2B81D {0x2B820, 0x2CEA1, prOLetter}, // Lo [5762] CJK UNIFIED IDEOGRAPH-2B820..CJK UNIFIED IDEOGRAPH-2CEA1 {0x2CEB0, 0x2EBE0, prOLetter}, // Lo [7473] CJK UNIFIED IDEOGRAPH-2CEB0..CJK UNIFIED IDEOGRAPH-2EBE0 {0x2F800, 0x2FA1D, prOLetter}, // Lo [542] CJK COMPATIBILITY IDEOGRAPH-2F800..CJK COMPATIBILITY IDEOGRAPH-2FA1D {0x30000, 0x3134A, prOLetter}, // Lo [4939] CJK UNIFIED IDEOGRAPH-30000..CJK UNIFIED IDEOGRAPH-3134A + {0x31350, 0x323AF, prOLetter}, // Lo [4192] CJK UNIFIED IDEOGRAPH-31350..CJK UNIFIED IDEOGRAPH-323AF {0xE0001, 0xE0001, prFormat}, // Cf LANGUAGE TAG {0xE0020, 0xE007F, prExtend}, // Cf [96] TAG SPACE..CANCEL TAG {0xE0100, 0xE01EF, prExtend}, // Mn [240] VARIATION SELECTOR-17..VARIATION SELECTOR-256 diff --git a/vendor/github.com/rivo/uniseg/sentencerules.go b/vendor/github.com/rivo/uniseg/sentencerules.go index 58c04794e8..0b29c7bdb8 100644 --- a/vendor/github.com/rivo/uniseg/sentencerules.go +++ b/vendor/github.com/rivo/uniseg/sentencerules.go @@ -18,104 +18,178 @@ const ( sbSB8aSp ) -// The sentence break parser's breaking instructions. -const ( - sbDontBreak = iota - sbBreak -) - -// The sentence break parser's state transitions. It's anologous to -// grTransitions, see comments there for details. Unicode version 14.0.0. -var sbTransitions = map[[2]int][3]int{ +// sbTransitions implements the sentence break parser's state transitions. It's +// anologous to [grTransitions], see comments there for details. +// +// Unicode version 15.0.0. +func sbTransitions(state, prop int) (newState int, sentenceBreak bool, rule int) { + switch uint64(state) | uint64(prop)<<32 { // SB3. - {sbAny, prCR}: {sbCR, sbDontBreak, 9990}, - {sbCR, prLF}: {sbParaSep, sbDontBreak, 30}, + case sbAny | prCR<<32: + return sbCR, false, 9990 + case sbCR | prLF<<32: + return sbParaSep, false, 30 // SB4. 
- {sbAny, prSep}: {sbParaSep, sbDontBreak, 9990}, - {sbAny, prLF}: {sbParaSep, sbDontBreak, 9990}, - {sbParaSep, prAny}: {sbAny, sbBreak, 40}, - {sbCR, prAny}: {sbAny, sbBreak, 40}, + case sbAny | prSep<<32: + return sbParaSep, false, 9990 + case sbAny | prLF<<32: + return sbParaSep, false, 9990 + case sbParaSep | prAny<<32: + return sbAny, true, 40 + case sbCR | prAny<<32: + return sbAny, true, 40 // SB6. - {sbAny, prATerm}: {sbATerm, sbDontBreak, 9990}, - {sbATerm, prNumeric}: {sbAny, sbDontBreak, 60}, - {sbSB7, prNumeric}: {sbAny, sbDontBreak, 60}, // Because ATerm also appears in SB7. + case sbAny | prATerm<<32: + return sbATerm, false, 9990 + case sbATerm | prNumeric<<32: + return sbAny, false, 60 + case sbSB7 | prNumeric<<32: + return sbAny, false, 60 // Because ATerm also appears in SB7. // SB7. - {sbAny, prUpper}: {sbUpper, sbDontBreak, 9990}, - {sbAny, prLower}: {sbLower, sbDontBreak, 9990}, - {sbUpper, prATerm}: {sbSB7, sbDontBreak, 70}, - {sbLower, prATerm}: {sbSB7, sbDontBreak, 70}, - {sbSB7, prUpper}: {sbUpper, sbDontBreak, 70}, + case sbAny | prUpper<<32: + return sbUpper, false, 9990 + case sbAny | prLower<<32: + return sbLower, false, 9990 + case sbUpper | prATerm<<32: + return sbSB7, false, 70 + case sbLower | prATerm<<32: + return sbSB7, false, 70 + case sbSB7 | prUpper<<32: + return sbUpper, false, 70 // SB8a. - {sbAny, prSTerm}: {sbSTerm, sbDontBreak, 9990}, - {sbATerm, prSContinue}: {sbAny, sbDontBreak, 81}, - {sbATerm, prATerm}: {sbATerm, sbDontBreak, 81}, - {sbATerm, prSTerm}: {sbSTerm, sbDontBreak, 81}, - {sbSB7, prSContinue}: {sbAny, sbDontBreak, 81}, - {sbSB7, prATerm}: {sbATerm, sbDontBreak, 81}, - {sbSB7, prSTerm}: {sbSTerm, sbDontBreak, 81}, - {sbSB8Close, prSContinue}: {sbAny, sbDontBreak, 81}, - {sbSB8Close, prATerm}: {sbATerm, sbDontBreak, 81}, - {sbSB8Close, prSTerm}: {sbSTerm, sbDontBreak, 81}, - {sbSB8Sp, prSContinue}: {sbAny, sbDontBreak, 81}, - {sbSB8Sp, prATerm}: {sbATerm, sbDontBreak, 81}, - {sbSB8Sp, prSTerm}: {sbSTerm, sbDontBreak, 81}, - {sbSTerm, prSContinue}: {sbAny, sbDontBreak, 81}, - {sbSTerm, prATerm}: {sbATerm, sbDontBreak, 81}, - {sbSTerm, prSTerm}: {sbSTerm, sbDontBreak, 81}, - {sbSB8aClose, prSContinue}: {sbAny, sbDontBreak, 81}, - {sbSB8aClose, prATerm}: {sbATerm, sbDontBreak, 81}, - {sbSB8aClose, prSTerm}: {sbSTerm, sbDontBreak, 81}, - {sbSB8aSp, prSContinue}: {sbAny, sbDontBreak, 81}, - {sbSB8aSp, prATerm}: {sbATerm, sbDontBreak, 81}, - {sbSB8aSp, prSTerm}: {sbSTerm, sbDontBreak, 81}, + case sbAny | prSTerm<<32: + return sbSTerm, false, 9990 + case sbATerm | prSContinue<<32: + return sbAny, false, 81 + case sbATerm | prATerm<<32: + return sbATerm, false, 81 + case sbATerm | prSTerm<<32: + return sbSTerm, false, 81 + case sbSB7 | prSContinue<<32: + return sbAny, false, 81 + case sbSB7 | prATerm<<32: + return sbATerm, false, 81 + case sbSB7 | prSTerm<<32: + return sbSTerm, false, 81 + case sbSB8Close | prSContinue<<32: + return sbAny, false, 81 + case sbSB8Close | prATerm<<32: + return sbATerm, false, 81 + case sbSB8Close | prSTerm<<32: + return sbSTerm, false, 81 + case sbSB8Sp | prSContinue<<32: + return sbAny, false, 81 + case sbSB8Sp | prATerm<<32: + return sbATerm, false, 81 + case sbSB8Sp | prSTerm<<32: + return sbSTerm, false, 81 + case sbSTerm | prSContinue<<32: + return sbAny, false, 81 + case sbSTerm | prATerm<<32: + return sbATerm, false, 81 + case sbSTerm | prSTerm<<32: + return sbSTerm, false, 81 + case sbSB8aClose | prSContinue<<32: + return sbAny, false, 81 + case sbSB8aClose | prATerm<<32: + return sbATerm, false, 81 + 
case sbSB8aClose | prSTerm<<32: + return sbSTerm, false, 81 + case sbSB8aSp | prSContinue<<32: + return sbAny, false, 81 + case sbSB8aSp | prATerm<<32: + return sbATerm, false, 81 + case sbSB8aSp | prSTerm<<32: + return sbSTerm, false, 81 // SB9. - {sbATerm, prClose}: {sbSB8Close, sbDontBreak, 90}, - {sbSB7, prClose}: {sbSB8Close, sbDontBreak, 90}, - {sbSB8Close, prClose}: {sbSB8Close, sbDontBreak, 90}, - {sbATerm, prSp}: {sbSB8Sp, sbDontBreak, 90}, - {sbSB7, prSp}: {sbSB8Sp, sbDontBreak, 90}, - {sbSB8Close, prSp}: {sbSB8Sp, sbDontBreak, 90}, - {sbSTerm, prClose}: {sbSB8aClose, sbDontBreak, 90}, - {sbSB8aClose, prClose}: {sbSB8aClose, sbDontBreak, 90}, - {sbSTerm, prSp}: {sbSB8aSp, sbDontBreak, 90}, - {sbSB8aClose, prSp}: {sbSB8aSp, sbDontBreak, 90}, - {sbATerm, prSep}: {sbParaSep, sbDontBreak, 90}, - {sbATerm, prCR}: {sbParaSep, sbDontBreak, 90}, - {sbATerm, prLF}: {sbParaSep, sbDontBreak, 90}, - {sbSB7, prSep}: {sbParaSep, sbDontBreak, 90}, - {sbSB7, prCR}: {sbParaSep, sbDontBreak, 90}, - {sbSB7, prLF}: {sbParaSep, sbDontBreak, 90}, - {sbSB8Close, prSep}: {sbParaSep, sbDontBreak, 90}, - {sbSB8Close, prCR}: {sbParaSep, sbDontBreak, 90}, - {sbSB8Close, prLF}: {sbParaSep, sbDontBreak, 90}, - {sbSTerm, prSep}: {sbParaSep, sbDontBreak, 90}, - {sbSTerm, prCR}: {sbParaSep, sbDontBreak, 90}, - {sbSTerm, prLF}: {sbParaSep, sbDontBreak, 90}, - {sbSB8aClose, prSep}: {sbParaSep, sbDontBreak, 90}, - {sbSB8aClose, prCR}: {sbParaSep, sbDontBreak, 90}, - {sbSB8aClose, prLF}: {sbParaSep, sbDontBreak, 90}, + case sbATerm | prClose<<32: + return sbSB8Close, false, 90 + case sbSB7 | prClose<<32: + return sbSB8Close, false, 90 + case sbSB8Close | prClose<<32: + return sbSB8Close, false, 90 + case sbATerm | prSp<<32: + return sbSB8Sp, false, 90 + case sbSB7 | prSp<<32: + return sbSB8Sp, false, 90 + case sbSB8Close | prSp<<32: + return sbSB8Sp, false, 90 + case sbSTerm | prClose<<32: + return sbSB8aClose, false, 90 + case sbSB8aClose | prClose<<32: + return sbSB8aClose, false, 90 + case sbSTerm | prSp<<32: + return sbSB8aSp, false, 90 + case sbSB8aClose | prSp<<32: + return sbSB8aSp, false, 90 + case sbATerm | prSep<<32: + return sbParaSep, false, 90 + case sbATerm | prCR<<32: + return sbParaSep, false, 90 + case sbATerm | prLF<<32: + return sbParaSep, false, 90 + case sbSB7 | prSep<<32: + return sbParaSep, false, 90 + case sbSB7 | prCR<<32: + return sbParaSep, false, 90 + case sbSB7 | prLF<<32: + return sbParaSep, false, 90 + case sbSB8Close | prSep<<32: + return sbParaSep, false, 90 + case sbSB8Close | prCR<<32: + return sbParaSep, false, 90 + case sbSB8Close | prLF<<32: + return sbParaSep, false, 90 + case sbSTerm | prSep<<32: + return sbParaSep, false, 90 + case sbSTerm | prCR<<32: + return sbParaSep, false, 90 + case sbSTerm | prLF<<32: + return sbParaSep, false, 90 + case sbSB8aClose | prSep<<32: + return sbParaSep, false, 90 + case sbSB8aClose | prCR<<32: + return sbParaSep, false, 90 + case sbSB8aClose | prLF<<32: + return sbParaSep, false, 90 // SB10. - {sbSB8Sp, prSp}: {sbSB8Sp, sbDontBreak, 100}, - {sbSB8aSp, prSp}: {sbSB8aSp, sbDontBreak, 100}, - {sbSB8Sp, prSep}: {sbParaSep, sbDontBreak, 100}, - {sbSB8Sp, prCR}: {sbParaSep, sbDontBreak, 100}, - {sbSB8Sp, prLF}: {sbParaSep, sbDontBreak, 100}, + case sbSB8Sp | prSp<<32: + return sbSB8Sp, false, 100 + case sbSB8aSp | prSp<<32: + return sbSB8aSp, false, 100 + case sbSB8Sp | prSep<<32: + return sbParaSep, false, 100 + case sbSB8Sp | prCR<<32: + return sbParaSep, false, 100 + case sbSB8Sp | prLF<<32: + return sbParaSep, false, 100 // SB11. 
- {sbATerm, prAny}: {sbAny, sbBreak, 110}, - {sbSB7, prAny}: {sbAny, sbBreak, 110}, - {sbSB8Close, prAny}: {sbAny, sbBreak, 110}, - {sbSB8Sp, prAny}: {sbAny, sbBreak, 110}, - {sbSTerm, prAny}: {sbAny, sbBreak, 110}, - {sbSB8aClose, prAny}: {sbAny, sbBreak, 110}, - {sbSB8aSp, prAny}: {sbAny, sbBreak, 110}, + case sbATerm | prAny<<32: + return sbAny, true, 110 + case sbSB7 | prAny<<32: + return sbAny, true, 110 + case sbSB8Close | prAny<<32: + return sbAny, true, 110 + case sbSB8Sp | prAny<<32: + return sbAny, true, 110 + case sbSTerm | prAny<<32: + return sbAny, true, 110 + case sbSB8aClose | prAny<<32: + return sbAny, true, 110 + case sbSB8aSp | prAny<<32: + return sbAny, true, 110 // We'll always break after ParaSep due to SB4. + + default: + return -1, false, -1 + } } // transitionSentenceBreakState determines the new state of the sentence break @@ -141,30 +215,27 @@ func transitionSentenceBreakState(state int, r rune, b []byte, str string) (newS // Find the applicable transition in the table. var rule int - transition, ok := sbTransitions[[2]int{state, nextProperty}] - if ok { - // We have a specific transition. We'll use it. - newState, sentenceBreak, rule = transition[0], transition[1] == sbBreak, transition[2] - } else { + newState, sentenceBreak, rule = sbTransitions(state, nextProperty) + if newState < 0 { // No specific transition found. Try the less specific ones. - transAnyProp, okAnyProp := sbTransitions[[2]int{state, prAny}] - transAnyState, okAnyState := sbTransitions[[2]int{sbAny, nextProperty}] - if okAnyProp && okAnyState { + anyPropState, anyPropProp, anyPropRule := sbTransitions(state, prAny) + anyStateState, anyStateProp, anyStateRule := sbTransitions(sbAny, nextProperty) + if anyPropState >= 0 && anyStateState >= 0 { // Both apply. We'll use a mix (see comments for grTransitions). - newState, sentenceBreak, rule = transAnyState[0], transAnyState[1] == sbBreak, transAnyState[2] - if transAnyProp[2] < transAnyState[2] { - sentenceBreak, rule = transAnyProp[1] == sbBreak, transAnyProp[2] + newState, sentenceBreak, rule = anyStateState, anyStateProp, anyStateRule + if anyPropRule < anyStateRule { + sentenceBreak, rule = anyPropProp, anyPropRule } - } else if okAnyProp { + } else if anyPropState >= 0 { // We only have a specific state. - newState, sentenceBreak, rule = transAnyProp[0], transAnyProp[1] == sbBreak, transAnyProp[2] + newState, sentenceBreak, rule = anyPropState, anyPropProp, anyPropRule // This branch will probably never be reached because okAnyState will // always be true given the current transition map. But we keep it here // for future modifications to the transition map where this may not be // true anymore. - } else if okAnyState { + } else if anyStateState >= 0 { // We only have a specific property. - newState, sentenceBreak, rule = transAnyState[0], transAnyState[1] == sbBreak, transAnyState[2] + newState, sentenceBreak, rule = anyStateState, anyStateProp, anyStateRule } else { // No known transition. SB999: Any × Any. newState, sentenceBreak, rule = sbAny, false, 9990 diff --git a/vendor/github.com/rivo/uniseg/step.go b/vendor/github.com/rivo/uniseg/step.go index 6eca4b5dc7..9b72c5e596 100644 --- a/vendor/github.com/rivo/uniseg/step.go +++ b/vendor/github.com/rivo/uniseg/step.go @@ -100,7 +100,7 @@ func Step(b []byte, state int) (cluster, rest []byte, boundaries int, newState i if len(b) <= length { // If we're already past the end, there is nothing else to parse. 
var prop int if state < 0 { - prop = property(graphemeCodePoints, r) + prop = propertyGraphemes(r) } else { prop = state >> shiftPropState } @@ -150,16 +150,14 @@ func Step(b []byte, state int) (cluster, rest []byte, boundaries int, newState i return b[:length], b[length:], boundary, graphemeState | (wordState << shiftWordState) | (sentenceState << shiftSentenceState) | (lineState << shiftLineState) | (prop << shiftPropState) } - if r == vs16 { - width = 2 - } else if firstProp != prExtendedPictographic && firstProp != prRegionalIndicator && firstProp != prL { - width += runeWidth(r, prop) - } else if firstProp == prExtendedPictographic { + if firstProp == prExtendedPictographic { if r == vs15 { width = 1 - } else { + } else if r == vs16 { width = 2 } + } else if firstProp != prRegionalIndicator && firstProp != prL { + width += runeWidth(r, prop) } length += l @@ -179,7 +177,7 @@ func StepString(str string, state int) (cluster, rest string, boundaries int, ne // Extract the first rune. r, length := utf8.DecodeRuneInString(str) if len(str) <= length { // If we're already past the end, there is nothing else to parse. - prop := property(graphemeCodePoints, r) + prop := propertyGraphemes(r) return str, "", LineMustBreak | (1 << shiftWord) | (1 << shiftSentence) | (runeWidth(r, prop) << ShiftWidth), grAny | (wbAny << shiftWordState) | (sbAny << shiftSentenceState) | (lbAny << shiftLineState) } @@ -226,16 +224,14 @@ func StepString(str string, state int) (cluster, rest string, boundaries int, ne return str[:length], str[length:], boundary, graphemeState | (wordState << shiftWordState) | (sentenceState << shiftSentenceState) | (lineState << shiftLineState) | (prop << shiftPropState) } - if r == vs16 { - width = 2 - } else if firstProp != prExtendedPictographic && firstProp != prRegionalIndicator && firstProp != prL { - width += runeWidth(r, prop) - } else if firstProp == prExtendedPictographic { + if firstProp == prExtendedPictographic { if r == vs15 { width = 1 - } else { + } else if r == vs16 { width = 2 } + } else if firstProp != prRegionalIndicator && firstProp != prL { + width += runeWidth(r, prop) } length += l diff --git a/vendor/github.com/rivo/uniseg/width.go b/vendor/github.com/rivo/uniseg/width.go index 12a57cc2e3..975a9f1343 100644 --- a/vendor/github.com/rivo/uniseg/width.go +++ b/vendor/github.com/rivo/uniseg/width.go @@ -1,5 +1,10 @@ package uniseg +// EastAsianAmbiguousWidth specifies the monospace width for East Asian +// characters classified as Ambiguous. The default is 1 but some rare fonts +// render them with a width of 2. +var EastAsianAmbiguousWidth = 1 + // runeWidth returns the monospace width for the given rune. The provided // grapheme property is a value mapped by the [graphemeCodePoints] table. // @@ -33,9 +38,11 @@ func runeWidth(r rune, graphemeProperty int) int { return 4 } - switch property(eastAsianWidth, r) { + switch propertyEastAsianWidth(r) { case prW, prF: return 2 + case prA: + return EastAsianAmbiguousWidth } return 1 diff --git a/vendor/github.com/rivo/uniseg/wordproperties.go b/vendor/github.com/rivo/uniseg/wordproperties.go index 805cc536cb..277ca10068 100644 --- a/vendor/github.com/rivo/uniseg/wordproperties.go +++ b/vendor/github.com/rivo/uniseg/wordproperties.go @@ -1,13 +1,13 @@ -package uniseg - // Code generated via go generate from gen_properties.go. DO NOT EDIT. 
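(The width.go hunk above exports a new package-level knob, EastAsianAmbiguousWidth, so callers can opt into double-width rendering of East Asian Ambiguous characters; the default stays 1. A short usage sketch against the public API:)

package main

import (
	"fmt"

	"github.com/rivo/uniseg"
)

func main() {
	s := "±½α" // U+00B1, U+00BD, U+03B1 are classified as East Asian Ambiguous

	// Default behaviour: Ambiguous characters count as width 1.
	fmt.Println(uniseg.StringWidth(s))

	// Callers targeting fonts or terminals that render Ambiguous characters
	// double-width can flip the new package-level variable.
	uniseg.EastAsianAmbiguousWidth = 2
	fmt.Println(uniseg.StringWidth(s))
}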
+package uniseg + // workBreakCodePoints are taken from -// https://www.unicode.org/Public/14.0.0/ucd/auxiliary/WordBreakProperty.txt +// https://www.unicode.org/Public/15.0.0/ucd/auxiliary/WordBreakProperty.txt // and -// https://unicode.org/Public/14.0.0/ucd/emoji/emoji-data.txt +// https://unicode.org/Public/15.0.0/ucd/emoji/emoji-data.txt // ("Extended_Pictographic" only) -// on September 10, 2022. See https://www.unicode.org/license.html for the Unicode +// on September 5, 2023. See https://www.unicode.org/license.html for the Unicode // license agreement. var workBreakCodePoints = [][3]int{ {0x000A, 0x000A, prLF}, // Cc @@ -318,6 +318,7 @@ var workBreakCodePoints = [][3]int{ {0x0CE2, 0x0CE3, prExtend}, // Mn [2] KANNADA VOWEL SIGN VOCALIC L..KANNADA VOWEL SIGN VOCALIC LL {0x0CE6, 0x0CEF, prNumeric}, // Nd [10] KANNADA DIGIT ZERO..KANNADA DIGIT NINE {0x0CF1, 0x0CF2, prALetter}, // Lo [2] KANNADA SIGN JIHVAMULIYA..KANNADA SIGN UPADHMANIYA + {0x0CF3, 0x0CF3, prExtend}, // Mc KANNADA SIGN COMBINING ANUSVARA ABOVE RIGHT {0x0D00, 0x0D01, prExtend}, // Mn [2] MALAYALAM SIGN COMBINING ANUSVARA ABOVE..MALAYALAM SIGN CANDRABINDU {0x0D02, 0x0D03, prExtend}, // Mc [2] MALAYALAM SIGN ANUSVARA..MALAYALAM SIGN VISARGA {0x0D04, 0x0D0C, prALetter}, // Lo [9] MALAYALAM LETTER VEDIC ANUSVARA..MALAYALAM LETTER VOCALIC L @@ -357,7 +358,7 @@ var workBreakCodePoints = [][3]int{ {0x0E50, 0x0E59, prNumeric}, // Nd [10] THAI DIGIT ZERO..THAI DIGIT NINE {0x0EB1, 0x0EB1, prExtend}, // Mn LAO VOWEL SIGN MAI KAN {0x0EB4, 0x0EBC, prExtend}, // Mn [9] LAO VOWEL SIGN I..LAO SEMIVOWEL SIGN LO - {0x0EC8, 0x0ECD, prExtend}, // Mn [6] LAO TONE MAI EK..LAO NIGGAHITA + {0x0EC8, 0x0ECE, prExtend}, // Mn [7] LAO TONE MAI EK..LAO YAMAKKAN {0x0ED0, 0x0ED9, prNumeric}, // Nd [10] LAO DIGIT ZERO..LAO DIGIT NINE {0x0F00, 0x0F00, prALetter}, // Lo TIBETAN SYLLABLE OM {0x0F18, 0x0F19, prExtend}, // Mn [2] TIBETAN ASTROLOGICAL SIGN -KHYUD PA..TIBETAN ASTROLOGICAL SIGN SDONG TSHUGS @@ -1093,6 +1094,7 @@ var workBreakCodePoints = [][3]int{ {0x10E80, 0x10EA9, prALetter}, // Lo [42] YEZIDI LETTER ELIF..YEZIDI LETTER ET {0x10EAB, 0x10EAC, prExtend}, // Mn [2] YEZIDI COMBINING HAMZA MARK..YEZIDI COMBINING MADDA MARK {0x10EB0, 0x10EB1, prALetter}, // Lo [2] YEZIDI LETTER LAM WITH DOT ABOVE..YEZIDI LETTER YOT WITH CIRCUMFLEX ABOVE + {0x10EFD, 0x10EFF, prExtend}, // Mn [3] ARABIC SMALL LOW WORD SAKTA..ARABIC SMALL LOW WORD MADDA {0x10F00, 0x10F1C, prALetter}, // Lo [29] OLD SOGDIAN LETTER ALEPH..OLD SOGDIAN LETTER FINAL TAW WITH VERTICAL TAIL {0x10F27, 0x10F27, prALetter}, // Lo OLD SOGDIAN LIGATURE AYIN-DALETH {0x10F30, 0x10F45, prALetter}, // Lo [22] SOGDIAN LETTER ALEPH..SOGDIAN INDEPENDENT SHIN @@ -1157,6 +1159,8 @@ var workBreakCodePoints = [][3]int{ {0x11235, 0x11235, prExtend}, // Mc KHOJKI SIGN VIRAMA {0x11236, 0x11237, prExtend}, // Mn [2] KHOJKI SIGN NUKTA..KHOJKI SIGN SHADDA {0x1123E, 0x1123E, prExtend}, // Mn KHOJKI SIGN SUKUN + {0x1123F, 0x11240, prALetter}, // Lo [2] KHOJKI LETTER QA..KHOJKI LETTER SHORT I + {0x11241, 0x11241, prExtend}, // Mn KHOJKI VOWEL SIGN VOCALIC R {0x11280, 0x11286, prALetter}, // Lo [7] MULTANI LETTER A..MULTANI LETTER GA {0x11288, 0x11288, prALetter}, // Lo MULTANI LETTER GHA {0x1128A, 0x1128D, prALetter}, // Lo [4] MULTANI LETTER CA..MULTANI LETTER JJA @@ -1337,13 +1341,28 @@ var workBreakCodePoints = [][3]int{ {0x11EE0, 0x11EF2, prALetter}, // Lo [19] MAKASAR LETTER KA..MAKASAR ANGKA {0x11EF3, 0x11EF4, prExtend}, // Mn [2] MAKASAR VOWEL SIGN I..MAKASAR VOWEL SIGN U {0x11EF5, 0x11EF6, prExtend}, // 
Mc [2] MAKASAR VOWEL SIGN E..MAKASAR VOWEL SIGN O + {0x11F00, 0x11F01, prExtend}, // Mn [2] KAWI SIGN CANDRABINDU..KAWI SIGN ANUSVARA + {0x11F02, 0x11F02, prALetter}, // Lo KAWI SIGN REPHA + {0x11F03, 0x11F03, prExtend}, // Mc KAWI SIGN VISARGA + {0x11F04, 0x11F10, prALetter}, // Lo [13] KAWI LETTER A..KAWI LETTER O + {0x11F12, 0x11F33, prALetter}, // Lo [34] KAWI LETTER KA..KAWI LETTER JNYA + {0x11F34, 0x11F35, prExtend}, // Mc [2] KAWI VOWEL SIGN AA..KAWI VOWEL SIGN ALTERNATE AA + {0x11F36, 0x11F3A, prExtend}, // Mn [5] KAWI VOWEL SIGN I..KAWI VOWEL SIGN VOCALIC R + {0x11F3E, 0x11F3F, prExtend}, // Mc [2] KAWI VOWEL SIGN E..KAWI VOWEL SIGN AI + {0x11F40, 0x11F40, prExtend}, // Mn KAWI VOWEL SIGN EU + {0x11F41, 0x11F41, prExtend}, // Mc KAWI SIGN KILLER + {0x11F42, 0x11F42, prExtend}, // Mn KAWI CONJOINER + {0x11F50, 0x11F59, prNumeric}, // Nd [10] KAWI DIGIT ZERO..KAWI DIGIT NINE {0x11FB0, 0x11FB0, prALetter}, // Lo LISU LETTER YHA {0x12000, 0x12399, prALetter}, // Lo [922] CUNEIFORM SIGN A..CUNEIFORM SIGN U U {0x12400, 0x1246E, prALetter}, // Nl [111] CUNEIFORM NUMERIC SIGN TWO ASH..CUNEIFORM NUMERIC SIGN NINE U VARIANT FORM {0x12480, 0x12543, prALetter}, // Lo [196] CUNEIFORM SIGN AB TIMES NUN TENU..CUNEIFORM SIGN ZU5 TIMES THREE DISH TENU {0x12F90, 0x12FF0, prALetter}, // Lo [97] CYPRO-MINOAN SIGN CM001..CYPRO-MINOAN SIGN CM114 - {0x13000, 0x1342E, prALetter}, // Lo [1071] EGYPTIAN HIEROGLYPH A001..EGYPTIAN HIEROGLYPH AA032 - {0x13430, 0x13438, prFormat}, // Cf [9] EGYPTIAN HIEROGLYPH VERTICAL JOINER..EGYPTIAN HIEROGLYPH END SEGMENT + {0x13000, 0x1342F, prALetter}, // Lo [1072] EGYPTIAN HIEROGLYPH A001..EGYPTIAN HIEROGLYPH V011D + {0x13430, 0x1343F, prFormat}, // Cf [16] EGYPTIAN HIEROGLYPH VERTICAL JOINER..EGYPTIAN HIEROGLYPH END WALLED ENCLOSURE + {0x13440, 0x13440, prExtend}, // Mn EGYPTIAN HIEROGLYPH MIRROR HORIZONTALLY + {0x13441, 0x13446, prALetter}, // Lo [6] EGYPTIAN HIEROGLYPH FULL BLANK..EGYPTIAN HIEROGLYPH WIDE LOST SIGN + {0x13447, 0x13455, prExtend}, // Mn [15] EGYPTIAN HIEROGLYPH MODIFIER DAMAGED AT TOP START..EGYPTIAN HIEROGLYPH MODIFIER DAMAGED {0x14400, 0x14646, prALetter}, // Lo [583] ANATOLIAN HIEROGLYPH A001..ANATOLIAN HIEROGLYPH A530 {0x16800, 0x16A38, prALetter}, // Lo [569] BAMUM LETTER PHASE-A NGKUE MFON..BAMUM LETTER PHASE-F VUEQ {0x16A40, 0x16A5E, prALetter}, // Lo [31] MRO LETTER TA..MRO LETTER TEK @@ -1374,6 +1393,7 @@ var workBreakCodePoints = [][3]int{ {0x1AFFD, 0x1AFFE, prKatakana}, // Lm [2] KATAKANA LETTER MINNAN NASALIZED TONE-7..KATAKANA LETTER MINNAN NASALIZED TONE-8 {0x1B000, 0x1B000, prKatakana}, // Lo KATAKANA LETTER ARCHAIC E {0x1B120, 0x1B122, prKatakana}, // Lo [3] KATAKANA LETTER ARCHAIC YI..KATAKANA LETTER ARCHAIC WU + {0x1B155, 0x1B155, prKatakana}, // Lo KATAKANA LETTER SMALL KO {0x1B164, 0x1B167, prKatakana}, // Lo [4] KATAKANA LETTER SMALL WI..KATAKANA LETTER SMALL N {0x1BC00, 0x1BC6A, prALetter}, // Lo [107] DUPLOYAN LETTER H..DUPLOYAN LETTER VOCALIC M {0x1BC70, 0x1BC7C, prALetter}, // Lo [13] DUPLOYAN AFFIX LEFT HORIZONTAL SECANT..DUPLOYAN AFFIX ATTACHED TANGENT HOOK @@ -1431,11 +1451,14 @@ var workBreakCodePoints = [][3]int{ {0x1DF00, 0x1DF09, prALetter}, // L& [10] LATIN SMALL LETTER FENG DIGRAPH WITH TRILL..LATIN SMALL LETTER T WITH HOOK AND RETROFLEX HOOK {0x1DF0A, 0x1DF0A, prALetter}, // Lo LATIN LETTER RETROFLEX CLICK WITH RETROFLEX HOOK {0x1DF0B, 0x1DF1E, prALetter}, // L& [20] LATIN SMALL LETTER ESH WITH DOUBLE BAR..LATIN SMALL LETTER S WITH CURL + {0x1DF25, 0x1DF2A, prALetter}, // L& [6] LATIN SMALL LETTER D WITH MID-HEIGHT 
LEFT HOOK..LATIN SMALL LETTER T WITH MID-HEIGHT LEFT HOOK {0x1E000, 0x1E006, prExtend}, // Mn [7] COMBINING GLAGOLITIC LETTER AZU..COMBINING GLAGOLITIC LETTER ZHIVETE {0x1E008, 0x1E018, prExtend}, // Mn [17] COMBINING GLAGOLITIC LETTER ZEMLJA..COMBINING GLAGOLITIC LETTER HERU {0x1E01B, 0x1E021, prExtend}, // Mn [7] COMBINING GLAGOLITIC LETTER SHTA..COMBINING GLAGOLITIC LETTER YATI {0x1E023, 0x1E024, prExtend}, // Mn [2] COMBINING GLAGOLITIC LETTER YU..COMBINING GLAGOLITIC LETTER SMALL YUS {0x1E026, 0x1E02A, prExtend}, // Mn [5] COMBINING GLAGOLITIC LETTER YO..COMBINING GLAGOLITIC LETTER FITA + {0x1E030, 0x1E06D, prALetter}, // Lm [62] MODIFIER LETTER CYRILLIC SMALL A..MODIFIER LETTER CYRILLIC SMALL STRAIGHT U WITH STROKE + {0x1E08F, 0x1E08F, prExtend}, // Mn COMBINING CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I {0x1E100, 0x1E12C, prALetter}, // Lo [45] NYIAKENG PUACHUE HMONG LETTER MA..NYIAKENG PUACHUE HMONG LETTER W {0x1E130, 0x1E136, prExtend}, // Mn [7] NYIAKENG PUACHUE HMONG TONE-B..NYIAKENG PUACHUE HMONG TONE-D {0x1E137, 0x1E13D, prALetter}, // Lm [7] NYIAKENG PUACHUE HMONG SIGN FOR PERSON..NYIAKENG PUACHUE HMONG SYLLABLE LENGTHENER @@ -1446,6 +1469,10 @@ var workBreakCodePoints = [][3]int{ {0x1E2C0, 0x1E2EB, prALetter}, // Lo [44] WANCHO LETTER AA..WANCHO LETTER YIH {0x1E2EC, 0x1E2EF, prExtend}, // Mn [4] WANCHO TONE TUP..WANCHO TONE KOINI {0x1E2F0, 0x1E2F9, prNumeric}, // Nd [10] WANCHO DIGIT ZERO..WANCHO DIGIT NINE + {0x1E4D0, 0x1E4EA, prALetter}, // Lo [27] NAG MUNDARI LETTER O..NAG MUNDARI LETTER ELL + {0x1E4EB, 0x1E4EB, prALetter}, // Lm NAG MUNDARI SIGN OJOD + {0x1E4EC, 0x1E4EF, prExtend}, // Mn [4] NAG MUNDARI SIGN MUHOR..NAG MUNDARI SIGN SUTUH + {0x1E4F0, 0x1E4F9, prNumeric}, // Nd [10] NAG MUNDARI DIGIT ZERO..NAG MUNDARI DIGIT NINE {0x1E7E0, 0x1E7E6, prALetter}, // Lo [7] ETHIOPIC SYLLABLE HHYA..ETHIOPIC SYLLABLE HHYO {0x1E7E8, 0x1E7EB, prALetter}, // Lo [4] ETHIOPIC SYLLABLE GURAGE HHWA..ETHIOPIC SYLLABLE HHWE {0x1E7ED, 0x1E7EE, prALetter}, // Lo [2] ETHIOPIC SYLLABLE GURAGE MWI..ETHIOPIC SYLLABLE GURAGE MWEE @@ -1740,7 +1767,8 @@ var workBreakCodePoints = [][3]int{ {0x1F6D3, 0x1F6D4, prExtendedPictographic}, // E0.0 [2] (🛓..🛔) STUPA..PAGODA {0x1F6D5, 0x1F6D5, prExtendedPictographic}, // E12.0 [1] (🛕) hindu temple {0x1F6D6, 0x1F6D7, prExtendedPictographic}, // E13.0 [2] (🛖..🛗) hut..elevator - {0x1F6D8, 0x1F6DC, prExtendedPictographic}, // E0.0 [5] (🛘..🛜) .. + {0x1F6D8, 0x1F6DB, prExtendedPictographic}, // E0.0 [4] (🛘..🛛) .. + {0x1F6DC, 0x1F6DC, prExtendedPictographic}, // E15.0 [1] (🛜) wireless {0x1F6DD, 0x1F6DF, prExtendedPictographic}, // E14.0 [3] (🛝..🛟) playground slide..ring buoy {0x1F6E0, 0x1F6E5, prExtendedPictographic}, // E0.7 [6] (🛠️..🛥️) hammer and wrench..motor boat {0x1F6E6, 0x1F6E8, prExtendedPictographic}, // E0.0 [3] (🛦..🛨) UP-POINTING MILITARY AIRPLANE..UP-POINTING SMALL AIRPLANE @@ -1757,7 +1785,7 @@ var workBreakCodePoints = [][3]int{ {0x1F6FA, 0x1F6FA, prExtendedPictographic}, // E12.0 [1] (🛺) auto rickshaw {0x1F6FB, 0x1F6FC, prExtendedPictographic}, // E13.0 [2] (🛻..🛼) pickup truck..roller skate {0x1F6FD, 0x1F6FF, prExtendedPictographic}, // E0.0 [3] (🛽..🛿) .. - {0x1F774, 0x1F77F, prExtendedPictographic}, // E0.0 [12] (🝴..🝿) .. + {0x1F774, 0x1F77F, prExtendedPictographic}, // E0.0 [12] (🝴..🝿) LOT OF FORTUNE..ORCUS {0x1F7D5, 0x1F7DF, prExtendedPictographic}, // E0.0 [11] (🟕..🟟) CIRCLED TRIANGLE.. 
{0x1F7E0, 0x1F7EB, prExtendedPictographic}, // E12.0 [12] (🟠..🟫) orange circle..brown square {0x1F7EC, 0x1F7EF, prExtendedPictographic}, // E0.0 [4] (🟬..🟯) .. @@ -1816,30 +1844,37 @@ var workBreakCodePoints = [][3]int{ {0x1FA00, 0x1FA6F, prExtendedPictographic}, // E0.0 [112] (🨀..🩯) NEUTRAL CHESS KING.. {0x1FA70, 0x1FA73, prExtendedPictographic}, // E12.0 [4] (🩰..🩳) ballet shoes..shorts {0x1FA74, 0x1FA74, prExtendedPictographic}, // E13.0 [1] (🩴) thong sandal - {0x1FA75, 0x1FA77, prExtendedPictographic}, // E0.0 [3] (🩵..🩷) .. + {0x1FA75, 0x1FA77, prExtendedPictographic}, // E15.0 [3] (🩵..🩷) light blue heart..pink heart {0x1FA78, 0x1FA7A, prExtendedPictographic}, // E12.0 [3] (🩸..🩺) drop of blood..stethoscope {0x1FA7B, 0x1FA7C, prExtendedPictographic}, // E14.0 [2] (🩻..🩼) x-ray..crutch {0x1FA7D, 0x1FA7F, prExtendedPictographic}, // E0.0 [3] (🩽..🩿) .. {0x1FA80, 0x1FA82, prExtendedPictographic}, // E12.0 [3] (🪀..🪂) yo-yo..parachute {0x1FA83, 0x1FA86, prExtendedPictographic}, // E13.0 [4] (🪃..🪆) boomerang..nesting dolls - {0x1FA87, 0x1FA8F, prExtendedPictographic}, // E0.0 [9] (🪇..🪏) .. + {0x1FA87, 0x1FA88, prExtendedPictographic}, // E15.0 [2] (🪇..🪈) maracas..flute + {0x1FA89, 0x1FA8F, prExtendedPictographic}, // E0.0 [7] (🪉..🪏) .. {0x1FA90, 0x1FA95, prExtendedPictographic}, // E12.0 [6] (🪐..🪕) ringed planet..banjo {0x1FA96, 0x1FAA8, prExtendedPictographic}, // E13.0 [19] (🪖..🪨) military helmet..rock {0x1FAA9, 0x1FAAC, prExtendedPictographic}, // E14.0 [4] (🪩..🪬) mirror ball..hamsa - {0x1FAAD, 0x1FAAF, prExtendedPictographic}, // E0.0 [3] (🪭..🪯) .. + {0x1FAAD, 0x1FAAF, prExtendedPictographic}, // E15.0 [3] (🪭..🪯) folding hand fan..khanda {0x1FAB0, 0x1FAB6, prExtendedPictographic}, // E13.0 [7] (🪰..🪶) fly..feather {0x1FAB7, 0x1FABA, prExtendedPictographic}, // E14.0 [4] (🪷..🪺) lotus..nest with eggs - {0x1FABB, 0x1FABF, prExtendedPictographic}, // E0.0 [5] (🪻..🪿) .. + {0x1FABB, 0x1FABD, prExtendedPictographic}, // E15.0 [3] (🪻..🪽) hyacinth..wing + {0x1FABE, 0x1FABE, prExtendedPictographic}, // E0.0 [1] (🪾) + {0x1FABF, 0x1FABF, prExtendedPictographic}, // E15.0 [1] (🪿) goose {0x1FAC0, 0x1FAC2, prExtendedPictographic}, // E13.0 [3] (🫀..🫂) anatomical heart..people hugging {0x1FAC3, 0x1FAC5, prExtendedPictographic}, // E14.0 [3] (🫃..🫅) pregnant man..person with crown - {0x1FAC6, 0x1FACF, prExtendedPictographic}, // E0.0 [10] (🫆..🫏) .. + {0x1FAC6, 0x1FACD, prExtendedPictographic}, // E0.0 [8] (🫆..🫍) .. + {0x1FACE, 0x1FACF, prExtendedPictographic}, // E15.0 [2] (🫎..🫏) moose..donkey {0x1FAD0, 0x1FAD6, prExtendedPictographic}, // E13.0 [7] (🫐..🫖) blueberries..teapot {0x1FAD7, 0x1FAD9, prExtendedPictographic}, // E14.0 [3] (🫗..🫙) pouring liquid..jar - {0x1FADA, 0x1FADF, prExtendedPictographic}, // E0.0 [6] (🫚..🫟) .. + {0x1FADA, 0x1FADB, prExtendedPictographic}, // E15.0 [2] (🫚..🫛) ginger root..pea pod + {0x1FADC, 0x1FADF, prExtendedPictographic}, // E0.0 [4] (🫜..🫟) .. {0x1FAE0, 0x1FAE7, prExtendedPictographic}, // E14.0 [8] (🫠..🫧) melting face..bubbles - {0x1FAE8, 0x1FAEF, prExtendedPictographic}, // E0.0 [8] (🫨..🫯) .. + {0x1FAE8, 0x1FAE8, prExtendedPictographic}, // E15.0 [1] (🫨) shaking face + {0x1FAE9, 0x1FAEF, prExtendedPictographic}, // E0.0 [7] (🫩..🫯) .. {0x1FAF0, 0x1FAF6, prExtendedPictographic}, // E14.0 [7] (🫰..🫶) hand with index finger and thumb crossed..heart hands - {0x1FAF7, 0x1FAFF, prExtendedPictographic}, // E0.0 [9] (🫷..🫿) .. 
+ {0x1FAF7, 0x1FAF8, prExtendedPictographic}, // E15.0 [2] (🫷..🫸) leftwards pushing hand..rightwards pushing hand + {0x1FAF9, 0x1FAFF, prExtendedPictographic}, // E0.0 [7] (🫹..🫿) .. {0x1FBF0, 0x1FBF9, prNumeric}, // Nd [10] SEGMENTED DIGIT ZERO..SEGMENTED DIGIT NINE {0x1FC00, 0x1FFFD, prExtendedPictographic}, // E0.0[1022] (🰀..🿽) .. {0xE0001, 0xE0001, prFormat}, // Cf LANGUAGE TAG diff --git a/vendor/github.com/rivo/uniseg/wordrules.go b/vendor/github.com/rivo/uniseg/wordrules.go index 325407e40b..57a8c68311 100644 --- a/vendor/github.com/rivo/uniseg/wordrules.go +++ b/vendor/github.com/rivo/uniseg/wordrules.go @@ -22,82 +22,121 @@ const ( wbZWJBit = 16 // This bit is set for any states followed by at least one zero-width joiner (see WB4 and WB3c). ) -// The word break parser's breaking instructions. -const ( - wbDontBreak = iota - wbBreak -) - -// The word break parser's state transitions. It's anologous to grTransitions, -// see comments there for details. Unicode version 14.0.0. -var wbTransitions = map[[2]int][3]int{ +// wbTransitions implements the word break parser's state transitions. It's +// anologous to [grTransitions], see comments there for details. +// +// Unicode version 15.0.0. +func wbTransitions(state, prop int) (newState int, wordBreak bool, rule int) { + switch uint64(state) | uint64(prop)<<32 { // WB3b. - {wbAny, prNewline}: {wbNewline, wbBreak, 32}, - {wbAny, prCR}: {wbCR, wbBreak, 32}, - {wbAny, prLF}: {wbLF, wbBreak, 32}, + case wbAny | prNewline<<32: + return wbNewline, true, 32 + case wbAny | prCR<<32: + return wbCR, true, 32 + case wbAny | prLF<<32: + return wbLF, true, 32 // WB3a. - {wbNewline, prAny}: {wbAny, wbBreak, 31}, - {wbCR, prAny}: {wbAny, wbBreak, 31}, - {wbLF, prAny}: {wbAny, wbBreak, 31}, + case wbNewline | prAny<<32: + return wbAny, true, 31 + case wbCR | prAny<<32: + return wbAny, true, 31 + case wbLF | prAny<<32: + return wbAny, true, 31 // WB3. - {wbCR, prLF}: {wbLF, wbDontBreak, 30}, + case wbCR | prLF<<32: + return wbLF, false, 30 // WB3d. - {wbAny, prWSegSpace}: {wbWSegSpace, wbBreak, 9990}, - {wbWSegSpace, prWSegSpace}: {wbWSegSpace, wbDontBreak, 34}, + case wbAny | prWSegSpace<<32: + return wbWSegSpace, true, 9990 + case wbWSegSpace | prWSegSpace<<32: + return wbWSegSpace, false, 34 // WB5. - {wbAny, prALetter}: {wbALetter, wbBreak, 9990}, - {wbAny, prHebrewLetter}: {wbHebrewLetter, wbBreak, 9990}, - {wbALetter, prALetter}: {wbALetter, wbDontBreak, 50}, - {wbALetter, prHebrewLetter}: {wbHebrewLetter, wbDontBreak, 50}, - {wbHebrewLetter, prALetter}: {wbALetter, wbDontBreak, 50}, - {wbHebrewLetter, prHebrewLetter}: {wbHebrewLetter, wbDontBreak, 50}, + case wbAny | prALetter<<32: + return wbALetter, true, 9990 + case wbAny | prHebrewLetter<<32: + return wbHebrewLetter, true, 9990 + case wbALetter | prALetter<<32: + return wbALetter, false, 50 + case wbALetter | prHebrewLetter<<32: + return wbHebrewLetter, false, 50 + case wbHebrewLetter | prALetter<<32: + return wbALetter, false, 50 + case wbHebrewLetter | prHebrewLetter<<32: + return wbHebrewLetter, false, 50 // WB7. Transitions to wbWB7 handled by transitionWordBreakState(). - {wbWB7, prALetter}: {wbALetter, wbDontBreak, 70}, - {wbWB7, prHebrewLetter}: {wbHebrewLetter, wbDontBreak, 70}, + case wbWB7 | prALetter<<32: + return wbALetter, false, 70 + case wbWB7 | prHebrewLetter<<32: + return wbHebrewLetter, false, 70 // WB7a. - {wbHebrewLetter, prSingleQuote}: {wbAny, wbDontBreak, 71}, + case wbHebrewLetter | prSingleQuote<<32: + return wbAny, false, 71 // WB7c. 
Transitions to wbWB7c handled by transitionWordBreakState(). - {wbWB7c, prHebrewLetter}: {wbHebrewLetter, wbDontBreak, 73}, + case wbWB7c | prHebrewLetter<<32: + return wbHebrewLetter, false, 73 // WB8. - {wbAny, prNumeric}: {wbNumeric, wbBreak, 9990}, - {wbNumeric, prNumeric}: {wbNumeric, wbDontBreak, 80}, + case wbAny | prNumeric<<32: + return wbNumeric, true, 9990 + case wbNumeric | prNumeric<<32: + return wbNumeric, false, 80 // WB9. - {wbALetter, prNumeric}: {wbNumeric, wbDontBreak, 90}, - {wbHebrewLetter, prNumeric}: {wbNumeric, wbDontBreak, 90}, + case wbALetter | prNumeric<<32: + return wbNumeric, false, 90 + case wbHebrewLetter | prNumeric<<32: + return wbNumeric, false, 90 // WB10. - {wbNumeric, prALetter}: {wbALetter, wbDontBreak, 100}, - {wbNumeric, prHebrewLetter}: {wbHebrewLetter, wbDontBreak, 100}, + case wbNumeric | prALetter<<32: + return wbALetter, false, 100 + case wbNumeric | prHebrewLetter<<32: + return wbHebrewLetter, false, 100 // WB11. Transitions to wbWB11 handled by transitionWordBreakState(). - {wbWB11, prNumeric}: {wbNumeric, wbDontBreak, 110}, + case wbWB11 | prNumeric<<32: + return wbNumeric, false, 110 // WB13. - {wbAny, prKatakana}: {wbKatakana, wbBreak, 9990}, - {wbKatakana, prKatakana}: {wbKatakana, wbDontBreak, 130}, + case wbAny | prKatakana<<32: + return wbKatakana, true, 9990 + case wbKatakana | prKatakana<<32: + return wbKatakana, false, 130 // WB13a. - {wbAny, prExtendNumLet}: {wbExtendNumLet, wbBreak, 9990}, - {wbALetter, prExtendNumLet}: {wbExtendNumLet, wbDontBreak, 131}, - {wbHebrewLetter, prExtendNumLet}: {wbExtendNumLet, wbDontBreak, 131}, - {wbNumeric, prExtendNumLet}: {wbExtendNumLet, wbDontBreak, 131}, - {wbKatakana, prExtendNumLet}: {wbExtendNumLet, wbDontBreak, 131}, - {wbExtendNumLet, prExtendNumLet}: {wbExtendNumLet, wbDontBreak, 131}, + case wbAny | prExtendNumLet<<32: + return wbExtendNumLet, true, 9990 + case wbALetter | prExtendNumLet<<32: + return wbExtendNumLet, false, 131 + case wbHebrewLetter | prExtendNumLet<<32: + return wbExtendNumLet, false, 131 + case wbNumeric | prExtendNumLet<<32: + return wbExtendNumLet, false, 131 + case wbKatakana | prExtendNumLet<<32: + return wbExtendNumLet, false, 131 + case wbExtendNumLet | prExtendNumLet<<32: + return wbExtendNumLet, false, 131 // WB13b. - {wbExtendNumLet, prALetter}: {wbALetter, wbDontBreak, 132}, - {wbExtendNumLet, prHebrewLetter}: {wbHebrewLetter, wbDontBreak, 132}, - {wbExtendNumLet, prNumeric}: {wbNumeric, wbDontBreak, 132}, - {wbExtendNumLet, prKatakana}: {prKatakana, wbDontBreak, 132}, + case wbExtendNumLet | prALetter<<32: + return wbALetter, false, 132 + case wbExtendNumLet | prHebrewLetter<<32: + return wbHebrewLetter, false, 132 + case wbExtendNumLet | prNumeric<<32: + return wbNumeric, false, 132 + case wbExtendNumLet | prKatakana<<32: + return wbKatakana, false, 132 + + default: + return -1, false, -1 + } } // transitionWordBreakState determines the new state of the word break parser @@ -141,30 +180,27 @@ func transitionWordBreakState(state int, r rune, b []byte, str string) (newState // Find the applicable transition in the table. var rule int - transition, ok := wbTransitions[[2]int{state, nextProperty}] - if ok { - // We have a specific transition. We'll use it. - newState, wordBreak, rule = transition[0], transition[1] == wbBreak, transition[2] - } else { + newState, wordBreak, rule = wbTransitions(state, nextProperty) + if newState < 0 { // No specific transition found. Try the less specific ones. 
- transAnyProp, okAnyProp := wbTransitions[[2]int{state, prAny}] - transAnyState, okAnyState := wbTransitions[[2]int{wbAny, nextProperty}] - if okAnyProp && okAnyState { + anyPropState, anyPropWordBreak, anyPropRule := wbTransitions(state, prAny) + anyStateState, anyStateWordBreak, anyStateRule := wbTransitions(wbAny, nextProperty) + if anyPropState >= 0 && anyStateState >= 0 { // Both apply. We'll use a mix (see comments for grTransitions). - newState, wordBreak, rule = transAnyState[0], transAnyState[1] == wbBreak, transAnyState[2] - if transAnyProp[2] < transAnyState[2] { - wordBreak, rule = transAnyProp[1] == wbBreak, transAnyProp[2] + newState, wordBreak, rule = anyStateState, anyStateWordBreak, anyStateRule + if anyPropRule < anyStateRule { + wordBreak, rule = anyPropWordBreak, anyPropRule } - } else if okAnyProp { + } else if anyPropState >= 0 { // We only have a specific state. - newState, wordBreak, rule = transAnyProp[0], transAnyProp[1] == wbBreak, transAnyProp[2] + newState, wordBreak, rule = anyPropState, anyPropWordBreak, anyPropRule // This branch will probably never be reached because okAnyState will // always be true given the current transition map. But we keep it here // for future modifications to the transition map where this may not be // true anymore. - } else if okAnyState { + } else if anyStateState >= 0 { // We only have a specific property. - newState, wordBreak, rule = transAnyState[0], transAnyState[1] == wbBreak, transAnyState[2] + newState, wordBreak, rule = anyStateState, anyStateWordBreak, anyStateRule } else { // No known transition. WB999: Any ÷ Any. newState, wordBreak, rule = wbAny, true, 9990 diff --git a/vendor/github.com/rogpeppe/go-internal/LICENSE b/vendor/github.com/rogpeppe/go-internal/LICENSE new file mode 100644 index 0000000000..49ea0f9288 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-internal/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2018 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
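(The sentencerules.go and wordrules.go hunks above replace map-based transition tables with switch statements keyed on a single packed uint64, avoiding map hashing and allocation on the hot path; a negative returned state now plays the role of the old "not found" map miss. A minimal sketch of that packing idiom with hypothetical state and property constants:)

package main

import "fmt"

// Hypothetical state and property constants. The vendored functions pack the
// current parser state into the low 32 bits of the switch key and the next
// code point's property into the high 32 bits.
const (
	stAny = iota
	stCR
)

const (
	propCR = iota + 1
	propLF
)

// transition mirrors the shape of sbTransitions/wbTransitions: a negative
// state signals "no specific rule", so the caller can try the prAny fallbacks.
func transition(state, prop int) (newState int, breakHere bool, rule int) {
	switch uint64(state) | uint64(prop)<<32 {
	case stAny | propCR<<32:
		return stCR, true, 32
	case stCR | propLF<<32:
		return stAny, false, 30
	default:
		return -1, false, -1
	}
}

func main() {
	fmt.Println(transition(stAny, propCR)) // specific rule found
	fmt.Println(transition(stCR, propCR))  // falls through to the default
}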
diff --git a/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/mksyscall.go b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/mksyscall.go new file mode 100644 index 0000000000..a8edafb3c3 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/mksyscall.go @@ -0,0 +1,7 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package windows + +//go:generate go run $GOROOT/src/syscall/mksyscall_windows.go -output zsyscall_windows.go syscall_windows.go security_windows.go psapi_windows.go symlink_windows.go diff --git a/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/psapi_windows.go b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/psapi_windows.go new file mode 100644 index 0000000000..b138e658a9 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/psapi_windows.go @@ -0,0 +1,20 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package windows + +type PROCESS_MEMORY_COUNTERS struct { + CB uint32 + PageFaultCount uint32 + PeakWorkingSetSize uintptr + WorkingSetSize uintptr + QuotaPeakPagedPoolUsage uintptr + QuotaPagedPoolUsage uintptr + QuotaPeakNonPagedPoolUsage uintptr + QuotaNonPagedPoolUsage uintptr + PagefileUsage uintptr + PeakPagefileUsage uintptr +} + +//sys GetProcessMemoryInfo(handle syscall.Handle, memCounters *PROCESS_MEMORY_COUNTERS, cb uint32) (err error) = psapi.GetProcessMemoryInfo diff --git a/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/reparse_windows.go b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/reparse_windows.go new file mode 100644 index 0000000000..7c6ad8fb7e --- /dev/null +++ b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/reparse_windows.go @@ -0,0 +1,64 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package windows + +const ( + FSCTL_SET_REPARSE_POINT = 0x000900A4 + IO_REPARSE_TAG_MOUNT_POINT = 0xA0000003 + + SYMLINK_FLAG_RELATIVE = 1 +) + +// These structures are described +// in https://msdn.microsoft.com/en-us/library/cc232007.aspx +// and https://msdn.microsoft.com/en-us/library/cc232006.aspx. + +// REPARSE_DATA_BUFFER_HEADER is a common part of REPARSE_DATA_BUFFER structure. +type REPARSE_DATA_BUFFER_HEADER struct { + ReparseTag uint32 + // The size, in bytes, of the reparse data that follows + // the common portion of the REPARSE_DATA_BUFFER element. + // This value is the length of the data starting at the + // SubstituteNameOffset field. + ReparseDataLength uint16 + Reserved uint16 +} + +type SymbolicLinkReparseBuffer struct { + // The integer that contains the offset, in bytes, + // of the substitute name string in the PathBuffer array, + // computed as an offset from byte 0 of PathBuffer. Note that + // this offset must be divided by 2 to get the array index. + SubstituteNameOffset uint16 + // The integer that contains the length, in bytes, of the + // substitute name string. If this string is null-terminated, + // SubstituteNameLength does not include the Unicode null character. + SubstituteNameLength uint16 + // PrintNameOffset is similar to SubstituteNameOffset. 
+ PrintNameOffset uint16 + // PrintNameLength is similar to SubstituteNameLength. + PrintNameLength uint16 + // Flags specifies whether the substitute name is a full path name or + // a path name relative to the directory containing the symbolic link. + Flags uint32 + PathBuffer [1]uint16 +} + +type MountPointReparseBuffer struct { + // The integer that contains the offset, in bytes, + // of the substitute name string in the PathBuffer array, + // computed as an offset from byte 0 of PathBuffer. Note that + // this offset must be divided by 2 to get the array index. + SubstituteNameOffset uint16 + // The integer that contains the length, in bytes, of the + // substitute name string. If this string is null-terminated, + // SubstituteNameLength does not include the Unicode null character. + SubstituteNameLength uint16 + // PrintNameOffset is similar to SubstituteNameOffset. + PrintNameOffset uint16 + // PrintNameLength is similar to SubstituteNameLength. + PrintNameLength uint16 + PathBuffer [1]uint16 +} diff --git a/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/security_windows.go b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/security_windows.go new file mode 100644 index 0000000000..4a2dfc0c73 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/security_windows.go @@ -0,0 +1,128 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package windows + +import ( + "syscall" + "unsafe" +) + +const ( + SecurityAnonymous = 0 + SecurityIdentification = 1 + SecurityImpersonation = 2 + SecurityDelegation = 3 +) + +//sys ImpersonateSelf(impersonationlevel uint32) (err error) = advapi32.ImpersonateSelf +//sys RevertToSelf() (err error) = advapi32.RevertToSelf + +const ( + TOKEN_ADJUST_PRIVILEGES = 0x0020 + SE_PRIVILEGE_ENABLED = 0x00000002 +) + +type LUID struct { + LowPart uint32 + HighPart int32 +} + +type LUID_AND_ATTRIBUTES struct { + Luid LUID + Attributes uint32 +} + +type TOKEN_PRIVILEGES struct { + PrivilegeCount uint32 + Privileges [1]LUID_AND_ATTRIBUTES +} + +//sys OpenThreadToken(h syscall.Handle, access uint32, openasself bool, token *syscall.Token) (err error) = advapi32.OpenThreadToken +//sys LookupPrivilegeValue(systemname *uint16, name *uint16, luid *LUID) (err error) = advapi32.LookupPrivilegeValueW +//sys adjustTokenPrivileges(token syscall.Token, disableAllPrivileges bool, newstate *TOKEN_PRIVILEGES, buflen uint32, prevstate *TOKEN_PRIVILEGES, returnlen *uint32) (ret uint32, err error) [true] = advapi32.AdjustTokenPrivileges + +func AdjustTokenPrivileges(token syscall.Token, disableAllPrivileges bool, newstate *TOKEN_PRIVILEGES, buflen uint32, prevstate *TOKEN_PRIVILEGES, returnlen *uint32) error { + ret, err := adjustTokenPrivileges(token, disableAllPrivileges, newstate, buflen, prevstate, returnlen) + if ret == 0 { + // AdjustTokenPrivileges call failed + return err + } + // AdjustTokenPrivileges call succeeded + if err == syscall.EINVAL { + // GetLastError returned ERROR_SUCCESS + return nil + } + return err +} + +//sys DuplicateTokenEx(hExistingToken syscall.Token, dwDesiredAccess uint32, lpTokenAttributes *syscall.SecurityAttributes, impersonationLevel uint32, tokenType TokenType, phNewToken *syscall.Token) (err error) = advapi32.DuplicateTokenEx +//sys SetTokenInformation(tokenHandle syscall.Token, tokenInformationClass uint32, tokenInformation uintptr, tokenInformationLength uint32) (err error) 
= advapi32.SetTokenInformation + +type SID_AND_ATTRIBUTES struct { + Sid *syscall.SID + Attributes uint32 +} + +type TOKEN_MANDATORY_LABEL struct { + Label SID_AND_ATTRIBUTES +} + +func (tml *TOKEN_MANDATORY_LABEL) Size() uint32 { + return uint32(unsafe.Sizeof(TOKEN_MANDATORY_LABEL{})) + syscall.GetLengthSid(tml.Label.Sid) +} + +const SE_GROUP_INTEGRITY = 0x00000020 + +type TokenType uint32 + +const ( + TokenPrimary TokenType = 1 + TokenImpersonation TokenType = 2 +) + +//sys GetProfilesDirectory(dir *uint16, dirLen *uint32) (err error) = userenv.GetProfilesDirectoryW + +const ( + LG_INCLUDE_INDIRECT = 0x1 + MAX_PREFERRED_LENGTH = 0xFFFFFFFF +) + +type LocalGroupUserInfo0 struct { + Name *uint16 +} + +type UserInfo4 struct { + Name *uint16 + Password *uint16 + PasswordAge uint32 + Priv uint32 + HomeDir *uint16 + Comment *uint16 + Flags uint32 + ScriptPath *uint16 + AuthFlags uint32 + FullName *uint16 + UsrComment *uint16 + Parms *uint16 + Workstations *uint16 + LastLogon uint32 + LastLogoff uint32 + AcctExpires uint32 + MaxStorage uint32 + UnitsPerWeek uint32 + LogonHours *byte + BadPwCount uint32 + NumLogons uint32 + LogonServer *uint16 + CountryCode uint32 + CodePage uint32 + UserSid *syscall.SID + PrimaryGroupID uint32 + Profile *uint16 + HomeDirDrive *uint16 + PasswordExpired uint32 +} + +//sys NetUserGetLocalGroups(serverName *uint16, userName *uint16, level uint32, flags uint32, buf **byte, prefMaxLen uint32, entriesRead *uint32, totalEntries *uint32) (neterr error) = netapi32.NetUserGetLocalGroups diff --git a/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/symlink_windows.go b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/symlink_windows.go new file mode 100644 index 0000000000..b64d058d13 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/symlink_windows.go @@ -0,0 +1,39 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package windows + +import "syscall" + +const ( + ERROR_INVALID_PARAMETER syscall.Errno = 87 + + // symlink support for CreateSymbolicLink() starting with Windows 10 (1703, v10.0.14972) + SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE = 0x2 + + // FileInformationClass values + FileBasicInfo = 0 // FILE_BASIC_INFO + FileStandardInfo = 1 // FILE_STANDARD_INFO + FileNameInfo = 2 // FILE_NAME_INFO + FileStreamInfo = 7 // FILE_STREAM_INFO + FileCompressionInfo = 8 // FILE_COMPRESSION_INFO + FileAttributeTagInfo = 9 // FILE_ATTRIBUTE_TAG_INFO + FileIdBothDirectoryInfo = 0xa // FILE_ID_BOTH_DIR_INFO + FileIdBothDirectoryRestartInfo = 0xb // FILE_ID_BOTH_DIR_INFO + FileRemoteProtocolInfo = 0xd // FILE_REMOTE_PROTOCOL_INFO + FileFullDirectoryInfo = 0xe // FILE_FULL_DIR_INFO + FileFullDirectoryRestartInfo = 0xf // FILE_FULL_DIR_INFO + FileStorageInfo = 0x10 // FILE_STORAGE_INFO + FileAlignmentInfo = 0x11 // FILE_ALIGNMENT_INFO + FileIdInfo = 0x12 // FILE_ID_INFO + FileIdExtdDirectoryInfo = 0x13 // FILE_ID_EXTD_DIR_INFO + FileIdExtdDirectoryRestartInfo = 0x14 // FILE_ID_EXTD_DIR_INFO +) + +type FILE_ATTRIBUTE_TAG_INFO struct { + FileAttributes uint32 + ReparseTag uint32 +} + +//sys GetFileInformationByHandleEx(handle syscall.Handle, class uint32, info *byte, bufsize uint32) (err error) diff --git a/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/syscall_windows.go b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/syscall_windows.go new file mode 100644 index 0000000000..121132f6f7 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/syscall_windows.go @@ -0,0 +1,307 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package windows + +import ( + "sync" + "syscall" + "unsafe" +) + +const ( + ERROR_SHARING_VIOLATION syscall.Errno = 32 + ERROR_LOCK_VIOLATION syscall.Errno = 33 + ERROR_NOT_SUPPORTED syscall.Errno = 50 + ERROR_CALL_NOT_IMPLEMENTED syscall.Errno = 120 + ERROR_INVALID_NAME syscall.Errno = 123 + ERROR_LOCK_FAILED syscall.Errno = 167 + ERROR_NO_UNICODE_TRANSLATION syscall.Errno = 1113 +) + +const GAA_FLAG_INCLUDE_PREFIX = 0x00000010 + +const ( + IF_TYPE_OTHER = 1 + IF_TYPE_ETHERNET_CSMACD = 6 + IF_TYPE_ISO88025_TOKENRING = 9 + IF_TYPE_PPP = 23 + IF_TYPE_SOFTWARE_LOOPBACK = 24 + IF_TYPE_ATM = 37 + IF_TYPE_IEEE80211 = 71 + IF_TYPE_TUNNEL = 131 + IF_TYPE_IEEE1394 = 144 +) + +type SocketAddress struct { + Sockaddr *syscall.RawSockaddrAny + SockaddrLength int32 +} + +type IpAdapterUnicastAddress struct { + Length uint32 + Flags uint32 + Next *IpAdapterUnicastAddress + Address SocketAddress + PrefixOrigin int32 + SuffixOrigin int32 + DadState int32 + ValidLifetime uint32 + PreferredLifetime uint32 + LeaseLifetime uint32 + OnLinkPrefixLength uint8 +} + +type IpAdapterAnycastAddress struct { + Length uint32 + Flags uint32 + Next *IpAdapterAnycastAddress + Address SocketAddress +} + +type IpAdapterMulticastAddress struct { + Length uint32 + Flags uint32 + Next *IpAdapterMulticastAddress + Address SocketAddress +} + +type IpAdapterDnsServerAdapter struct { + Length uint32 + Reserved uint32 + Next *IpAdapterDnsServerAdapter + Address SocketAddress +} + +type IpAdapterPrefix struct { + Length uint32 + Flags uint32 + Next *IpAdapterPrefix + Address SocketAddress + PrefixLength uint32 +} + +type IpAdapterAddresses struct { + Length uint32 + IfIndex uint32 + Next *IpAdapterAddresses + AdapterName *byte + FirstUnicastAddress *IpAdapterUnicastAddress + FirstAnycastAddress *IpAdapterAnycastAddress + FirstMulticastAddress *IpAdapterMulticastAddress + FirstDnsServerAddress *IpAdapterDnsServerAdapter + DnsSuffix *uint16 + Description *uint16 + FriendlyName *uint16 + PhysicalAddress [syscall.MAX_ADAPTER_ADDRESS_LENGTH]byte + PhysicalAddressLength uint32 + Flags uint32 + Mtu uint32 + IfType uint32 + OperStatus uint32 + Ipv6IfIndex uint32 + ZoneIndices [16]uint32 + FirstPrefix *IpAdapterPrefix + /* more fields might be present here. 
*/ +} + +const ( + IfOperStatusUp = 1 + IfOperStatusDown = 2 + IfOperStatusTesting = 3 + IfOperStatusUnknown = 4 + IfOperStatusDormant = 5 + IfOperStatusNotPresent = 6 + IfOperStatusLowerLayerDown = 7 +) + +//sys GetAdaptersAddresses(family uint32, flags uint32, reserved uintptr, adapterAddresses *IpAdapterAddresses, sizePointer *uint32) (errcode error) = iphlpapi.GetAdaptersAddresses +//sys GetComputerNameEx(nameformat uint32, buf *uint16, n *uint32) (err error) = GetComputerNameExW +//sys MoveFileEx(from *uint16, to *uint16, flags uint32) (err error) = MoveFileExW +//sys GetModuleFileName(module syscall.Handle, fn *uint16, len uint32) (n uint32, err error) = kernel32.GetModuleFileNameW + +const ( + WSA_FLAG_OVERLAPPED = 0x01 + WSA_FLAG_NO_HANDLE_INHERIT = 0x80 + + WSAEMSGSIZE syscall.Errno = 10040 + + MSG_PEEK = 0x2 + MSG_TRUNC = 0x0100 + MSG_CTRUNC = 0x0200 + + socket_error = uintptr(^uint32(0)) +) + +var WSAID_WSASENDMSG = syscall.GUID{ + Data1: 0xa441e712, + Data2: 0x754f, + Data3: 0x43ca, + Data4: [8]byte{0x84, 0xa7, 0x0d, 0xee, 0x44, 0xcf, 0x60, 0x6d}, +} + +var WSAID_WSARECVMSG = syscall.GUID{ + Data1: 0xf689d7c8, + Data2: 0x6f1f, + Data3: 0x436b, + Data4: [8]byte{0x8a, 0x53, 0xe5, 0x4f, 0xe3, 0x51, 0xc3, 0x22}, +} + +var sendRecvMsgFunc struct { + once sync.Once + sendAddr uintptr + recvAddr uintptr + err error +} + +type WSAMsg struct { + Name *syscall.RawSockaddrAny + Namelen int32 + Buffers *syscall.WSABuf + BufferCount uint32 + Control syscall.WSABuf + Flags uint32 +} + +//sys WSASocket(af int32, typ int32, protocol int32, protinfo *syscall.WSAProtocolInfo, group uint32, flags uint32) (handle syscall.Handle, err error) [failretval==syscall.InvalidHandle] = ws2_32.WSASocketW + +func loadWSASendRecvMsg() error { + sendRecvMsgFunc.once.Do(func() { + var s syscall.Handle + s, sendRecvMsgFunc.err = syscall.Socket(syscall.AF_INET, syscall.SOCK_DGRAM, syscall.IPPROTO_UDP) + if sendRecvMsgFunc.err != nil { + return + } + defer syscall.CloseHandle(s) + var n uint32 + sendRecvMsgFunc.err = syscall.WSAIoctl(s, + syscall.SIO_GET_EXTENSION_FUNCTION_POINTER, + (*byte)(unsafe.Pointer(&WSAID_WSARECVMSG)), + uint32(unsafe.Sizeof(WSAID_WSARECVMSG)), + (*byte)(unsafe.Pointer(&sendRecvMsgFunc.recvAddr)), + uint32(unsafe.Sizeof(sendRecvMsgFunc.recvAddr)), + &n, nil, 0) + if sendRecvMsgFunc.err != nil { + return + } + sendRecvMsgFunc.err = syscall.WSAIoctl(s, + syscall.SIO_GET_EXTENSION_FUNCTION_POINTER, + (*byte)(unsafe.Pointer(&WSAID_WSASENDMSG)), + uint32(unsafe.Sizeof(WSAID_WSASENDMSG)), + (*byte)(unsafe.Pointer(&sendRecvMsgFunc.sendAddr)), + uint32(unsafe.Sizeof(sendRecvMsgFunc.sendAddr)), + &n, nil, 0) + }) + return sendRecvMsgFunc.err +} + +func WSASendMsg(fd syscall.Handle, msg *WSAMsg, flags uint32, bytesSent *uint32, overlapped *syscall.Overlapped, croutine *byte) error { + err := loadWSASendRecvMsg() + if err != nil { + return err + } + r1, _, e1 := syscall.Syscall6(sendRecvMsgFunc.sendAddr, 6, uintptr(fd), uintptr(unsafe.Pointer(msg)), uintptr(flags), uintptr(unsafe.Pointer(bytesSent)), uintptr(unsafe.Pointer(overlapped)), uintptr(unsafe.Pointer(croutine))) + if r1 == socket_error { + if e1 != 0 { + err = errnoErr(e1) + } else { + err = syscall.EINVAL + } + } + return err +} + +func WSARecvMsg(fd syscall.Handle, msg *WSAMsg, bytesReceived *uint32, overlapped *syscall.Overlapped, croutine *byte) error { + err := loadWSASendRecvMsg() + if err != nil { + return err + } + r1, _, e1 := syscall.Syscall6(sendRecvMsgFunc.recvAddr, 5, uintptr(fd), uintptr(unsafe.Pointer(msg)), 
uintptr(unsafe.Pointer(bytesReceived)), uintptr(unsafe.Pointer(overlapped)), uintptr(unsafe.Pointer(croutine)), 0) + if r1 == socket_error { + if e1 != 0 { + err = errnoErr(e1) + } else { + err = syscall.EINVAL + } + } + return err +} + +const ( + ComputerNameNetBIOS = 0 + ComputerNameDnsHostname = 1 + ComputerNameDnsDomain = 2 + ComputerNameDnsFullyQualified = 3 + ComputerNamePhysicalNetBIOS = 4 + ComputerNamePhysicalDnsHostname = 5 + ComputerNamePhysicalDnsDomain = 6 + ComputerNamePhysicalDnsFullyQualified = 7 + ComputerNameMax = 8 + + MOVEFILE_REPLACE_EXISTING = 0x1 + MOVEFILE_COPY_ALLOWED = 0x2 + MOVEFILE_DELAY_UNTIL_REBOOT = 0x4 + MOVEFILE_WRITE_THROUGH = 0x8 + MOVEFILE_CREATE_HARDLINK = 0x10 + MOVEFILE_FAIL_IF_NOT_TRACKABLE = 0x20 +) + +func Rename(oldpath, newpath string) error { + from, err := syscall.UTF16PtrFromString(oldpath) + if err != nil { + return err + } + to, err := syscall.UTF16PtrFromString(newpath) + if err != nil { + return err + } + return MoveFileEx(from, to, MOVEFILE_REPLACE_EXISTING) +} + +//sys LockFileEx(file syscall.Handle, flags uint32, reserved uint32, bytesLow uint32, bytesHigh uint32, overlapped *syscall.Overlapped) (err error) = kernel32.LockFileEx +//sys UnlockFileEx(file syscall.Handle, reserved uint32, bytesLow uint32, bytesHigh uint32, overlapped *syscall.Overlapped) (err error) = kernel32.UnlockFileEx + +const ( + LOCKFILE_FAIL_IMMEDIATELY = 0x00000001 + LOCKFILE_EXCLUSIVE_LOCK = 0x00000002 +) + +const MB_ERR_INVALID_CHARS = 8 + +//sys GetACP() (acp uint32) = kernel32.GetACP +//sys GetConsoleCP() (ccp uint32) = kernel32.GetConsoleCP +//sys MultiByteToWideChar(codePage uint32, dwFlags uint32, str *byte, nstr int32, wchar *uint16, nwchar int32) (nwrite int32, err error) = kernel32.MultiByteToWideChar +//sys GetCurrentThread() (pseudoHandle syscall.Handle, err error) = kernel32.GetCurrentThread + +const STYPE_DISKTREE = 0x00 + +type SHARE_INFO_2 struct { + Netname *uint16 + Type uint32 + Remark *uint16 + Permissions uint32 + MaxUses uint32 + CurrentUses uint32 + Path *uint16 + Passwd *uint16 +} + +//sys NetShareAdd(serverName *uint16, level uint32, buf *byte, parmErr *uint16) (neterr error) = netapi32.NetShareAdd +//sys NetShareDel(serverName *uint16, netName *uint16, reserved uint32) (neterr error) = netapi32.NetShareDel + +const ( + FILE_NAME_NORMALIZED = 0x0 + FILE_NAME_OPENED = 0x8 + + VOLUME_NAME_DOS = 0x0 + VOLUME_NAME_GUID = 0x1 + VOLUME_NAME_NONE = 0x4 + VOLUME_NAME_NT = 0x2 +) + +//sys GetFinalPathNameByHandle(file syscall.Handle, filePath *uint16, filePathSize uint32, flags uint32) (n uint32, err error) = kernel32.GetFinalPathNameByHandleW + +func LoadGetFinalPathNameByHandle() error { + return procGetFinalPathNameByHandleW.Find() +} diff --git a/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/sysdll/sysdll.go b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/sysdll/sysdll.go new file mode 100644 index 0000000000..4e0018f387 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/sysdll/sysdll.go @@ -0,0 +1,28 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package sysdll is an internal leaf package that records and reports +// which Windows DLL names are used by Go itself. These DLLs are then +// only loaded from the System32 directory. See Issue 14959. 
+package sysdll + +// IsSystemDLL reports whether the named dll key (a base name, like +// "foo.dll") is a system DLL which should only be loaded from the +// Windows SYSTEM32 directory. +// +// Filenames are case sensitive, but that doesn't matter because +// the case registered with Add is also the same case used with +// LoadDLL later. +// +// It has no associated mutex and should only be mutated serially +// (currently: during init), and not concurrent with DLL loading. +var IsSystemDLL = map[string]bool{} + +// Add notes that dll is a system32 DLL which should only be loaded +// from the Windows SYSTEM32 directory. It returns its argument back, +// for ease of use in generated code. +func Add(dll string) string { + IsSystemDLL[dll] = true + return dll +} diff --git a/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/zsyscall_windows.go b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/zsyscall_windows.go new file mode 100644 index 0000000000..3ed2d9fe01 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/zsyscall_windows.go @@ -0,0 +1,363 @@ +// Code generated by 'go generate'; DO NOT EDIT. + +package windows + +import ( + "syscall" + "unsafe" + + "github.com/rogpeppe/go-internal/internal/syscall/windows/sysdll" +) + +var _ unsafe.Pointer + +// Do the interface allocations only once for common +// Errno values. +const ( + errnoERROR_IO_PENDING = 997 +) + +var ( + errERROR_IO_PENDING error = syscall.Errno(errnoERROR_IO_PENDING) +) + +// errnoErr returns common boxed Errno values, to prevent +// allocations at runtime. +func errnoErr(e syscall.Errno) error { + switch e { + case 0: + return nil + case errnoERROR_IO_PENDING: + return errERROR_IO_PENDING + } + // TODO: add more here, after collecting data on the common + // error values see on Windows. (perhaps when running + // all.bat?) 
+ return e +} + +var ( + modiphlpapi = syscall.NewLazyDLL(sysdll.Add("iphlpapi.dll")) + modkernel32 = syscall.NewLazyDLL(sysdll.Add("kernel32.dll")) + modws2_32 = syscall.NewLazyDLL(sysdll.Add("ws2_32.dll")) + modnetapi32 = syscall.NewLazyDLL(sysdll.Add("netapi32.dll")) + modadvapi32 = syscall.NewLazyDLL(sysdll.Add("advapi32.dll")) + moduserenv = syscall.NewLazyDLL(sysdll.Add("userenv.dll")) + modpsapi = syscall.NewLazyDLL(sysdll.Add("psapi.dll")) + + procGetAdaptersAddresses = modiphlpapi.NewProc("GetAdaptersAddresses") + procGetComputerNameExW = modkernel32.NewProc("GetComputerNameExW") + procMoveFileExW = modkernel32.NewProc("MoveFileExW") + procGetModuleFileNameW = modkernel32.NewProc("GetModuleFileNameW") + procWSASocketW = modws2_32.NewProc("WSASocketW") + procLockFileEx = modkernel32.NewProc("LockFileEx") + procUnlockFileEx = modkernel32.NewProc("UnlockFileEx") + procGetACP = modkernel32.NewProc("GetACP") + procGetConsoleCP = modkernel32.NewProc("GetConsoleCP") + procMultiByteToWideChar = modkernel32.NewProc("MultiByteToWideChar") + procGetCurrentThread = modkernel32.NewProc("GetCurrentThread") + procNetShareAdd = modnetapi32.NewProc("NetShareAdd") + procNetShareDel = modnetapi32.NewProc("NetShareDel") + procGetFinalPathNameByHandleW = modkernel32.NewProc("GetFinalPathNameByHandleW") + procImpersonateSelf = modadvapi32.NewProc("ImpersonateSelf") + procRevertToSelf = modadvapi32.NewProc("RevertToSelf") + procOpenThreadToken = modadvapi32.NewProc("OpenThreadToken") + procLookupPrivilegeValueW = modadvapi32.NewProc("LookupPrivilegeValueW") + procAdjustTokenPrivileges = modadvapi32.NewProc("AdjustTokenPrivileges") + procDuplicateTokenEx = modadvapi32.NewProc("DuplicateTokenEx") + procSetTokenInformation = modadvapi32.NewProc("SetTokenInformation") + procGetProfilesDirectoryW = moduserenv.NewProc("GetProfilesDirectoryW") + procNetUserGetLocalGroups = modnetapi32.NewProc("NetUserGetLocalGroups") + procGetProcessMemoryInfo = modpsapi.NewProc("GetProcessMemoryInfo") + procGetFileInformationByHandleEx = modkernel32.NewProc("GetFileInformationByHandleEx") +) + +func GetAdaptersAddresses(family uint32, flags uint32, reserved uintptr, adapterAddresses *IpAdapterAddresses, sizePointer *uint32) (errcode error) { + r0, _, _ := syscall.Syscall6(procGetAdaptersAddresses.Addr(), 5, uintptr(family), uintptr(flags), uintptr(reserved), uintptr(unsafe.Pointer(adapterAddresses)), uintptr(unsafe.Pointer(sizePointer)), 0) + if r0 != 0 { + errcode = syscall.Errno(r0) + } + return +} + +func GetComputerNameEx(nameformat uint32, buf *uint16, n *uint32) (err error) { + r1, _, e1 := syscall.Syscall(procGetComputerNameExW.Addr(), 3, uintptr(nameformat), uintptr(unsafe.Pointer(buf)), uintptr(unsafe.Pointer(n))) + if r1 == 0 { + if e1 != 0 { + err = errnoErr(e1) + } else { + err = syscall.EINVAL + } + } + return +} + +func MoveFileEx(from *uint16, to *uint16, flags uint32) (err error) { + r1, _, e1 := syscall.Syscall(procMoveFileExW.Addr(), 3, uintptr(unsafe.Pointer(from)), uintptr(unsafe.Pointer(to)), uintptr(flags)) + if r1 == 0 { + if e1 != 0 { + err = errnoErr(e1) + } else { + err = syscall.EINVAL + } + } + return +} + +func GetModuleFileName(module syscall.Handle, fn *uint16, len uint32) (n uint32, err error) { + r0, _, e1 := syscall.Syscall(procGetModuleFileNameW.Addr(), 3, uintptr(module), uintptr(unsafe.Pointer(fn)), uintptr(len)) + n = uint32(r0) + if n == 0 { + if e1 != 0 { + err = errnoErr(e1) + } else { + err = syscall.EINVAL + } + } + return +} + +func WSASocket(af int32, typ int32, protocol int32, 
protinfo *syscall.WSAProtocolInfo, group uint32, flags uint32) (handle syscall.Handle, err error) { + r0, _, e1 := syscall.Syscall6(procWSASocketW.Addr(), 6, uintptr(af), uintptr(typ), uintptr(protocol), uintptr(unsafe.Pointer(protinfo)), uintptr(group), uintptr(flags)) + handle = syscall.Handle(r0) + if handle == syscall.InvalidHandle { + if e1 != 0 { + err = errnoErr(e1) + } else { + err = syscall.EINVAL + } + } + return +} + +func LockFileEx(file syscall.Handle, flags uint32, reserved uint32, bytesLow uint32, bytesHigh uint32, overlapped *syscall.Overlapped) (err error) { + r1, _, e1 := syscall.Syscall6(procLockFileEx.Addr(), 6, uintptr(file), uintptr(flags), uintptr(reserved), uintptr(bytesLow), uintptr(bytesHigh), uintptr(unsafe.Pointer(overlapped))) + if r1 == 0 { + if e1 != 0 { + err = errnoErr(e1) + } else { + err = syscall.EINVAL + } + } + return +} + +func UnlockFileEx(file syscall.Handle, reserved uint32, bytesLow uint32, bytesHigh uint32, overlapped *syscall.Overlapped) (err error) { + r1, _, e1 := syscall.Syscall6(procUnlockFileEx.Addr(), 5, uintptr(file), uintptr(reserved), uintptr(bytesLow), uintptr(bytesHigh), uintptr(unsafe.Pointer(overlapped)), 0) + if r1 == 0 { + if e1 != 0 { + err = errnoErr(e1) + } else { + err = syscall.EINVAL + } + } + return +} + +func GetACP() (acp uint32) { + r0, _, _ := syscall.Syscall(procGetACP.Addr(), 0, 0, 0, 0) + acp = uint32(r0) + return +} + +func GetConsoleCP() (ccp uint32) { + r0, _, _ := syscall.Syscall(procGetConsoleCP.Addr(), 0, 0, 0, 0) + ccp = uint32(r0) + return +} + +func MultiByteToWideChar(codePage uint32, dwFlags uint32, str *byte, nstr int32, wchar *uint16, nwchar int32) (nwrite int32, err error) { + r0, _, e1 := syscall.Syscall6(procMultiByteToWideChar.Addr(), 6, uintptr(codePage), uintptr(dwFlags), uintptr(unsafe.Pointer(str)), uintptr(nstr), uintptr(unsafe.Pointer(wchar)), uintptr(nwchar)) + nwrite = int32(r0) + if nwrite == 0 { + if e1 != 0 { + err = errnoErr(e1) + } else { + err = syscall.EINVAL + } + } + return +} + +func GetCurrentThread() (pseudoHandle syscall.Handle, err error) { + r0, _, e1 := syscall.Syscall(procGetCurrentThread.Addr(), 0, 0, 0, 0) + pseudoHandle = syscall.Handle(r0) + if pseudoHandle == 0 { + if e1 != 0 { + err = errnoErr(e1) + } else { + err = syscall.EINVAL + } + } + return +} + +func NetShareAdd(serverName *uint16, level uint32, buf *byte, parmErr *uint16) (neterr error) { + r0, _, _ := syscall.Syscall6(procNetShareAdd.Addr(), 4, uintptr(unsafe.Pointer(serverName)), uintptr(level), uintptr(unsafe.Pointer(buf)), uintptr(unsafe.Pointer(parmErr)), 0, 0) + if r0 != 0 { + neterr = syscall.Errno(r0) + } + return +} + +func NetShareDel(serverName *uint16, netName *uint16, reserved uint32) (neterr error) { + r0, _, _ := syscall.Syscall(procNetShareDel.Addr(), 3, uintptr(unsafe.Pointer(serverName)), uintptr(unsafe.Pointer(netName)), uintptr(reserved)) + if r0 != 0 { + neterr = syscall.Errno(r0) + } + return +} + +func GetFinalPathNameByHandle(file syscall.Handle, filePath *uint16, filePathSize uint32, flags uint32) (n uint32, err error) { + r0, _, e1 := syscall.Syscall6(procGetFinalPathNameByHandleW.Addr(), 4, uintptr(file), uintptr(unsafe.Pointer(filePath)), uintptr(filePathSize), uintptr(flags), 0, 0) + n = uint32(r0) + if n == 0 { + if e1 != 0 { + err = errnoErr(e1) + } else { + err = syscall.EINVAL + } + } + return +} + +func ImpersonateSelf(impersonationlevel uint32) (err error) { + r1, _, e1 := syscall.Syscall(procImpersonateSelf.Addr(), 1, uintptr(impersonationlevel), 0, 0) + if r1 == 0 { + if e1 
!= 0 { + err = errnoErr(e1) + } else { + err = syscall.EINVAL + } + } + return +} + +func RevertToSelf() (err error) { + r1, _, e1 := syscall.Syscall(procRevertToSelf.Addr(), 0, 0, 0, 0) + if r1 == 0 { + if e1 != 0 { + err = errnoErr(e1) + } else { + err = syscall.EINVAL + } + } + return +} + +func OpenThreadToken(h syscall.Handle, access uint32, openasself bool, token *syscall.Token) (err error) { + var _p0 uint32 + if openasself { + _p0 = 1 + } else { + _p0 = 0 + } + r1, _, e1 := syscall.Syscall6(procOpenThreadToken.Addr(), 4, uintptr(h), uintptr(access), uintptr(_p0), uintptr(unsafe.Pointer(token)), 0, 0) + if r1 == 0 { + if e1 != 0 { + err = errnoErr(e1) + } else { + err = syscall.EINVAL + } + } + return +} + +func LookupPrivilegeValue(systemname *uint16, name *uint16, luid *LUID) (err error) { + r1, _, e1 := syscall.Syscall(procLookupPrivilegeValueW.Addr(), 3, uintptr(unsafe.Pointer(systemname)), uintptr(unsafe.Pointer(name)), uintptr(unsafe.Pointer(luid))) + if r1 == 0 { + if e1 != 0 { + err = errnoErr(e1) + } else { + err = syscall.EINVAL + } + } + return +} + +func adjustTokenPrivileges(token syscall.Token, disableAllPrivileges bool, newstate *TOKEN_PRIVILEGES, buflen uint32, prevstate *TOKEN_PRIVILEGES, returnlen *uint32) (ret uint32, err error) { + var _p0 uint32 + if disableAllPrivileges { + _p0 = 1 + } else { + _p0 = 0 + } + r0, _, e1 := syscall.Syscall6(procAdjustTokenPrivileges.Addr(), 6, uintptr(token), uintptr(_p0), uintptr(unsafe.Pointer(newstate)), uintptr(buflen), uintptr(unsafe.Pointer(prevstate)), uintptr(unsafe.Pointer(returnlen))) + ret = uint32(r0) + if true { + if e1 != 0 { + err = errnoErr(e1) + } else { + err = syscall.EINVAL + } + } + return +} + +func DuplicateTokenEx(hExistingToken syscall.Token, dwDesiredAccess uint32, lpTokenAttributes *syscall.SecurityAttributes, impersonationLevel uint32, tokenType TokenType, phNewToken *syscall.Token) (err error) { + r1, _, e1 := syscall.Syscall6(procDuplicateTokenEx.Addr(), 6, uintptr(hExistingToken), uintptr(dwDesiredAccess), uintptr(unsafe.Pointer(lpTokenAttributes)), uintptr(impersonationLevel), uintptr(tokenType), uintptr(unsafe.Pointer(phNewToken))) + if r1 == 0 { + if e1 != 0 { + err = errnoErr(e1) + } else { + err = syscall.EINVAL + } + } + return +} + +func SetTokenInformation(tokenHandle syscall.Token, tokenInformationClass uint32, tokenInformation uintptr, tokenInformationLength uint32) (err error) { + r1, _, e1 := syscall.Syscall6(procSetTokenInformation.Addr(), 4, uintptr(tokenHandle), uintptr(tokenInformationClass), uintptr(tokenInformation), uintptr(tokenInformationLength), 0, 0) + if r1 == 0 { + if e1 != 0 { + err = errnoErr(e1) + } else { + err = syscall.EINVAL + } + } + return +} + +func GetProfilesDirectory(dir *uint16, dirLen *uint32) (err error) { + r1, _, e1 := syscall.Syscall(procGetProfilesDirectoryW.Addr(), 2, uintptr(unsafe.Pointer(dir)), uintptr(unsafe.Pointer(dirLen)), 0) + if r1 == 0 { + if e1 != 0 { + err = errnoErr(e1) + } else { + err = syscall.EINVAL + } + } + return +} + +func NetUserGetLocalGroups(serverName *uint16, userName *uint16, level uint32, flags uint32, buf **byte, prefMaxLen uint32, entriesRead *uint32, totalEntries *uint32) (neterr error) { + r0, _, _ := syscall.Syscall9(procNetUserGetLocalGroups.Addr(), 8, uintptr(unsafe.Pointer(serverName)), uintptr(unsafe.Pointer(userName)), uintptr(level), uintptr(flags), uintptr(unsafe.Pointer(buf)), uintptr(prefMaxLen), uintptr(unsafe.Pointer(entriesRead)), uintptr(unsafe.Pointer(totalEntries)), 0) + if r0 != 0 { + neterr = 
syscall.Errno(r0) + } + return +} + +func GetProcessMemoryInfo(handle syscall.Handle, memCounters *PROCESS_MEMORY_COUNTERS, cb uint32) (err error) { + r1, _, e1 := syscall.Syscall(procGetProcessMemoryInfo.Addr(), 3, uintptr(handle), uintptr(unsafe.Pointer(memCounters)), uintptr(cb)) + if r1 == 0 { + if e1 != 0 { + err = errnoErr(e1) + } else { + err = syscall.EINVAL + } + } + return +} + +func GetFileInformationByHandleEx(handle syscall.Handle, class uint32, info *byte, bufsize uint32) (err error) { + r1, _, e1 := syscall.Syscall6(procGetFileInformationByHandleEx.Addr(), 4, uintptr(handle), uintptr(class), uintptr(unsafe.Pointer(info)), uintptr(bufsize), 0, 0) + if r1 == 0 { + if e1 != 0 { + err = errnoErr(e1) + } else { + err = syscall.EINVAL + } + } + return +} diff --git a/vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock.go b/vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock.go new file mode 100644 index 0000000000..05f27c321a --- /dev/null +++ b/vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock.go @@ -0,0 +1,99 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package filelock provides a platform-independent API for advisory file +// locking. Calls to functions in this package on platforms that do not support +// advisory locks will return errors for which IsNotSupported returns true. +package filelock + +import ( + "errors" + "io/fs" + "os" +) + +// A File provides the minimal set of methods required to lock an open file. +// File implementations must be usable as map keys. +// The usual implementation is *os.File. +type File interface { + // Name returns the name of the file. + Name() string + + // Fd returns a valid file descriptor. + // (If the File is an *os.File, it must not be closed.) + Fd() uintptr + + // Stat returns the FileInfo structure describing file. + Stat() (fs.FileInfo, error) +} + +// Lock places an advisory write lock on the file, blocking until it can be +// locked. +// +// If Lock returns nil, no other process will be able to place a read or write +// lock on the file until this process exits, closes f, or calls Unlock on it. +// +// If f's descriptor is already read- or write-locked, the behavior of Lock is +// unspecified. +// +// Closing the file may or may not release the lock promptly. Callers should +// ensure that Unlock is always called when Lock succeeds. +func Lock(f File) error { + return lock(f, writeLock) +} + +// RLock places an advisory read lock on the file, blocking until it can be locked. +// +// If RLock returns nil, no other process will be able to place a write lock on +// the file until this process exits, closes f, or calls Unlock on it. +// +// If f is already read- or write-locked, the behavior of RLock is unspecified. +// +// Closing the file may or may not release the lock promptly. Callers should +// ensure that Unlock is always called if RLock succeeds. +func RLock(f File) error { + return lock(f, readLock) +} + +// Unlock removes an advisory lock placed on f by this process. +// +// The caller must not attempt to unlock a file that is not locked. +func Unlock(f File) error { + return unlock(f) +} + +// String returns the name of the function corresponding to lt +// (Lock, RLock, or Unlock). 
+func (lt lockType) String() string { + switch lt { + case readLock: + return "RLock" + case writeLock: + return "Lock" + default: + return "Unlock" + } +} + +// IsNotSupported returns a boolean indicating whether the error is known to +// report that a function is not supported (possibly for a specific input). +// It is satisfied by ErrNotSupported as well as some syscall errors. +func IsNotSupported(err error) bool { + return isNotSupported(underlyingError(err)) +} + +var ErrNotSupported = errors.New("operation not supported") + +// underlyingError returns the underlying error for known os error types. +func underlyingError(err error) error { + switch err := err.(type) { + case *fs.PathError: + return err.Err + case *os.LinkError: + return err.Err + case *os.SyscallError: + return err.Err + } + return err +} diff --git a/vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock_fcntl.go b/vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock_fcntl.go new file mode 100644 index 0000000000..8568048507 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock_fcntl.go @@ -0,0 +1,214 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build aix || (solaris && !illumos) + +// This code implements the filelock API using POSIX 'fcntl' locks, which attach +// to an (inode, process) pair rather than a file descriptor. To avoid unlocking +// files prematurely when the same file is opened through different descriptors, +// we allow only one read-lock at a time. +// +// Most platforms provide some alternative API, such as an 'flock' system call +// or an F_OFD_SETLK command for 'fcntl', that allows for better concurrency and +// does not require per-inode bookkeeping in the application. + +package filelock + +import ( + "errors" + "io" + "io/fs" + "math/rand" + "sync" + "syscall" + "time" +) + +type lockType int16 + +const ( + readLock lockType = syscall.F_RDLCK + writeLock lockType = syscall.F_WRLCK +) + +type inode = uint64 // type of syscall.Stat_t.Ino + +type inodeLock struct { + owner File + queue []<-chan File +} + +var ( + mu sync.Mutex + inodes = map[File]inode{} + locks = map[inode]inodeLock{} +) + +func lock(f File, lt lockType) (err error) { + // POSIX locks apply per inode and process, and the lock for an inode is + // released when *any* descriptor for that inode is closed. So we need to + // synchronize access to each inode internally, and must serialize lock and + // unlock calls that refer to the same inode through different descriptors. + fi, err := f.Stat() + if err != nil { + return err + } + ino := fi.Sys().(*syscall.Stat_t).Ino + + mu.Lock() + if i, dup := inodes[f]; dup && i != ino { + mu.Unlock() + return &fs.PathError{ + Op: lt.String(), + Path: f.Name(), + Err: errors.New("inode for file changed since last Lock or RLock"), + } + } + inodes[f] = ino + + var wait chan File + l := locks[ino] + if l.owner == f { + // This file already owns the lock, but the call may change its lock type. + } else if l.owner == nil { + // No owner: it's ours now. + l.owner = f + } else { + // Already owned: add a channel to wait on. + wait = make(chan File) + l.queue = append(l.queue, wait) + } + locks[ino] = l + mu.Unlock() + + if wait != nil { + wait <- f + } + + // Spurious EDEADLK errors arise on platforms that compute deadlock graphs at + // the process, rather than thread, level. 
Consider processes P and Q, with + // threads P.1, P.2, and Q.3. The following trace is NOT a deadlock, but will be + // reported as a deadlock on systems that consider only process granularity: + // + // P.1 locks file A. + // Q.3 locks file B. + // Q.3 blocks on file A. + // P.2 blocks on file B. (This is erroneously reported as a deadlock.) + // P.1 unlocks file A. + // Q.3 unblocks and locks file A. + // Q.3 unlocks files A and B. + // P.2 unblocks and locks file B. + // P.2 unlocks file B. + // + // These spurious errors were observed in practice on AIX and Solaris in + // cmd/go: see https://golang.org/issue/32817. + // + // We work around this bug by treating EDEADLK as always spurious. If there + // really is a lock-ordering bug between the interacting processes, it will + // become a livelock instead, but that's not appreciably worse than if we had + // a proper flock implementation (which generally does not even attempt to + // diagnose deadlocks). + // + // In the above example, that changes the trace to: + // + // P.1 locks file A. + // Q.3 locks file B. + // Q.3 blocks on file A. + // P.2 spuriously fails to lock file B and goes to sleep. + // P.1 unlocks file A. + // Q.3 unblocks and locks file A. + // Q.3 unlocks files A and B. + // P.2 wakes up and locks file B. + // P.2 unlocks file B. + // + // We know that the retry loop will not introduce a *spurious* livelock + // because, according to the POSIX specification, EDEADLK is only to be + // returned when “the lock is blocked by a lock from another process”. + // If that process is blocked on some lock that we are holding, then the + // resulting livelock is due to a real deadlock (and would manifest as such + // when using, for example, the flock implementation of this package). + // If the other process is *not* blocked on some other lock that we are + // holding, then it will eventually release the requested lock. + + nextSleep := 1 * time.Millisecond + const maxSleep = 500 * time.Millisecond + for { + err = setlkw(f.Fd(), lt) + if err != syscall.EDEADLK { + break + } + time.Sleep(nextSleep) + + nextSleep += nextSleep + if nextSleep > maxSleep { + nextSleep = maxSleep + } + // Apply 10% jitter to avoid synchronizing collisions when we finally unblock. + nextSleep += time.Duration((0.1*rand.Float64() - 0.05) * float64(nextSleep)) + } + + if err != nil { + unlock(f) + return &fs.PathError{ + Op: lt.String(), + Path: f.Name(), + Err: err, + } + } + + return nil +} + +func unlock(f File) error { + var owner File + + mu.Lock() + ino, ok := inodes[f] + if ok { + owner = locks[ino].owner + } + mu.Unlock() + + if owner != f { + panic("unlock called on a file that is not locked") + } + + err := setlkw(f.Fd(), syscall.F_UNLCK) + + mu.Lock() + l := locks[ino] + if len(l.queue) == 0 { + // No waiters: remove the map entry. + delete(locks, ino) + } else { + // The first waiter is sending us their file now. + // Receive it and update the queue. + l.owner = <-l.queue[0] + l.queue = l.queue[1:] + locks[ino] = l + } + delete(inodes, f) + mu.Unlock() + + return err +} + +// setlkw calls FcntlFlock with F_SETLKW for the entire file indicated by fd. +func setlkw(fd uintptr, lt lockType) error { + for { + err := syscall.FcntlFlock(fd, syscall.F_SETLKW, &syscall.Flock_t{ + Type: int16(lt), + Whence: io.SeekStart, + Start: 0, + Len: 0, // All bytes. 
+ }) + if err != syscall.EINTR { + return err + } + } +} + +func isNotSupported(err error) bool { + return err == syscall.ENOSYS || err == syscall.ENOTSUP || err == syscall.EOPNOTSUPP || err == ErrNotSupported +} diff --git a/vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock_other.go b/vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock_other.go new file mode 100644 index 0000000000..7bdd62bd9b --- /dev/null +++ b/vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock_other.go @@ -0,0 +1,36 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build !unix && !windows + +package filelock + +import "io/fs" + +type lockType int8 + +const ( + readLock = iota + 1 + writeLock +) + +func lock(f File, lt lockType) error { + return &fs.PathError{ + Op: lt.String(), + Path: f.Name(), + Err: ErrNotSupported, + } +} + +func unlock(f File) error { + return &fs.PathError{ + Op: "Unlock", + Path: f.Name(), + Err: ErrNotSupported, + } +} + +func isNotSupported(err error) bool { + return err == ErrNotSupported +} diff --git a/vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock_unix.go b/vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock_unix.go new file mode 100644 index 0000000000..d7778d05de --- /dev/null +++ b/vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock_unix.go @@ -0,0 +1,44 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build darwin || dragonfly || freebsd || illumos || linux || netbsd || openbsd + +package filelock + +import ( + "io/fs" + "syscall" +) + +type lockType int16 + +const ( + readLock lockType = syscall.LOCK_SH + writeLock lockType = syscall.LOCK_EX +) + +func lock(f File, lt lockType) (err error) { + for { + err = syscall.Flock(int(f.Fd()), int(lt)) + if err != syscall.EINTR { + break + } + } + if err != nil { + return &fs.PathError{ + Op: lt.String(), + Path: f.Name(), + Err: err, + } + } + return nil +} + +func unlock(f File) error { + return lock(f, syscall.LOCK_UN) +} + +func isNotSupported(err error) bool { + return err == syscall.ENOSYS || err == syscall.ENOTSUP || err == syscall.EOPNOTSUPP || err == ErrNotSupported +} diff --git a/vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock_windows.go b/vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock_windows.go new file mode 100644 index 0000000000..ceab65b02a --- /dev/null +++ b/vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock_windows.go @@ -0,0 +1,67 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +//go:build windows + +package filelock + +import ( + "io/fs" + "syscall" + + "github.com/rogpeppe/go-internal/internal/syscall/windows" +) + +type lockType uint32 + +const ( + readLock lockType = 0 + writeLock lockType = windows.LOCKFILE_EXCLUSIVE_LOCK +) + +const ( + reserved = 0 + allBytes = ^uint32(0) +) + +func lock(f File, lt lockType) error { + // Per https://golang.org/issue/19098, “Programs currently expect the Fd + // method to return a handle that uses ordinary synchronous I/O.” + // However, LockFileEx still requires an OVERLAPPED structure, + // which contains the file offset of the beginning of the lock range. + // We want to lock the entire file, so we leave the offset as zero. + ol := new(syscall.Overlapped) + + err := windows.LockFileEx(syscall.Handle(f.Fd()), uint32(lt), reserved, allBytes, allBytes, ol) + if err != nil { + return &fs.PathError{ + Op: lt.String(), + Path: f.Name(), + Err: err, + } + } + return nil +} + +func unlock(f File) error { + ol := new(syscall.Overlapped) + err := windows.UnlockFileEx(syscall.Handle(f.Fd()), reserved, allBytes, allBytes, ol) + if err != nil { + return &fs.PathError{ + Op: "Unlock", + Path: f.Name(), + Err: err, + } + } + return nil +} + +func isNotSupported(err error) bool { + switch err { + case windows.ERROR_NOT_SUPPORTED, windows.ERROR_CALL_NOT_IMPLEMENTED, ErrNotSupported: + return true + default: + return false + } +} diff --git a/vendor/github.com/rogpeppe/go-internal/lockedfile/lockedfile.go b/vendor/github.com/rogpeppe/go-internal/lockedfile/lockedfile.go new file mode 100644 index 0000000000..82e1a89675 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-internal/lockedfile/lockedfile.go @@ -0,0 +1,187 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package lockedfile creates and manipulates files whose contents should only +// change atomically. +package lockedfile + +import ( + "fmt" + "io" + "io/fs" + "os" + "runtime" +) + +// A File is a locked *os.File. +// +// Closing the file releases the lock. +// +// If the program exits while a file is locked, the operating system releases +// the lock but may not do so promptly: callers must ensure that all locked +// files are closed before exiting. +type File struct { + osFile + closed bool +} + +// osFile embeds a *os.File while keeping the pointer itself unexported. +// (When we close a File, it must be the same file descriptor that we opened!) +type osFile struct { + *os.File +} + +// OpenFile is like os.OpenFile, but returns a locked file. +// If flag includes os.O_WRONLY or os.O_RDWR, the file is write-locked; +// otherwise, it is read-locked. +func OpenFile(name string, flag int, perm fs.FileMode) (*File, error) { + var ( + f = new(File) + err error + ) + f.osFile.File, err = openFile(name, flag, perm) + if err != nil { + return nil, err + } + + // Although the operating system will drop locks for open files when the go + // command exits, we want to hold locks for as little time as possible, and we + // especially don't want to leave a file locked after we're done with it. Our + // Close method is what releases the locks, so use a finalizer to report + // missing Close calls on a best-effort basis. + runtime.SetFinalizer(f, func(f *File) { + panic(fmt.Sprintf("lockedfile.File %s became unreachable without a call to Close", f.Name())) + }) + + return f, nil +} + +// Open is like os.Open, but returns a read-locked file. 
+func Open(name string) (*File, error) { + return OpenFile(name, os.O_RDONLY, 0) +} + +// Create is like os.Create, but returns a write-locked file. +func Create(name string) (*File, error) { + return OpenFile(name, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0666) +} + +// Edit creates the named file with mode 0666 (before umask), +// but does not truncate existing contents. +// +// If Edit succeeds, methods on the returned File can be used for I/O. +// The associated file descriptor has mode O_RDWR and the file is write-locked. +func Edit(name string) (*File, error) { + return OpenFile(name, os.O_RDWR|os.O_CREATE, 0666) +} + +// Close unlocks and closes the underlying file. +// +// Close may be called multiple times; all calls after the first will return a +// non-nil error. +func (f *File) Close() error { + if f.closed { + return &fs.PathError{ + Op: "close", + Path: f.Name(), + Err: fs.ErrClosed, + } + } + f.closed = true + + err := closeFile(f.osFile.File) + runtime.SetFinalizer(f, nil) + return err +} + +// Read opens the named file with a read-lock and returns its contents. +func Read(name string) ([]byte, error) { + f, err := Open(name) + if err != nil { + return nil, err + } + defer f.Close() + + return io.ReadAll(f) +} + +// Write opens the named file (creating it with the given permissions if needed), +// then write-locks it and overwrites it with the given content. +func Write(name string, content io.Reader, perm fs.FileMode) (err error) { + f, err := OpenFile(name, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, perm) + if err != nil { + return err + } + + _, err = io.Copy(f, content) + if closeErr := f.Close(); err == nil { + err = closeErr + } + return err +} + +// Transform invokes t with the result of reading the named file, with its lock +// still held. +// +// If t returns a nil error, Transform then writes the returned contents back to +// the file, making a best effort to preserve existing contents on error. +// +// t must not modify the slice passed to it. +func Transform(name string, t func([]byte) ([]byte, error)) (err error) { + f, err := Edit(name) + if err != nil { + return err + } + defer f.Close() + + old, err := io.ReadAll(f) + if err != nil { + return err + } + + new, err := t(old) + if err != nil { + return err + } + + if len(new) > len(old) { + // The overall file size is increasing, so write the tail first: if we're + // about to run out of space on the disk, we would rather detect that + // failure before we have overwritten the original contents. + if _, err := f.WriteAt(new[len(old):], int64(len(old))); err != nil { + // Make a best effort to remove the incomplete tail. + f.Truncate(int64(len(old))) + return err + } + } + + // We're about to overwrite the old contents. In case of failure, make a best + // effort to roll back before we close the file. + defer func() { + if err != nil { + if _, err := f.WriteAt(old, 0); err == nil { + f.Truncate(int64(len(old))) + } + } + }() + + if len(new) >= len(old) { + if _, err := f.WriteAt(new[:len(old)], 0); err != nil { + return err + } + } else { + if _, err := f.WriteAt(new, 0); err != nil { + return err + } + // The overall file size is decreasing, so shrink the file to its final size + // after writing. We do this after writing (instead of before) so that if + // the write fails, enough filesystem space will likely still be reserved + // to contain the previous contents. 
+ if err := f.Truncate(int64(len(new))); err != nil { + return err + } + } + + return nil +} diff --git a/vendor/github.com/rogpeppe/go-internal/lockedfile/lockedfile_filelock.go b/vendor/github.com/rogpeppe/go-internal/lockedfile/lockedfile_filelock.go new file mode 100644 index 0000000000..454c3a42c4 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-internal/lockedfile/lockedfile_filelock.go @@ -0,0 +1,65 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build !plan9 + +package lockedfile + +import ( + "io/fs" + "os" + + "github.com/rogpeppe/go-internal/lockedfile/internal/filelock" +) + +func openFile(name string, flag int, perm fs.FileMode) (*os.File, error) { + // On BSD systems, we could add the O_SHLOCK or O_EXLOCK flag to the OpenFile + // call instead of locking separately, but we have to support separate locking + // calls for Linux and Windows anyway, so it's simpler to use that approach + // consistently. + + f, err := os.OpenFile(name, flag&^os.O_TRUNC, perm) + if err != nil { + return nil, err + } + + switch flag & (os.O_RDONLY | os.O_WRONLY | os.O_RDWR) { + case os.O_WRONLY, os.O_RDWR: + err = filelock.Lock(f) + default: + err = filelock.RLock(f) + } + if err != nil { + f.Close() + return nil, err + } + + if flag&os.O_TRUNC == os.O_TRUNC { + if err := f.Truncate(0); err != nil { + // The documentation for os.O_TRUNC says “if possible, truncate file when + // opened”, but doesn't define “possible” (golang.org/issue/28699). + // We'll treat regular files (and symlinks to regular files) as “possible” + // and ignore errors for the rest. + if fi, statErr := f.Stat(); statErr != nil || fi.Mode().IsRegular() { + filelock.Unlock(f) + f.Close() + return nil, err + } + } + } + + return f, nil +} + +func closeFile(f *os.File) error { + // Since locking syscalls operate on file descriptors, we must unlock the file + // while the descriptor is still valid — that is, before the file is closed — + // and avoid unlocking files that are already closed. + err := filelock.Unlock(f) + + if closeErr := f.Close(); err == nil { + err = closeErr + } + return err +} diff --git a/vendor/github.com/rogpeppe/go-internal/lockedfile/lockedfile_plan9.go b/vendor/github.com/rogpeppe/go-internal/lockedfile/lockedfile_plan9.go new file mode 100644 index 0000000000..a2ce794b96 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-internal/lockedfile/lockedfile_plan9.go @@ -0,0 +1,94 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build plan9 + +package lockedfile + +import ( + "io/fs" + "math/rand" + "os" + "strings" + "time" +) + +// Opening an exclusive-use file returns an error. +// The expected error strings are: +// +// - "open/create -- file is locked" (cwfs, kfs) +// - "exclusive lock" (fossil) +// - "exclusive use file already open" (ramfs) +var lockedErrStrings = [...]string{ + "file is locked", + "exclusive lock", + "exclusive use file already open", +} + +// Even though plan9 doesn't support the Lock/RLock/Unlock functions to +// manipulate already-open files, IsLocked is still meaningful: os.OpenFile +// itself may return errors that indicate that a file with the ModeExclusive bit +// set is already open. 
+func isLocked(err error) bool { + s := err.Error() + + for _, frag := range lockedErrStrings { + if strings.Contains(s, frag) { + return true + } + } + + return false +} + +func openFile(name string, flag int, perm fs.FileMode) (*os.File, error) { + // Plan 9 uses a mode bit instead of explicit lock/unlock syscalls. + // + // Per http://man.cat-v.org/plan_9/5/stat: “Exclusive use files may be open + // for I/O by only one fid at a time across all clients of the server. If a + // second open is attempted, it draws an error.” + // + // So we can try to open a locked file, but if it fails we're on our own to + // figure out when it becomes available. We'll use exponential backoff with + // some jitter and an arbitrary limit of 500ms. + + // If the file was unpacked or created by some other program, it might not + // have the ModeExclusive bit set. Set it before we call OpenFile, so that we + // can be confident that a successful OpenFile implies exclusive use. + if fi, err := os.Stat(name); err == nil { + if fi.Mode()&fs.ModeExclusive == 0 { + if err := os.Chmod(name, fi.Mode()|fs.ModeExclusive); err != nil { + return nil, err + } + } + } else if !os.IsNotExist(err) { + return nil, err + } + + nextSleep := 1 * time.Millisecond + const maxSleep = 500 * time.Millisecond + for { + f, err := os.OpenFile(name, flag, perm|fs.ModeExclusive) + if err == nil { + return f, nil + } + + if !isLocked(err) { + return nil, err + } + + time.Sleep(nextSleep) + + nextSleep += nextSleep + if nextSleep > maxSleep { + nextSleep = maxSleep + } + // Apply 10% jitter to avoid synchronizing collisions. + nextSleep += time.Duration((0.1*rand.Float64() - 0.05) * float64(nextSleep)) + } +} + +func closeFile(f *os.File) error { + return f.Close() +} diff --git a/vendor/github.com/rogpeppe/go-internal/lockedfile/mutex.go b/vendor/github.com/rogpeppe/go-internal/lockedfile/mutex.go new file mode 100644 index 0000000000..180a36c620 --- /dev/null +++ b/vendor/github.com/rogpeppe/go-internal/lockedfile/mutex.go @@ -0,0 +1,67 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package lockedfile + +import ( + "fmt" + "os" + "sync" +) + +// A Mutex provides mutual exclusion within and across processes by locking a +// well-known file. Such a file generally guards some other part of the +// filesystem: for example, a Mutex file in a directory might guard access to +// the entire tree rooted in that directory. +// +// Mutex does not implement sync.Locker: unlike a sync.Mutex, a lockedfile.Mutex +// can fail to lock (e.g. if there is a permission error in the filesystem). +// +// Like a sync.Mutex, a Mutex may be included as a field of a larger struct but +// must not be copied after first use. The Path field must be set before first +// use and must not be change thereafter. +type Mutex struct { + Path string // The path to the well-known lock file. Must be non-empty. + mu sync.Mutex // A redundant mutex. The race detector doesn't know about file locking, so in tests we may need to lock something that it understands. +} + +// MutexAt returns a new Mutex with Path set to the given non-empty path. +func MutexAt(path string) *Mutex { + if path == "" { + panic("lockedfile.MutexAt: path must be non-empty") + } + return &Mutex{Path: path} +} + +func (mu *Mutex) String() string { + return fmt.Sprintf("lockedfile.Mutex(%s)", mu.Path) +} + +// Lock attempts to lock the Mutex. 
+// +// If successful, Lock returns a non-nil unlock function: it is provided as a +// return-value instead of a separate method to remind the caller to check the +// accompanying error. (See https://golang.org/issue/20803.) +func (mu *Mutex) Lock() (unlock func(), err error) { + if mu.Path == "" { + panic("lockedfile.Mutex: missing Path during Lock") + } + + // We could use either O_RDWR or O_WRONLY here. If we choose O_RDWR and the + // file at mu.Path is write-only, the call to OpenFile will fail with a + // permission error. That's actually what we want: if we add an RLock method + // in the future, it should call OpenFile with O_RDONLY and will require the + // files must be readable, so we should not let the caller make any + // assumptions about Mutex working with write-only files. + f, err := OpenFile(mu.Path, os.O_RDWR|os.O_CREATE, 0666) + if err != nil { + return nil, err + } + mu.mu.Lock() + + return func() { + mu.mu.Unlock() + f.Close() + }, nil +} diff --git a/vendor/github.com/securego/gosec/v2/action.yml b/vendor/github.com/securego/gosec/v2/action.yml index d4bc351c86..2b2deaab77 100644 --- a/vendor/github.com/securego/gosec/v2/action.yml +++ b/vendor/github.com/securego/gosec/v2/action.yml @@ -10,7 +10,7 @@ inputs: runs: using: 'docker' - image: 'docker://securego/gosec:2.21.1' + image: 'docker://securego/gosec:2.21.3' args: - ${{ inputs.args }} diff --git a/vendor/github.com/securego/gosec/v2/analyzers/analyzerslist.go b/vendor/github.com/securego/gosec/v2/analyzers/analyzerslist.go index f2157442f5..8d222384aa 100644 --- a/vendor/github.com/securego/gosec/v2/analyzers/analyzerslist.go +++ b/vendor/github.com/securego/gosec/v2/analyzers/analyzerslist.go @@ -51,7 +51,7 @@ func (al *AnalyzerList) AnalyzersInfo() (map[string]AnalyzerDefinition, map[stri type AnalyzerFilter func(string) bool // NewAnalyzerFilter is a closure that will include/exclude the analyzer ID's based on -// the supplied boolean value. +// the supplied boolean value (false means don't remove, true means exclude). func NewAnalyzerFilter(action bool, analyzerIDs ...string) AnalyzerFilter { analyzerlist := make(map[string]bool) for _, analyzer := range analyzerIDs { diff --git a/vendor/github.com/securego/gosec/v2/analyzers/conversion_overflow.go b/vendor/github.com/securego/gosec/v2/analyzers/conversion_overflow.go index 3ef4825af5..bebe9b8340 100644 --- a/vendor/github.com/securego/gosec/v2/analyzers/conversion_overflow.go +++ b/vendor/github.com/securego/gosec/v2/analyzers/conversion_overflow.go @@ -40,7 +40,7 @@ type integer struct { type rangeResult struct { minValue int maxValue uint - explixitPositiveVals []uint + explicitPositiveVals []uint explicitNegativeVals []int isRangeCheck bool convertFound bool @@ -271,7 +271,7 @@ func hasExplicitRangeCheck(instr *ssa.Convert, dstType string) bool { if result.isRangeCheck { minValue = max(minValue, &result.minValue) maxValue = min(maxValue, &result.maxValue) - explicitPositiveVals = append(explicitPositiveVals, result.explixitPositiveVals...) + explicitPositiveVals = append(explicitPositiveVals, result.explicitPositiveVals...) explicitNegativeVals = append(explicitNegativeVals, result.explicitNegativeVals...) 
} case *ssa.Call: @@ -325,16 +325,17 @@ func getResultRange(ifInstr *ssa.If, instr *ssa.Convert, visitedIfs map[*ssa.If] result.convertFound = true result.minValue = max(result.minValue, thenBounds.minValue) result.maxValue = min(result.maxValue, thenBounds.maxValue) - result.explixitPositiveVals = append(result.explixitPositiveVals, thenBounds.explixitPositiveVals...) - result.explicitNegativeVals = append(result.explicitNegativeVals, thenBounds.explicitNegativeVals...) } else if elseBounds.convertFound { result.convertFound = true result.minValue = max(result.minValue, elseBounds.minValue) result.maxValue = min(result.maxValue, elseBounds.maxValue) - result.explixitPositiveVals = append(result.explixitPositiveVals, elseBounds.explixitPositiveVals...) - result.explicitNegativeVals = append(result.explicitNegativeVals, elseBounds.explicitNegativeVals...) } + result.explicitPositiveVals = append(result.explicitPositiveVals, thenBounds.explixitPositiveVals...) + result.explicitNegativeVals = append(result.explicitNegativeVals, thenBounds.explicitNegativeVals...) + result.explicitPositiveVals = append(result.explicitPositiveVals, elseBounds.explixitPositiveVals...) + result.explicitNegativeVals = append(result.explicitNegativeVals, elseBounds.explicitNegativeVals...) + return result } @@ -344,15 +345,26 @@ func updateResultFromBinOp(result *rangeResult, binOp *ssa.BinOp, instr *ssa.Con operandsFlipped := false compareVal, op := getRealValueFromOperation(instr.X) - if x != compareVal { - y, operandsFlipped = x, true + + // Handle FieldAddr + if fieldAddr, ok := compareVal.(*ssa.FieldAddr); ok { + compareVal = fieldAddr + } + + if !isSameOrRelated(x, compareVal) { + y = x + operandsFlipped = true } constVal, ok := y.(*ssa.Const) if !ok { return } - + // TODO: constVal.Value nil check avoids #1229 panic but seems to be hiding a bug in the code above or in x/tools/go/ssa. + if constVal.Value == nil { + // log.Fatalf("[gosec] constVal.Value is nil flipped=%t, constVal=%#v, binOp=%#v", operandsFlipped, constVal, binOp) + return + } switch binOp.Op { case token.LEQ, token.LSS: updateMinMaxForLessOrEqual(result, constVal, binOp.Op, operandsFlipped, successPathConvert) @@ -362,25 +374,12 @@ func updateResultFromBinOp(result *rangeResult, binOp *ssa.BinOp, instr *ssa.Con if !successPathConvert { break } - - // Determine if the constant value is positive or negative. - if strings.Contains(constVal.String(), "-") { - result.explicitNegativeVals = append(result.explicitNegativeVals, int(constVal.Int64())) - } else { - result.explixitPositiveVals = append(result.explixitPositiveVals, uint(constVal.Uint64())) - } - + updateExplicitValues(result, constVal) case token.NEQ: if successPathConvert { break } - - // Determine if the constant value is positive or negative. 
- if strings.Contains(constVal.String(), "-") { - result.explicitNegativeVals = append(result.explicitNegativeVals, int(constVal.Int64())) - } else { - result.explixitPositiveVals = append(result.explixitPositiveVals, uint(constVal.Uint64())) - } + updateExplicitValues(result, constVal) } if op == "neg" { @@ -391,11 +390,19 @@ func updateResultFromBinOp(result *rangeResult, binOp *ssa.BinOp, instr *ssa.Con result.maxValue = uint(min) } if max <= math.MaxInt { - result.minValue = int(max) //nolint:gosec + result.minValue = int(max) } } } +func updateExplicitValues(result *rangeResult, constVal *ssa.Const) { + if strings.Contains(constVal.String(), "-") { + result.explicitNegativeVals = append(result.explicitNegativeVals, int(constVal.Int64())) + } else { + result.explicitPositiveVals = append(result.explicitPositiveVals, uint(constVal.Uint64())) + } +} + func updateMinMaxForLessOrEqual(result *rangeResult, constVal *ssa.Const, op token.Token, operandsFlipped bool, successPathConvert bool) { // If the success path has a conversion and the operands are not flipped, then the constant value is the maximum value. if successPathConvert && !operandsFlipped { @@ -439,6 +446,8 @@ func walkBranchForConvert(block *ssa.BasicBlock, instr *ssa.Convert, visitedIfs if result.isRangeCheck { bounds.minValue = toPtr(max(result.minValue, bounds.minValue)) bounds.maxValue = toPtr(min(result.maxValue, bounds.maxValue)) + bounds.explixitPositiveVals = append(bounds.explixitPositiveVals, result.explicitPositiveVals...) + bounds.explicitNegativeVals = append(bounds.explicitNegativeVals, result.explicitNegativeVals...) } case *ssa.Call: if v == instr.X { @@ -463,9 +472,10 @@ func isRangeCheck(v ssa.Value, x ssa.Value) bool { switch op := v.(type) { case *ssa.BinOp: switch op.Op { - case token.LSS, token.LEQ, token.GTR, token.GEQ, - token.EQL, token.NEQ: - return op.X == compareVal || op.Y == compareVal + case token.LSS, token.LEQ, token.GTR, token.GEQ, token.EQL, token.NEQ: + leftMatch := isSameOrRelated(op.X, compareVal) + rightMatch := isSameOrRelated(op.Y, compareVal) + return leftMatch || rightMatch } } return false @@ -475,12 +485,36 @@ func getRealValueFromOperation(v ssa.Value) (ssa.Value, string) { switch v := v.(type) { case *ssa.UnOp: if v.Op == token.SUB { - return v.X, "neg" + val, _ := getRealValueFromOperation(v.X) + return val, "neg" } + return getRealValueFromOperation(v.X) + case *ssa.FieldAddr: + return v, "field" + case *ssa.Alloc: + return v, "alloc" } return v, "" } +func isSameOrRelated(a, b ssa.Value) bool { + aVal, _ := getRealValueFromOperation(a) + bVal, _ := getRealValueFromOperation(b) + + if aVal == bVal { + return true + } + + // Check if both are FieldAddr operations referring to the same field of the same struct + if aField, aOk := aVal.(*ssa.FieldAddr); aOk { + if bField, bOk := bVal.(*ssa.FieldAddr); bOk { + return aField.X == bField.X && aField.Field == bField.Field + } + } + + return false +} + func explicitValsInRange(explicitPosVals []uint, explicitNegVals []int, dstInt integer) bool { if len(explicitPosVals) == 0 && len(explicitNegVals) == 0 { return false diff --git a/vendor/github.com/sivchari/tenv/goreleaser.yaml b/vendor/github.com/sivchari/tenv/goreleaser.yaml new file mode 100644 index 0000000000..56db218d72 --- /dev/null +++ b/vendor/github.com/sivchari/tenv/goreleaser.yaml @@ -0,0 +1,26 @@ +project_name: tenv + +env: + - GO111MODULE=on + +builds: + - id: tenv + main: ./cmd/tenv/main.go + binary: tenv + env: + - CGO_ENABLED=0 + goos: + - linux + - darwin + goarch: + - 
amd64 + - arm64 + +archives: + - id: tenv + builds: + - tenv + name_template: '{{ .Binary }}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}' + format_overrides: + - goos: windows + format: zip diff --git a/vendor/github.com/sivchari/tenv/tenv.go b/vendor/github.com/sivchari/tenv/tenv.go index 999c5289dc..657e60e4e1 100644 --- a/vendor/github.com/sivchari/tenv/tenv.go +++ b/vendor/github.com/sivchari/tenv/tenv.go @@ -2,6 +2,8 @@ package tenv import ( "go/ast" + "go/token" + "go/types" "strings" "golang.org/x/tools/go/analysis" @@ -51,7 +53,7 @@ func run(pass *analysis.Pass) (interface{}, error) { } func checkFuncDecl(pass *analysis.Pass, f *ast.FuncDecl, fileName string) { - argName, ok := targetRunner(f.Type.Params.List, fileName) + argName, ok := targetRunner(pass, f.Type.Params.List, fileName) if !ok { return } @@ -59,7 +61,7 @@ func checkFuncDecl(pass *analysis.Pass, f *ast.FuncDecl, fileName string) { } func checkFuncLit(pass *analysis.Pass, f *ast.FuncLit, fileName string) { - argName, ok := targetRunner(f.Type.Params.List, fileName) + argName, ok := targetRunner(pass, f.Type.Params.List, fileName) if !ok { return } @@ -70,45 +72,33 @@ func checkStmts(pass *analysis.Pass, stmts []ast.Stmt, funcName, argName string) for _, stmt := range stmts { switch stmt := stmt.(type) { case *ast.ExprStmt: - if !checkExprStmt(pass, stmt, funcName, argName) { - continue - } + checkExprStmt(pass, stmt, funcName, argName) case *ast.IfStmt: - if !checkIfStmt(pass, stmt, funcName, argName) { - continue - } + checkIfStmt(pass, stmt, funcName, argName) case *ast.AssignStmt: - if !checkAssignStmt(pass, stmt, funcName, argName) { - continue - } + checkAssignStmt(pass, stmt, funcName, argName) case *ast.ForStmt: checkForStmt(pass, stmt, funcName, argName) } } } -func checkExprStmt(pass *analysis.Pass, stmt *ast.ExprStmt, funcName, argName string) bool { +func checkExprStmt(pass *analysis.Pass, stmt *ast.ExprStmt, funcName, argName string) { callExpr, ok := stmt.X.(*ast.CallExpr) if !ok { - return false + return } checkArgs(pass, callExpr.Args, funcName, argName) - fun, ok := callExpr.Fun.(*ast.SelectorExpr) - if !ok { - return false - } - x, ok := fun.X.(*ast.Ident) - if !ok { - return false + ident, ok := callExpr.Fun.(*ast.Ident) + if ok { + obj := pass.TypesInfo.ObjectOf(ident) + checkObj(pass, obj, stmt.Pos(), funcName, argName) } - targetName := x.Name + "." + fun.Sel.Name - if targetName == "os.Setenv" { - if argName == "" { - argName = "testing" - } - pass.Reportf(stmt.Pos(), "os.Setenv() can be replaced by `%s.Setenv()` in %s", argName, funcName) + fun, ok := callExpr.Fun.(*ast.SelectorExpr) + if ok { + obj := pass.TypesInfo.ObjectOf(fun.Sel) + checkObj(pass, obj, stmt.Pos(), funcName, argName) } - return true } func checkArgs(pass *analysis.Pass, args []ast.Expr, funcName, argName string) { @@ -117,83 +107,82 @@ func checkArgs(pass *analysis.Pass, args []ast.Expr, funcName, argName string) { if !ok { continue } - fun, ok := callExpr.Fun.(*ast.SelectorExpr) - if !ok { - continue - } - x, ok := fun.X.(*ast.Ident) - if !ok { - continue + ident, ok := callExpr.Fun.(*ast.Ident) + if ok { + obj := pass.TypesInfo.ObjectOf(ident) + checkObj(pass, obj, arg.Pos(), funcName, argName) } - targetName := x.Name + "." 
+ fun.Sel.Name - if targetName == "os.Setenv" { - if argName == "" { - argName = "testing" - } - pass.Reportf(arg.Pos(), "os.Setenv() can be replaced by `%s.Setenv()` in %s", argName, funcName) + fun, ok := callExpr.Fun.(*ast.SelectorExpr) + if ok { + obj := pass.TypesInfo.ObjectOf(fun.Sel) + checkObj(pass, obj, arg.Pos(), funcName, argName) } } } -func checkIfStmt(pass *analysis.Pass, stmt *ast.IfStmt, funcName, argName string) bool { +func checkIfStmt(pass *analysis.Pass, stmt *ast.IfStmt, funcName, argName string) { assignStmt, ok := stmt.Init.(*ast.AssignStmt) if !ok { - return false + return } rhs, ok := assignStmt.Rhs[0].(*ast.CallExpr) if !ok { - return false - } - fun, ok := rhs.Fun.(*ast.SelectorExpr) - if !ok { - return false + return } - x, ok := fun.X.(*ast.Ident) - if !ok { - return false + ident, ok := rhs.Fun.(*ast.Ident) + if ok { + obj := pass.TypesInfo.ObjectOf(ident) + checkObj(pass, obj, stmt.Pos(), funcName, argName) } - targetName := x.Name + "." + fun.Sel.Name - if targetName == "os.Setenv" { - if argName == "" { - argName = "testing" - } - pass.Reportf(stmt.Pos(), "os.Setenv() can be replaced by `%s.Setenv()` in %s", argName, funcName) + fun, ok := rhs.Fun.(*ast.SelectorExpr) + if ok { + obj := pass.TypesInfo.ObjectOf(fun.Sel) + checkObj(pass, obj, stmt.Pos(), funcName, argName) } - return true } -func checkAssignStmt(pass *analysis.Pass, stmt *ast.AssignStmt, funcName, argName string) bool { +func checkAssignStmt(pass *analysis.Pass, stmt *ast.AssignStmt, funcName, argName string) { rhs, ok := stmt.Rhs[0].(*ast.CallExpr) if !ok { - return false + return + } + ident, ok := rhs.Fun.(*ast.Ident) + if ok { + obj := pass.TypesInfo.ObjectOf(ident) + checkObj(pass, obj, stmt.Pos(), funcName, argName) } fun, ok := rhs.Fun.(*ast.SelectorExpr) - if !ok { - return false + if ok { + obj := pass.TypesInfo.ObjectOf(fun.Sel) + checkObj(pass, obj, stmt.Pos(), funcName, argName) } - x, ok := fun.X.(*ast.Ident) - if !ok { - return false +} + +func checkObj(pass *analysis.Pass, obj types.Object, pos token.Pos, funcName, argName string) { + // For built-in objects, obj.Pkg() returns nil. + var pkgPrefix string + if pkg := obj.Pkg(); pkg != nil { + pkgPrefix = pkg.Name() + "." } - targetName := x.Name + "." 
+ fun.Sel.Name + + targetName := pkgPrefix + obj.Name() if targetName == "os.Setenv" { if argName == "" { argName = "testing" } - pass.Reportf(stmt.Pos(), "os.Setenv() can be replaced by `%s.Setenv()` in %s", argName, funcName) + pass.Reportf(pos, "os.Setenv() can be replaced by `%s.Setenv()` in %s", argName, funcName) } - return true } func checkForStmt(pass *analysis.Pass, stmt *ast.ForStmt, funcName, argName string) { checkStmts(pass, stmt.Body.List, funcName, argName) } -func targetRunner(params []*ast.Field, fileName string) (string, bool) { +func targetRunner(pass *analysis.Pass, params []*ast.Field, fileName string) (string, bool) { for _, p := range params { switch typ := p.Type.(type) { case *ast.StarExpr: - if checkStarExprTarget(typ) { + if checkStarExprTarget(pass, typ) { if len(p.Names) == 0 { return "", false } @@ -201,7 +190,7 @@ func targetRunner(params []*ast.Field, fileName string) (string, bool) { return argName, true } case *ast.SelectorExpr: - if checkSelectorExprTarget(typ) { + if checkSelectorExprTarget(pass, typ) { if len(p.Names) == 0 { return "", false } @@ -216,17 +205,12 @@ func targetRunner(params []*ast.Field, fileName string) (string, bool) { return "", false } -func checkStarExprTarget(typ *ast.StarExpr) bool { +func checkStarExprTarget(pass *analysis.Pass, typ *ast.StarExpr) bool { selector, ok := typ.X.(*ast.SelectorExpr) if !ok { return false } - x, ok := selector.X.(*ast.Ident) - if !ok { - return false - } - targetName := x.Name + "." + selector.Sel.Name - switch targetName { + switch pass.TypesInfo.TypeOf(selector).String() { case "testing.T", "testing.B": return true default: @@ -234,11 +218,6 @@ func checkStarExprTarget(typ *ast.StarExpr) bool { } } -func checkSelectorExprTarget(typ *ast.SelectorExpr) bool { - x, ok := typ.X.(*ast.Ident) - if !ok { - return false - } - targetName := x.Name + "." 
+ typ.Sel.Name - return targetName == "testing.TB" +func checkSelectorExprTarget(pass *analysis.Pass, typ *ast.SelectorExpr) bool { + return pass.TypesInfo.TypeOf(typ).String() == "testing.TB" } diff --git a/vendor/github.com/sonatard/noctx/.gitignore b/vendor/github.com/sonatard/noctx/.gitignore index 2d830686d4..8dfb40ff18 100644 --- a/vendor/github.com/sonatard/noctx/.gitignore +++ b/vendor/github.com/sonatard/noctx/.gitignore @@ -1 +1,2 @@ coverage.out +/noctx diff --git a/vendor/github.com/sonatard/noctx/.golangci.yml b/vendor/github.com/sonatard/noctx/.golangci.yml index 55ebeebdb0..726cb7c89a 100644 --- a/vendor/github.com/sonatard/noctx/.golangci.yml +++ b/vendor/github.com/sonatard/noctx/.golangci.yml @@ -1,14 +1,38 @@ -run: - linters-settings: - govet: - enable-all: true - linters: - enable-all: true - disable: - - gochecknoglobals - - gomnd - - gocognit - - nestif - - nilnil - - paralleltest - - varnamelen \ No newline at end of file +linters: + enable-all: true + disable: + - execinquery # deprecated + - exportloopref # deprecated + - gomnd # deprecated + - gochecknoglobals + - exhaustruct + - mnd + - gocognit + - nestif + - nilnil + - paralleltest + - varnamelen + +linters-settings: + govet: + enable-all: true + perfsprint: + err-error: true + errorf: true + sprintf1: true + strconcat: false + depguard: + rules: + main: + deny: + - pkg: "github.com/instana/testify" + desc: not allowed + - pkg: "github.com/pkg/errors" + desc: Should be replaced by standard lib errors package + +output: + show-stats: true + sort-results: true + sort-order: + - linter + - file diff --git a/vendor/github.com/sonatard/noctx/.goreleaser.yml b/vendor/github.com/sonatard/noctx/.goreleaser.yml new file mode 100644 index 0000000000..9000b50a15 --- /dev/null +++ b/vendor/github.com/sonatard/noctx/.goreleaser.yml @@ -0,0 +1,37 @@ +version: 2 +project_name: noctx + +builds: + - binary: noctx + + main: ./cmd/noctx/main.go + env: + - CGO_ENABLED=0 + flags: + - -trimpath + goos: + - windows + - darwin + - linux + goarch: + - amd64 + - 386 + - arm + - arm64 + goarm: + - 7 + - 6 + - 5 + ignore: + - goos: darwin + goarch: 386 + +archives: + - id: noctx + name_template: '{{ .ProjectName }}_v{{ .Version }}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}{{ if .Mips }}_{{ .Mips }}{{ end }}' + format: tar.gz + format_overrides: + - goos: windows + format: zip + files: + - LICENSE diff --git a/vendor/github.com/sonatard/noctx/Makefile b/vendor/github.com/sonatard/noctx/Makefile index 1a27f6b595..585a9d7e20 100644 --- a/vendor/github.com/sonatard/noctx/Makefile +++ b/vendor/github.com/sonatard/noctx/Makefile @@ -1,6 +1,6 @@ -.PHONY: all imports test lint +.PHONY: all imports test lint build -all: imports test lint +all: imports test lint build imports: goimports -w ./ @@ -14,3 +14,5 @@ test_coverage: lint: golangci-lint run ./... +build: + go build -ldflags "-s -w" -trimpath ./cmd/noctx/ diff --git a/vendor/github.com/sonatard/noctx/README.md b/vendor/github.com/sonatard/noctx/README.md index b3793fc968..00d0024c50 100644 --- a/vendor/github.com/sonatard/noctx/README.md +++ b/vendor/github.com/sonatard/noctx/README.md @@ -52,10 +52,11 @@ linters: $ golangci-lint run # Only noctx execute -golangci-lint run --disable-all -E noctx +golangci-lint run --enable-only noctx ``` ## Detection rules + - Executing following functions - `net/http.Get` - `net/http.Head` @@ -68,6 +69,7 @@ golangci-lint run --disable-all -E noctx - `http.Request` returned by `http.NewRequest` function and passes it to other function. 
## How to fix + - Send http request using `(*http.Client).Do(*http.Request)` method. - In Go 1.13 and later, use `http.NewRequestWithContext` function instead of using `http.NewRequest` function. - In Go 1.12 and earlier, call `(http.Request).WithContext(ctx)` after `http.NewRequest`. @@ -107,7 +109,7 @@ func SendWithContext(ctx context.Context, body io.Reader) error { // Change NewRequest to NewRequestWithContext and pass context it req, err := http.NewRequestWithContext(ctx, http.MethodPost, "http://example.com", body) if err != nil { - return nil + return err } _, err = http.DefaultClient.Do(req) if err != nil { @@ -168,6 +170,7 @@ func main() { ``` ## Reference + - [net/http - NewRequest](https://golang.org/pkg/net/http/#NewRequest) - [net/http - NewRequestWithContext](https://golang.org/pkg/net/http/#NewRequestWithContext) - [net/http - Request.WithContext](https://golang.org/pkg/net/http/#Request.WithContext) diff --git a/vendor/github.com/sonatard/noctx/ngfunc/main.go b/vendor/github.com/sonatard/noctx/ngfunc/main.go index 46306218d2..8d8d97d938 100644 --- a/vendor/github.com/sonatard/noctx/ngfunc/main.go +++ b/vendor/github.com/sonatard/noctx/ngfunc/main.go @@ -39,6 +39,7 @@ func ngCalledFuncs(pass *analysis.Pass, ngFuncs []*types.Func) []*Report { if !ok { panic(fmt.Sprintf("%T is not *buildssa.SSA", pass.ResultOf[buildssa.Analyzer])) } + for _, sf := range ssa.SrcFuncs { for _, b := range sf.Blocks { for _, instr := range b.Instrs { diff --git a/vendor/github.com/sonatard/noctx/ngfunc/report.go b/vendor/github.com/sonatard/noctx/ngfunc/report.go index e500517986..735aa1cf9b 100644 --- a/vendor/github.com/sonatard/noctx/ngfunc/report.go +++ b/vendor/github.com/sonatard/noctx/ngfunc/report.go @@ -24,6 +24,6 @@ func (n *Report) Message() string { func report(pass *analysis.Pass, reports []*Report) { for _, report := range reports { - pass.Reportf(report.Pos(), report.Message()) + pass.Reportf(report.Pos(), "%s", report.Message()) } } diff --git a/vendor/github.com/sonatard/noctx/ngfunc/types.go b/vendor/github.com/sonatard/noctx/ngfunc/types.go index f1877386ce..8f81c6aa24 100644 --- a/vendor/github.com/sonatard/noctx/ngfunc/types.go +++ b/vendor/github.com/sonatard/noctx/ngfunc/types.go @@ -1,7 +1,7 @@ package ngfunc import ( - "fmt" + "errors" "go/types" "strings" @@ -9,7 +9,7 @@ import ( "golang.org/x/tools/go/analysis" ) -var errNotFound = fmt.Errorf("function not found") +var errNotFound = errors.New("function not found") func typeFuncs(pass *analysis.Pass, funcs []string) []*types.Func { fs := make([]*types.Func, 0, len(funcs)) diff --git a/vendor/github.com/sonatard/noctx/noctx.go b/vendor/github.com/sonatard/noctx/noctx.go index 89e0446ecd..fbffb66e74 100644 --- a/vendor/github.com/sonatard/noctx/noctx.go +++ b/vendor/github.com/sonatard/noctx/noctx.go @@ -2,6 +2,7 @@ package noctx import ( "fmt" + "github.com/sonatard/noctx/ngfunc" "github.com/sonatard/noctx/reqwithoutctx" "golang.org/x/tools/go/analysis" diff --git a/vendor/github.com/sonatard/noctx/reqwithoutctx/report.go b/vendor/github.com/sonatard/noctx/reqwithoutctx/report.go index 1c94e3148c..3bf2ea01f5 100644 --- a/vendor/github.com/sonatard/noctx/reqwithoutctx/report.go +++ b/vendor/github.com/sonatard/noctx/reqwithoutctx/report.go @@ -21,6 +21,6 @@ func (n *Report) Message() string { func report(pass *analysis.Pass, reports []*Report) { for _, report := range reports { - pass.Reportf(report.Pos(), report.Message()) + pass.Reportf(report.Pos(), "%s", report.Message()) } } diff --git 
a/vendor/github.com/sonatard/noctx/reqwithoutctx/ssa.go b/vendor/github.com/sonatard/noctx/reqwithoutctx/ssa.go index d7e0f5084d..62707c6b50 100644 --- a/vendor/github.com/sonatard/noctx/reqwithoutctx/ssa.go +++ b/vendor/github.com/sonatard/noctx/reqwithoutctx/ssa.go @@ -5,7 +5,6 @@ import ( "go/types" "github.com/gostaticanalysis/analysisutil" - "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/buildssa" "golang.org/x/tools/go/ssa" @@ -97,7 +96,7 @@ func (a *Analyzer) usedReqByCall(call *ssa.Call) []*ssa.Extract { exts := make([]*ssa.Extract, 0, len(args)) // skip net/http.Request method call - if call.Common().Signature().Recv() != nil && types.Identical(call.Value().Type(), a.requestType) { + if recv := call.Common().Signature().Recv(); recv != nil && types.Identical(recv.Type(), a.requestType) { return exts } diff --git a/vendor/github.com/tetafro/godot/checks.go b/vendor/github.com/tetafro/godot/checks.go index df5019f6c8..d30766358a 100644 --- a/vendor/github.com/tetafro/godot/checks.go +++ b/vendor/github.com/tetafro/godot/checks.go @@ -230,6 +230,8 @@ func isSpecialBlock(comment string) bool { strings.Contains(comment, "#define")) { return true } + // This should only be skipped in test files, but we don't have this + // information here, so - always skip if strings.HasPrefix(comment, "// Output:") || strings.HasPrefix(comment, "// Unordered output:") { return true diff --git a/vendor/github.com/tetafro/godot/getters.go b/vendor/github.com/tetafro/godot/getters.go index 7d3d22fb13..de3d06e105 100644 --- a/vendor/github.com/tetafro/godot/getters.go +++ b/vendor/github.com/tetafro/godot/getters.go @@ -209,12 +209,8 @@ func (pf *parsedFile) getAllComments(exclude []*regexp.Regexp) []comment { // special lines (e.g., tags or indented code examples), they are replaced // with `specialReplacer` to skip checks for them. // The result can be multiline. 
-// -//nolint:cyclop func getText(comment *ast.CommentGroup, exclude []*regexp.Regexp) (s string) { - if len(comment.List) == 1 && - strings.HasPrefix(comment.List[0].Text, "/*") && - isSpecialBlock(comment.List[0].Text) { + if len(comment.List) > 0 && isSpecialBlock(comment.List[0].Text) { return "" } diff --git a/vendor/github.com/timonwong/loggercheck/.golangci.yml b/vendor/github.com/timonwong/loggercheck/.golangci.yml index 2873278937..6d5e17befd 100644 --- a/vendor/github.com/timonwong/loggercheck/.golangci.yml +++ b/vendor/github.com/timonwong/loggercheck/.golangci.yml @@ -20,7 +20,7 @@ linters-settings: min-complexity: 15 goimports: local-prefixes: github.com/timonwong/loggercheck - gomnd: + mnd: # don't include the "operation" and "assign" checks: - argument @@ -36,7 +36,7 @@ linters-settings: - strings.SplitN - strconv.ParseInt govet: - check-shadowing: true + shadow: true lll: line-length: 140 misspell: @@ -52,7 +52,7 @@ linters: - dogsled - dupl - errcheck - - exportloopref + - copyloopvar - funlen - gochecknoinits - goconst @@ -60,7 +60,7 @@ linters: - gocyclo - gofumpt - goimports - - gomnd + - mnd - goprintffuncname - gosec - gosimple @@ -85,10 +85,10 @@ issues: exclude-rules: - path: _test\.go linters: - - gomnd + - mnd + exclude-dirs: + - testdata run: timeout: 5m - go: '1.17' - skip-dirs: - - testdata \ No newline at end of file + go: '1.23' \ No newline at end of file diff --git a/vendor/github.com/timonwong/loggercheck/README.md b/vendor/github.com/timonwong/loggercheck/README.md index 14aeca3717..d8f86fc414 100644 --- a/vendor/github.com/timonwong/loggercheck/README.md +++ b/vendor/github.com/timonwong/loggercheck/README.md @@ -7,6 +7,9 @@ A linter checks the odd number of key and value pairs for common logger librarie - [klog](https://github.com/kubernetes/klog) - [logr](https://github.com/go-logr/logr) - [zap](https://github.com/uber-go/zap) +- [log/slog](https://pkg.go.dev/log/slog) + +It's recommended to use loggercheck with [golangci-lint](https://golangci-lint.run/usage/linters/#loggercheck). 
## Badges @@ -80,14 +83,14 @@ import ( func Example() { log := logr.Discard() - log = log.WithValues("key") - log.Info("message", "key1", "value1", "key2", "value2", "key3") + log = log.WithValues("key") + log.Info("message", "key1", "value1", "key2", "value2", "key3") log.Error(fmt.Errorf("error"), "message", "key1", "value1", "key2") log.Error(fmt.Errorf("error"), "message", "key1", "value1", "key2", "value2") var log2 logr.Logger log2 = log - log2.Info("message", "key1") + log2.Info("message", "key1") log3 := logr.FromContextOrDiscard(context.TODO()) log3.Error(fmt.Errorf("error"), "message", "key1") diff --git a/vendor/github.com/timonwong/loggercheck/internal/bytebufferpool/pool.go b/vendor/github.com/timonwong/loggercheck/internal/bytebufferpool/pool.go deleted file mode 100644 index 9d88d21c49..0000000000 --- a/vendor/github.com/timonwong/loggercheck/internal/bytebufferpool/pool.go +++ /dev/null @@ -1,22 +0,0 @@ -package bytebufferpool - -import ( - "bytes" - "sync" -) - -var pool = sync.Pool{ - New: func() interface{} { - return new(bytes.Buffer) - }, -} - -func Get() *bytes.Buffer { - buf := pool.Get().(*bytes.Buffer) - buf.Reset() - return buf -} - -func Put(buf *bytes.Buffer) { - pool.Put(buf) -} diff --git a/vendor/github.com/timonwong/loggercheck/internal/checkers/checker.go b/vendor/github.com/timonwong/loggercheck/internal/checkers/checker.go index 5615636efb..5fa1cfb2c1 100644 --- a/vendor/github.com/timonwong/loggercheck/internal/checkers/checker.go +++ b/vendor/github.com/timonwong/loggercheck/internal/checkers/checker.go @@ -29,8 +29,7 @@ func ExecuteChecker(c Checker, pass *analysis.Pass, call CallContext, cfg Config nparams := params.Len() // variadic => nonzero startIndex := nparams - 1 - lastArg := params.At(nparams - 1) - iface, ok := lastArg.Type().(*types.Slice).Elem().(*types.Interface) + iface, ok := types.Unalias(params.At(startIndex).Type().(*types.Slice).Elem()).(*types.Interface) if !ok || !iface.Empty() { return // final (args) param is not ...interface{} } diff --git a/vendor/github.com/timonwong/loggercheck/internal/checkers/common.go b/vendor/github.com/timonwong/loggercheck/internal/checkers/common.go index 42cbd01937..977f5d70cf 100644 --- a/vendor/github.com/timonwong/loggercheck/internal/checkers/common.go +++ b/vendor/github.com/timonwong/loggercheck/internal/checkers/common.go @@ -6,11 +6,10 @@ import ( "go/printer" "go/token" "go/types" + "strings" "unicode/utf8" "golang.org/x/tools/go/analysis" - - "github.com/timonwong/loggercheck/internal/bytebufferpool" ) const ( @@ -31,9 +30,7 @@ func extractValueFromStringArg(pass *analysis.Pass, arg ast.Expr) (value string, func renderNodeEllipsis(fset *token.FileSet, v interface{}) string { const maxLen = 20 - buf := bytebufferpool.Get() - defer bytebufferpool.Put(buf) - + buf := &strings.Builder{} _ = printer.Fprint(buf, fset, v) s := buf.String() if utf8.RuneCountInString(s) > maxLen { diff --git a/vendor/github.com/timonwong/loggercheck/internal/checkers/filter.go b/vendor/github.com/timonwong/loggercheck/internal/checkers/filter.go new file mode 100644 index 0000000000..a09a54f99c --- /dev/null +++ b/vendor/github.com/timonwong/loggercheck/internal/checkers/filter.go @@ -0,0 +1,35 @@ +package checkers + +import ( + "go/ast" + "go/types" + + "golang.org/x/tools/go/analysis" +) + +func filterKeyAndValues(pass *analysis.Pass, keyAndValues []ast.Expr, objName string) []ast.Expr { + // Check the argument count + filtered := make([]ast.Expr, 0, len(keyAndValues)) + for _, arg := range keyAndValues { + // 
Skip any object type field we found + switch arg := arg.(type) { + case *ast.CallExpr, *ast.Ident: + typ := types.Unalias(pass.TypesInfo.TypeOf(arg)) + + switch typ := typ.(type) { + case *types.Named: + obj := typ.Obj() + if obj != nil && obj.Name() == objName { + continue + } + + default: + // pass + } + } + + filtered = append(filtered, arg) + } + + return filtered +} diff --git a/vendor/github.com/timonwong/loggercheck/internal/checkers/printf/printf.go b/vendor/github.com/timonwong/loggercheck/internal/checkers/printf/printf.go index b38f46f201..926b57e048 100644 --- a/vendor/github.com/timonwong/loggercheck/internal/checkers/printf/printf.go +++ b/vendor/github.com/timonwong/loggercheck/internal/checkers/printf/printf.go @@ -157,7 +157,7 @@ func (s *formatState) parsePrecision() bool { func parsePrintfVerb(format string) *formatState { state := &formatState{ format: format, - flags: make([]byte, 0, 5), //nolint:gomnd + flags: make([]byte, 0, 5), //nolint:mnd nbytes: 1, // There's guaranteed to be a percent sign. } diff --git a/vendor/github.com/timonwong/loggercheck/internal/checkers/slog.go b/vendor/github.com/timonwong/loggercheck/internal/checkers/slog.go new file mode 100644 index 0000000000..5812e66603 --- /dev/null +++ b/vendor/github.com/timonwong/loggercheck/internal/checkers/slog.go @@ -0,0 +1,19 @@ +package checkers + +import ( + "go/ast" + + "golang.org/x/tools/go/analysis" +) + +type Slog struct { + General +} + +func (z Slog) FilterKeyAndValues(pass *analysis.Pass, keyAndValues []ast.Expr) []ast.Expr { + // check slog.Group() constructed group slog.Attr + // since we also check `slog.Group` so it is OK skip here + return filterKeyAndValues(pass, keyAndValues, "Attr") +} + +var _ Checker = (*Slog)(nil) diff --git a/vendor/github.com/timonwong/loggercheck/internal/checkers/zap.go b/vendor/github.com/timonwong/loggercheck/internal/checkers/zap.go index 2356f83482..4dac21f78e 100644 --- a/vendor/github.com/timonwong/loggercheck/internal/checkers/zap.go +++ b/vendor/github.com/timonwong/loggercheck/internal/checkers/zap.go @@ -2,7 +2,6 @@ package checkers import ( "go/ast" - "go/types" "golang.org/x/tools/go/analysis" ) @@ -12,31 +11,11 @@ type Zap struct { } func (z Zap) FilterKeyAndValues(pass *analysis.Pass, keyAndValues []ast.Expr) []ast.Expr { - // Check the argument count - filtered := make([]ast.Expr, 0, len(keyAndValues)) - for _, arg := range keyAndValues { - // Skip any zapcore.Field we found - switch arg := arg.(type) { - case *ast.CallExpr, *ast.Ident: - typ := pass.TypesInfo.TypeOf(arg) - switch typ := typ.(type) { - case *types.Named: - obj := typ.Obj() - // This is a strongly-typed field. Consume it and move on. - // Actually it's go.uber.org/zap/zapcore.Field, however for simplicity - // we don't check the import path - if obj != nil && obj.Name() == "Field" { - continue - } - default: - // pass - } - } - - filtered = append(filtered, arg) - } - - return filtered + // Skip any zapcore.Field we found + // This is a strongly-typed field. Consume it and move on. 
+ // Actually it's go.uber.org/zap/zapcore.Field, however for simplicity + // we don't check the import path + return filterKeyAndValues(pass, keyAndValues, "Field") } var _ Checker = (*Zap)(nil) diff --git a/vendor/github.com/timonwong/loggercheck/internal/rules/rules.go b/vendor/github.com/timonwong/loggercheck/internal/rules/rules.go index 27d6ebb274..3ed69b5bde 100644 --- a/vendor/github.com/timonwong/loggercheck/internal/rules/rules.go +++ b/vendor/github.com/timonwong/loggercheck/internal/rules/rules.go @@ -7,8 +7,6 @@ import ( "go/types" "io" "strings" - - "github.com/timonwong/loggercheck/internal/bytebufferpool" ) var ErrInvalidRule = errors.New("invalid rule format") @@ -44,8 +42,7 @@ func (rs *Ruleset) Match(fn *types.Func) bool { } func receiverTypeOf(recvType types.Type) string { - buf := bytebufferpool.Get() - defer bytebufferpool.Put(buf) + buf := &strings.Builder{} var recvNamed *types.Named switch recvType := recvType.(type) { diff --git a/vendor/github.com/timonwong/loggercheck/loggercheck.go b/vendor/github.com/timonwong/loggercheck/loggercheck.go index 8bd10aee80..d418c7629a 100644 --- a/vendor/github.com/timonwong/loggercheck/loggercheck.go +++ b/vendor/github.com/timonwong/loggercheck/loggercheck.go @@ -54,7 +54,7 @@ func newLoggerCheck(opts ...Option) *loggercheck { } fs.StringVar(&l.ruleFile, "rulefile", "", "path to a file contains a list of rules") - fs.Var(&l.disable, "disable", "comma-separated list of disabled logger checker (kitlog,klog,logr,zap)") + fs.Var(&l.disable, "disable", "comma-separated list of disabled logger checker (kitlog,klog,logr,zap,slog)") fs.BoolVar(&l.requireStringKey, "requirestringkey", false, "require all logging keys to be inlined constant strings") fs.BoolVar(&l.noPrintfLike, "noprintflike", false, "require printf-like format specifier not present in args") diff --git a/vendor/github.com/timonwong/loggercheck/staticrules.go b/vendor/github.com/timonwong/loggercheck/staticrules.go index f955b34341..1398e47b21 100644 --- a/vendor/github.com/timonwong/loggercheck/staticrules.go +++ b/vendor/github.com/timonwong/loggercheck/staticrules.go @@ -39,10 +39,38 @@ var ( "github.com/go-kit/log.WithSuffix", "(github.com/go-kit/log.Logger).Log", }), + mustNewStaticRuleSet("slog", []string{ + "log/slog.Group", + + "log/slog.With", + + "log/slog.Debug", + "log/slog.Info", + "log/slog.Warn", + "log/slog.Error", + + "log/slog.DebugContext", + "log/slog.InfoContext", + "log/slog.WarnContext", + "log/slog.ErrorContext", + + "(*log/slog.Logger).With", + + "(*log/slog.Logger).Debug", + "(*log/slog.Logger).Info", + "(*log/slog.Logger).Warn", + "(*log/slog.Logger).Error", + + "(*log/slog.Logger).DebugContext", + "(*log/slog.Logger).InfoContext", + "(*log/slog.Logger).WarnContext", + "(*log/slog.Logger).ErrorContext", + }), } checkerByRulesetName = map[string]checkers.Checker{ // by default, checkers.General will be used. - "zap": checkers.Zap{}, + "zap": checkers.Zap{}, + "slog": checkers.Slog{}, } ) diff --git a/vendor/github.com/uudashr/iface/LICENSE b/vendor/github.com/uudashr/iface/LICENSE new file mode 100644 index 0000000000..261eeb9e9f --- /dev/null +++ b/vendor/github.com/uudashr/iface/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/vendor/github.com/uudashr/iface/identical/doc.go b/vendor/github.com/uudashr/iface/identical/doc.go new file mode 100644 index 0000000000..5b848a913b --- /dev/null +++ b/vendor/github.com/uudashr/iface/identical/doc.go @@ -0,0 +1,3 @@ +// Package identical defines an Analyzer that identifies interfaces in the same +// package with identical methods or constraints. +package identical diff --git a/vendor/github.com/uudashr/iface/identical/identical.go b/vendor/github.com/uudashr/iface/identical/identical.go new file mode 100644 index 0000000000..bd573cfc4f --- /dev/null +++ b/vendor/github.com/uudashr/iface/identical/identical.go @@ -0,0 +1,138 @@ +package identical + +import ( + "fmt" + "go/ast" + "go/token" + "go/types" + "reflect" + + "github.com/uudashr/iface/internal/directive" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +// Analyzer is the duplicate interface analyzer. +var Analyzer = newAnalyzer() + +func newAnalyzer() *analysis.Analyzer { + r := runner{} + + analyzer := &analysis.Analyzer{ + Name: "identical", + Doc: "Identifies interfaces in the same package that have identical method sets", + URL: "https://pkg.go.dev/github.com/uudashr/iface/duplicate", + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: r.run, + } + + analyzer.Flags.BoolVar(&r.debug, "debug", false, "enable debug mode") + + return analyzer +} + +type runner struct { + debug bool +} + +func (r *runner) run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + // Collect interface type declarations + ifaceDecls := make(map[string]token.Pos) + ifaceTypes := make(map[string]*types.Interface) + + nodeFilter := []ast.Node{ + (*ast.GenDecl)(nil), + } + + inspect.Preorder(nodeFilter, func(n ast.Node) { + decl, ok := n.(*ast.GenDecl) + if !ok { + return + } + + if r.debug { + fmt.Printf("GenDecl: %v specs=%d\n", decl.Tok, len(decl.Specs)) + } + + if decl.Tok != token.TYPE { + return + } + + for i, spec := range decl.Specs { + if r.debug { + fmt.Printf(" spec[%d]: %v %v\n", i, spec, reflect.TypeOf(spec)) + } + + ts, ok := spec.(*ast.TypeSpec) + if !ok { + return + } + + ifaceType, ok := ts.Type.(*ast.InterfaceType) + if !ok { + return + } + + if r.debug { + fmt.Println("Interface declaration:", ts.Name.Name, ts.Pos(), len(ifaceType.Methods.List)) + + for i, field := range ifaceType.Methods.List { + switch ft := field.Type.(type) { + case *ast.FuncType: + fmt.Printf(" [%d] Field: func %s %v %v\n", i, field.Names[0].Name, reflect.TypeOf(field.Type), field.Pos()) + case *ast.Ident: + fmt.Printf(" [%d] Field: iface %s %v %v\n", i, ft.Name, reflect.TypeOf(field.Type), field.Pos()) + default: + fmt.Printf(" [%d] Field: unknown %v\n", i, reflect.TypeOf(ft)) + } + } + } + + dir := directive.ParseIgnore(decl.Doc) + if dir != nil && dir.ShouldIgnore(pass.Analyzer.Name) { + // skip due to ignore directive + continue + } + + ifaceDecls[ts.Name.Name] = ts.Pos() + + obj := pass.TypesInfo.Defs[ts.Name] + if obj == nil { + return + } + + iface, ok := obj.Type().Underlying().(*types.Interface) + if !ok { + return + } + + ifaceTypes[ts.Name.Name] = iface + } + }) + +Loop: + for name, typ := range ifaceTypes { + for otherName, otherTyp := range ifaceTypes { + if name == otherName { + continue + } + + if !types.Identical(typ, otherTyp) { + continue + } + + if r.debug { + fmt.Println("Identical interface:", name, "and", otherName) + } + + pass.Reportf(ifaceDecls[name], "interface 
%s contains identical methods or type constraints from another interface, causing redundancy", name) + + continue Loop + } + } + + return nil, nil +} diff --git a/vendor/github.com/uudashr/iface/internal/directive/directive.go b/vendor/github.com/uudashr/iface/internal/directive/directive.go new file mode 100644 index 0000000000..05c62928ed --- /dev/null +++ b/vendor/github.com/uudashr/iface/internal/directive/directive.go @@ -0,0 +1,76 @@ +package directive + +import ( + "go/ast" + "slices" + "strings" +) + +// Ignore represent a special instruction embebded in the source code. +// +// The directive can be as simple as +// +// //iface:ignore +// +// or consist of name +// +// //iface:ignore=unused +// +// or multiple names +// +// //iface:ignore=unused,identical +type Ignore struct { + Names []string +} + +// ParseIgnore parse the directive from the comments. +func ParseIgnore(doc *ast.CommentGroup) *Ignore { + if doc == nil { + return nil + } + + for _, comment := range doc.List { + text := strings.TrimSpace(comment.Text) + if text == "//iface:ignore" { + return &Ignore{} + } + + // parse the Names if exists + if val, found := strings.CutPrefix(text, "//iface:ignore="); found { + val = strings.TrimSpace(val) + if val == "" { + return &Ignore{} + } + + names := strings.Split(val, ",") + if len(names) == 0 { + continue + } + + for i, name := range names { + names[i] = strings.TrimSpace(name) + } + + if len(names) > 0 { + return &Ignore{Names: names} + } + + return &Ignore{} + } + } + + return nil +} + +func (i *Ignore) hasName(name string) bool { + return slices.Contains(i.Names, name) +} + +// ShouldIgnore return true if the name should be ignored. +func (i *Ignore) ShouldIgnore(name string) bool { + if len(i.Names) == 0 { + return true + } + + return i.hasName(name) +} diff --git a/vendor/github.com/uudashr/iface/opaque/doc.go b/vendor/github.com/uudashr/iface/opaque/doc.go new file mode 100644 index 0000000000..1e5a538979 --- /dev/null +++ b/vendor/github.com/uudashr/iface/opaque/doc.go @@ -0,0 +1,3 @@ +// Package opaque defines an Analyzer to that detects interfaces that are used +// to abstract a single concrete implementation only. +package opaque diff --git a/vendor/github.com/uudashr/iface/opaque/opaque.go b/vendor/github.com/uudashr/iface/opaque/opaque.go new file mode 100644 index 0000000000..fda0f001ba --- /dev/null +++ b/vendor/github.com/uudashr/iface/opaque/opaque.go @@ -0,0 +1,321 @@ +package opaque + +import ( + "fmt" + "go/ast" + "go/types" + "reflect" + "strings" + + "github.com/uudashr/iface/internal/directive" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +// Analyzer is the opaque interface analyzer. 
+var Analyzer = newAnalyzer() + +func newAnalyzer() *analysis.Analyzer { + r := runner{} + + analyzer := &analysis.Analyzer{ + Name: "opaque", + Doc: "Identifies functions that return interfaces, but the actual returned value is always a single concrete implementation.", + URL: "https://pkg.go.dev/github.com/uudashr/iface/opaque", + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: r.run, + } + + analyzer.Flags.BoolVar(&r.debug, "debug", false, "enable debug mode") + + return analyzer +} + +type runner struct { + debug bool +} + +func (r *runner) run(pass *analysis.Pass) (interface{}, error) { + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + // Find function declarations that return an interface + + nodeFilter := []ast.Node{ + (*ast.FuncDecl)(nil), + } + + inspect.Preorder(nodeFilter, func(n ast.Node) { + funcDecl := n.(*ast.FuncDecl) + + if funcDecl.Recv != nil { + // skip methods + return + } + + if funcDecl.Type.Results == nil { + // skip functions without return values + return + } + + if r.debug { + fmt.Printf("Function declaration %s\n", funcDecl.Name.Name) + fmt.Printf(" Results len=%d\n", len(funcDecl.Type.Results.List)) + } + + dir := directive.ParseIgnore(funcDecl.Doc) + if dir != nil && dir.ShouldIgnore(pass.Analyzer.Name) { + // skip ignored function + return + } + + // Pre-check, only function that has interface return type will be processed + var hasInterfaceReturnType bool + + var outCount int + + for i, result := range funcDecl.Type.Results.List { + outInc := 1 + if namesLen := len(result.Names); namesLen > 0 { + outInc = namesLen + } + + outCount += outInc + + resType := result.Type + typ := pass.TypesInfo.TypeOf(resType) + + if r.debug { + fmt.Printf(" [%d] len=%d %v %v %v | %v %v interface=%t\n", i, len(result.Names), result.Names, resType, reflect.TypeOf(resType), typ, reflect.TypeOf(typ), types.IsInterface(typ)) + } + + if types.IsInterface(typ) && !hasInterfaceReturnType { + hasInterfaceReturnType = true + } + } + + if r.debug { + fmt.Printf(" hasInterface=%t outCount=%d\n", hasInterfaceReturnType, outCount) + } + + if !hasInterfaceReturnType { + // skip, since it has no interface return type + return + } + + // Collect types on every return statement + retStmtTypes := make([]map[types.Type]struct{}, outCount) + for i := range retStmtTypes { + retStmtTypes[i] = make(map[types.Type]struct{}) + } + + ast.Inspect(funcDecl.Body, func(n ast.Node) bool { + switch n := n.(type) { + case *ast.FuncLit: + // ignore nested functions + return false + case *ast.ReturnStmt: + if r.debug { + fmt.Printf(" Return statements %v len=%d\n", n.Results, len(n.Results)) + } + + for i, result := range n.Results { + if r.debug { + fmt.Printf(" [%d] %v %v\n", i, result, reflect.TypeOf(result)) + } + + switch res := result.(type) { + case *ast.CallExpr: + if r.debug { + fmt.Printf(" CallExpr Fun: %v %v\n", res.Fun, reflect.TypeOf(res.Fun)) + } + + typ := pass.TypesInfo.TypeOf(res) + switch typ := typ.(type) { + case *types.Tuple: + for i := 0; i < typ.Len(); i++ { + v := typ.At(i) + vTyp := v.Type() + retStmtTypes[i][vTyp] = struct{}{} + + if r.debug { + fmt.Printf(" Tuple [%d]: %v %v | %v %v interface=%t\n", i, v, reflect.TypeOf(v), vTyp, reflect.TypeOf(vTyp), types.IsInterface(vTyp)) + } + } + default: + retStmtTypes[i][typ] = struct{}{} + } + + case *ast.Ident: + if r.debug { + fmt.Printf(" Ident: %v %v\n", res, reflect.TypeOf(res)) + } + + typ := pass.TypesInfo.TypeOf(res) + + if r.debug { + fmt.Printf(" Ident type: %v %v interface=%t\n", typ, 
reflect.TypeOf(typ), types.IsInterface(typ)) + } + + retStmtTypes[i][typ] = struct{}{} + case *ast.UnaryExpr: + if r.debug { + fmt.Printf(" UnaryExpr X: %v \n", res.X) + } + + typ := pass.TypesInfo.TypeOf(res) + + if r.debug { + fmt.Printf(" UnaryExpr type: %v %v interface=%t\n", typ, reflect.TypeOf(typ), types.IsInterface(typ)) + } + + retStmtTypes[i][typ] = struct{}{} + default: + if r.debug { + fmt.Printf(" Unknown: %v %v\n", res, reflect.TypeOf(res)) + } + + typ := pass.TypesInfo.TypeOf(res) + retStmtTypes[i][typ] = struct{}{} + } + } + + return false + default: + return true + } + }) + + // Compare func return types with the return statement types + var nextIdx int + + for _, result := range funcDecl.Type.Results.List { + resType := result.Type + typ := pass.TypesInfo.TypeOf(resType) + + consumeCount := 1 + if len(result.Names) > 0 { + consumeCount = len(result.Names) + } + + currentIdx := nextIdx + nextIdx += consumeCount + + // Check return type + if !types.IsInterface(typ) { + // it is a concrete type + continue + } + + if typ.String() == "error" { + // very common case to have return type error + continue + } + + if !fromSamePackage(pass, typ) { + // ignore interface from other package + continue + } + + // Check statement type + stmtTyps := retStmtTypes[currentIdx] + + stmtTypsSize := len(stmtTyps) + if stmtTypsSize > 1 { + // it has multiple implementation + continue + } + + if stmtTypsSize == 0 { + // function use named return value, while return statement is empty + continue + } + + var stmtTyp types.Type + for t := range stmtTyps { + // expect only one, we don't have to break it + stmtTyp = t + } + + if types.IsInterface(stmtTyp) { + // not concrete type, skip + continue + } + + if r.debug { + fmt.Printf("stmtType: %v %v | %v %v\n", stmtTyp, reflect.TypeOf(stmtTyp), stmtTyp.Underlying(), reflect.TypeOf(stmtTyp.Underlying())) + } + + switch stmtTyp := stmtTyp.(type) { + case *types.Basic: + if stmtTyp.Kind() == types.UntypedNil { + // ignore nil + continue + } + case *types.Named: + if _, ok := stmtTyp.Underlying().(*types.Signature); ok { + // skip function type + continue + } + } + + retTypeName := typ.String() + if fromSamePackage(pass, typ) { + retTypeName = removePkgPrefix(retTypeName) + } + + stmtTypName := stmtTyp.String() + if fromSamePackage(pass, stmtTyp) { + stmtTypName = removePkgPrefix(stmtTypName) + } + + pass.Reportf(result.Pos(), + "%s function return %s interface at the %s result, abstract a single concrete implementation of %s", + funcDecl.Name.Name, + retTypeName, + positionStr(currentIdx), + stmtTypName) + } + }) + + return nil, nil +} + +func positionStr(idx int) string { + switch idx { + case 0: + return "1st" + case 1: + return "2nd" + case 2: + return "3rd" + default: + return fmt.Sprintf("%dth", idx+1) + } +} + +func fromSamePackage(pass *analysis.Pass, typ types.Type) bool { + switch typ := typ.(type) { + case *types.Named: + currentPkg := pass.Pkg + ifacePkg := typ.Obj().Pkg() + + return currentPkg == ifacePkg + case *types.Pointer: + return fromSamePackage(pass, typ.Elem()) + default: + return false + } +} + +func removePkgPrefix(typeStr string) string { + if typeStr[0] == '*' { + return "*" + removePkgPrefix(typeStr[1:]) + } + + if lastDot := strings.LastIndex(typeStr, "."); lastDot != -1 { + return typeStr[lastDot+1:] + } + + return typeStr +} diff --git a/vendor/github.com/uudashr/iface/unused/doc.go b/vendor/github.com/uudashr/iface/unused/doc.go new file mode 100644 index 0000000000..d5b6f24bba --- /dev/null +++ 
b/vendor/github.com/uudashr/iface/unused/doc.go @@ -0,0 +1,3 @@ +// Package unused defines an Analyzer that indetifies interfaces that are not +// used anywhere in the same package where the interface is defined. +package unused diff --git a/vendor/github.com/uudashr/iface/unused/unused.go b/vendor/github.com/uudashr/iface/unused/unused.go new file mode 100644 index 0000000000..c2efbf52c8 --- /dev/null +++ b/vendor/github.com/uudashr/iface/unused/unused.go @@ -0,0 +1,138 @@ +package unused + +import ( + "fmt" + "go/ast" + "go/token" + "reflect" + "slices" + "strings" + + "github.com/uudashr/iface/internal/directive" + "golang.org/x/tools/go/analysis" + "golang.org/x/tools/go/analysis/passes/inspect" + "golang.org/x/tools/go/ast/inspector" +) + +// Analyzer is the unused interface analyzer. +var Analyzer = newAnalyzer() + +func newAnalyzer() *analysis.Analyzer { + r := runner{} + + analyzer := &analysis.Analyzer{ + Name: "unused", + Doc: "Identifies interfaces that are not used anywhere in the same package where the interface is defined", + URL: "https://pkg.go.dev/github.com/uudashr/iface/unused", + Requires: []*analysis.Analyzer{inspect.Analyzer}, + Run: r.run, + } + + analyzer.Flags.BoolVar(&r.debug, "debug", false, "enable debug mode") + analyzer.Flags.StringVar(&r.exclude, "exclude", "", "comma-separated list of packages to exclude from the check") + + return analyzer +} + +type runner struct { + debug bool + exclude string +} + +func (r *runner) run(pass *analysis.Pass) (interface{}, error) { + excludes := strings.Split(r.exclude, ",") + if slices.Contains(excludes, pass.Pkg.Path()) { + return nil, nil + } + + inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + + // Collect all interface type declarations + ifaceDecls := make(map[string]token.Pos) + + nodeFilter := []ast.Node{ + (*ast.GenDecl)(nil), + } + + inspect.Preorder(nodeFilter, func(n ast.Node) { + decl, ok := n.(*ast.GenDecl) + if !ok { + return + } + + if r.debug { + fmt.Printf("GenDecl: %v specs=%d\n", decl.Tok, len(decl.Specs)) + } + + if decl.Tok != token.TYPE { + return + } + + for i, spec := range decl.Specs { + if r.debug { + fmt.Printf(" spec[%d]: %v %v\n", i, spec, reflect.TypeOf(spec)) + } + + ts, ok := spec.(*ast.TypeSpec) + if !ok { + continue + } + + _, ok = ts.Type.(*ast.InterfaceType) + if !ok { + return + } + + if r.debug { + fmt.Println(" Interface type declaration:", ts.Name.Name, ts.Pos()) + } + + dir := directive.ParseIgnore(decl.Doc) + if dir != nil && dir.ShouldIgnore(pass.Analyzer.Name) { + // skip due to ignore directive + continue + } + + ifaceDecls[ts.Name.Name] = ts.Pos() + } + }) + + if r.debug { + var ifaceNames []string + for name := range ifaceDecls { + ifaceNames = append(ifaceNames, name) + } + + fmt.Println("Declared interfaces:", ifaceNames) + } + + // Inspect whether the interface is used within the package + nodeFilter = []ast.Node{ + (*ast.Ident)(nil), + } + + inspect.Preorder(nodeFilter, func(n ast.Node) { + ident, ok := n.(*ast.Ident) + if !ok { + return + } + + pos := ifaceDecls[ident.Name] + if pos == ident.Pos() { + // The identifier is the interface type declaration + return + } + + delete(ifaceDecls, ident.Name) + }) + + if r.debug { + fmt.Printf("Package %s %s\n", pass.Pkg.Path(), pass.Pkg.Name()) + } + + for name, pos := range ifaceDecls { + pass.Reportf(pos, "interface %s is declared but not used within the package", name) + } + + return nil, nil +} diff --git a/vendor/go-simpler.org/musttag/builtins.go b/vendor/go-simpler.org/musttag/builtins.go index 
3305513f8f..60fa89413c 100644 --- a/vendor/go-simpler.org/musttag/builtins.go +++ b/vendor/go-simpler.org/musttag/builtins.go @@ -3,131 +3,65 @@ package musttag // builtins is a set of functions supported out of the box. var builtins = []Func{ // https://pkg.go.dev/encoding/json - { - Name: "encoding/json.Marshal", Tag: "json", ArgPos: 0, - ifaceWhitelist: []string{"encoding/json.Marshaler", "encoding.TextMarshaler"}, - }, - { - Name: "encoding/json.MarshalIndent", Tag: "json", ArgPos: 0, - ifaceWhitelist: []string{"encoding/json.Marshaler", "encoding.TextMarshaler"}, - }, - { - Name: "encoding/json.Unmarshal", Tag: "json", ArgPos: 1, - ifaceWhitelist: []string{"encoding/json.Unmarshaler", "encoding.TextUnmarshaler"}, - }, - { - Name: "(*encoding/json.Encoder).Encode", Tag: "json", ArgPos: 0, - ifaceWhitelist: []string{"encoding/json.Marshaler", "encoding.TextMarshaler"}, - }, - { - Name: "(*encoding/json.Decoder).Decode", Tag: "json", ArgPos: 0, - ifaceWhitelist: []string{"encoding/json.Unmarshaler", "encoding.TextUnmarshaler"}, - }, + {"encoding/json.Marshal", "json", 0, []string{"encoding/json.Marshaler", "encoding.TextMarshaler"}}, + {"encoding/json.MarshalIndent", "json", 0, []string{"encoding/json.Marshaler", "encoding.TextMarshaler"}}, + {"encoding/json.Unmarshal", "json", 1, []string{"encoding/json.Unmarshaler", "encoding.TextUnmarshaler"}}, + {"(*encoding/json.Encoder).Encode", "json", 0, []string{"encoding/json.Marshaler", "encoding.TextMarshaler"}}, + {"(*encoding/json.Decoder).Decode", "json", 0, []string{"encoding/json.Unmarshaler", "encoding.TextUnmarshaler"}}, // https://pkg.go.dev/encoding/xml - { - Name: "encoding/xml.Marshal", Tag: "xml", ArgPos: 0, - ifaceWhitelist: []string{"encoding/xml.Marshaler", "encoding.TextMarshaler"}, - }, - { - Name: "encoding/xml.MarshalIndent", Tag: "xml", ArgPos: 0, - ifaceWhitelist: []string{"encoding/xml.Marshaler", "encoding.TextMarshaler"}, - }, - { - Name: "encoding/xml.Unmarshal", Tag: "xml", ArgPos: 1, - ifaceWhitelist: []string{"encoding/xml.Unmarshaler", "encoding.TextUnmarshaler"}, - }, - { - Name: "(*encoding/xml.Encoder).Encode", Tag: "xml", ArgPos: 0, - ifaceWhitelist: []string{"encoding/xml.Marshaler", "encoding.TextMarshaler"}, - }, - { - Name: "(*encoding/xml.Decoder).Decode", Tag: "xml", ArgPos: 0, - ifaceWhitelist: []string{"encoding/xml.Unmarshaler", "encoding.TextUnmarshaler"}, - }, - { - Name: "(*encoding/xml.Encoder).EncodeElement", Tag: "xml", ArgPos: 0, - ifaceWhitelist: []string{"encoding/xml.Marshaler", "encoding.TextMarshaler"}, - }, - { - Name: "(*encoding/xml.Decoder).DecodeElement", Tag: "xml", ArgPos: 0, - ifaceWhitelist: []string{"encoding/xml.Unmarshaler", "encoding.TextUnmarshaler"}, - }, + {"encoding/xml.Marshal", "xml", 0, []string{"encoding/xml.Marshaler", "encoding.TextMarshaler"}}, + {"encoding/xml.MarshalIndent", "xml", 0, []string{"encoding/xml.Marshaler", "encoding.TextMarshaler"}}, + {"encoding/xml.Unmarshal", "xml", 1, []string{"encoding/xml.Unmarshaler", "encoding.TextUnmarshaler"}}, + {"(*encoding/xml.Encoder).Encode", "xml", 0, []string{"encoding/xml.Marshaler", "encoding.TextMarshaler"}}, + {"(*encoding/xml.Decoder).Decode", "xml", 0, []string{"encoding/xml.Unmarshaler", "encoding.TextUnmarshaler"}}, + {"(*encoding/xml.Encoder).EncodeElement", "xml", 0, []string{"encoding/xml.Marshaler", "encoding.TextMarshaler"}}, + {"(*encoding/xml.Decoder).DecodeElement", "xml", 0, []string{"encoding/xml.Unmarshaler", "encoding.TextUnmarshaler"}}, // https://pkg.go.dev/gopkg.in/yaml.v3 - { - Name: 
"gopkg.in/yaml.v3.Marshal", Tag: "yaml", ArgPos: 0, - ifaceWhitelist: []string{"gopkg.in/yaml.v3.Marshaler"}, - }, - { - Name: "gopkg.in/yaml.v3.Unmarshal", Tag: "yaml", ArgPos: 1, - ifaceWhitelist: []string{"gopkg.in/yaml.v3.Unmarshaler"}, - }, - { - Name: "(*gopkg.in/yaml.v3.Encoder).Encode", Tag: "yaml", ArgPos: 0, - ifaceWhitelist: []string{"gopkg.in/yaml.v3.Marshaler"}, - }, - { - Name: "(*gopkg.in/yaml.v3.Decoder).Decode", Tag: "yaml", ArgPos: 0, - ifaceWhitelist: []string{"gopkg.in/yaml.v3.Unmarshaler"}, - }, + {"gopkg.in/yaml.v3.Marshal", "yaml", 0, []string{"gopkg.in/yaml.v3.Marshaler"}}, + {"gopkg.in/yaml.v3.Unmarshal", "yaml", 1, []string{"gopkg.in/yaml.v3.Unmarshaler"}}, + {"(*gopkg.in/yaml.v3.Encoder).Encode", "yaml", 0, []string{"gopkg.in/yaml.v3.Marshaler"}}, + {"(*gopkg.in/yaml.v3.Decoder).Decode", "yaml", 0, []string{"gopkg.in/yaml.v3.Unmarshaler"}}, // https://pkg.go.dev/github.com/BurntSushi/toml - { - Name: "github.com/BurntSushi/toml.Unmarshal", Tag: "toml", ArgPos: 1, - ifaceWhitelist: []string{"github.com/BurntSushi/toml.Unmarshaler", "encoding.TextUnmarshaler"}, - }, - { - Name: "github.com/BurntSushi/toml.Decode", Tag: "toml", ArgPos: 1, - ifaceWhitelist: []string{"github.com/BurntSushi/toml.Unmarshaler", "encoding.TextUnmarshaler"}, - }, - { - Name: "github.com/BurntSushi/toml.DecodeFS", Tag: "toml", ArgPos: 2, - ifaceWhitelist: []string{"github.com/BurntSushi/toml.Unmarshaler", "encoding.TextUnmarshaler"}, - }, - { - Name: "github.com/BurntSushi/toml.DecodeFile", Tag: "toml", ArgPos: 1, - ifaceWhitelist: []string{"github.com/BurntSushi/toml.Unmarshaler", "encoding.TextUnmarshaler"}, - }, - { - Name: "(*github.com/BurntSushi/toml.Encoder).Encode", Tag: "toml", ArgPos: 0, - ifaceWhitelist: []string{"encoding.TextMarshaler"}, - }, - { - Name: "(*github.com/BurntSushi/toml.Decoder).Decode", Tag: "toml", ArgPos: 0, - ifaceWhitelist: []string{"github.com/BurntSushi/toml.Unmarshaler", "encoding.TextUnmarshaler"}, - }, + {"github.com/BurntSushi/toml.Unmarshal", "toml", 1, []string{"github.com/BurntSushi/toml.Unmarshaler", "encoding.TextUnmarshaler"}}, + {"github.com/BurntSushi/toml.Decode", "toml", 1, []string{"github.com/BurntSushi/toml.Unmarshaler", "encoding.TextUnmarshaler"}}, + {"github.com/BurntSushi/toml.DecodeFS", "toml", 2, []string{"github.com/BurntSushi/toml.Unmarshaler", "encoding.TextUnmarshaler"}}, + {"github.com/BurntSushi/toml.DecodeFile", "toml", 1, []string{"github.com/BurntSushi/toml.Unmarshaler", "encoding.TextUnmarshaler"}}, + {"(*github.com/BurntSushi/toml.Encoder).Encode", "toml", 0, []string{"encoding.TextMarshaler"}}, + {"(*github.com/BurntSushi/toml.Decoder).Decode", "toml", 0, []string{"github.com/BurntSushi/toml.Unmarshaler", "encoding.TextUnmarshaler"}}, // https://pkg.go.dev/github.com/mitchellh/mapstructure - {Name: "github.com/mitchellh/mapstructure.Decode", Tag: "mapstructure", ArgPos: 1}, - {Name: "github.com/mitchellh/mapstructure.DecodeMetadata", Tag: "mapstructure", ArgPos: 1}, - {Name: "github.com/mitchellh/mapstructure.WeakDecode", Tag: "mapstructure", ArgPos: 1}, - {Name: "github.com/mitchellh/mapstructure.WeakDecodeMetadata", Tag: "mapstructure", ArgPos: 1}, + {"github.com/mitchellh/mapstructure.Decode", "mapstructure", 1, nil}, + {"github.com/mitchellh/mapstructure.DecodeMetadata", "mapstructure", 1, nil}, + {"github.com/mitchellh/mapstructure.WeakDecode", "mapstructure", 1, nil}, + {"github.com/mitchellh/mapstructure.WeakDecodeMetadata", "mapstructure", 1, nil}, // https://pkg.go.dev/github.com/jmoiron/sqlx - {Name: 
"github.com/jmoiron/sqlx.Get", Tag: "db", ArgPos: 1}, - {Name: "github.com/jmoiron/sqlx.GetContext", Tag: "db", ArgPos: 2}, - {Name: "github.com/jmoiron/sqlx.Select", Tag: "db", ArgPos: 1}, - {Name: "github.com/jmoiron/sqlx.SelectContext", Tag: "db", ArgPos: 2}, - {Name: "github.com/jmoiron/sqlx.StructScan", Tag: "db", ArgPos: 1}, - {Name: "(*github.com/jmoiron/sqlx.Conn).GetContext", Tag: "db", ArgPos: 1}, - {Name: "(*github.com/jmoiron/sqlx.Conn).SelectContext", Tag: "db", ArgPos: 1}, - {Name: "(*github.com/jmoiron/sqlx.DB).Get", Tag: "db", ArgPos: 0}, - {Name: "(*github.com/jmoiron/sqlx.DB).GetContext", Tag: "db", ArgPos: 1}, - {Name: "(*github.com/jmoiron/sqlx.DB).Select", Tag: "db", ArgPos: 0}, - {Name: "(*github.com/jmoiron/sqlx.DB).SelectContext", Tag: "db", ArgPos: 1}, - {Name: "(*github.com/jmoiron/sqlx.NamedStmt).Get", Tag: "db", ArgPos: 0}, - {Name: "(*github.com/jmoiron/sqlx.NamedStmt).GetContext", Tag: "db", ArgPos: 1}, - {Name: "(*github.com/jmoiron/sqlx.NamedStmt).Select", Tag: "db", ArgPos: 0}, - {Name: "(*github.com/jmoiron/sqlx.NamedStmt).SelectContext", Tag: "db", ArgPos: 1}, - {Name: "(*github.com/jmoiron/sqlx.Row).StructScan", Tag: "db", ArgPos: 0}, - {Name: "(*github.com/jmoiron/sqlx.Rows).StructScan", Tag: "db", ArgPos: 0}, - {Name: "(*github.com/jmoiron/sqlx.Stmt).Get", Tag: "db", ArgPos: 0}, - {Name: "(*github.com/jmoiron/sqlx.Stmt).GetContext", Tag: "db", ArgPos: 1}, - {Name: "(*github.com/jmoiron/sqlx.Stmt).Select", Tag: "db", ArgPos: 0}, - {Name: "(*github.com/jmoiron/sqlx.Stmt).SelectContext", Tag: "db", ArgPos: 1}, - {Name: "(*github.com/jmoiron/sqlx.Tx).Get", Tag: "db", ArgPos: 0}, - {Name: "(*github.com/jmoiron/sqlx.Tx).GetContext", Tag: "db", ArgPos: 1}, - {Name: "(*github.com/jmoiron/sqlx.Tx).Select", Tag: "db", ArgPos: 0}, - {Name: "(*github.com/jmoiron/sqlx.Tx).SelectContext", Tag: "db", ArgPos: 1}, + {"github.com/jmoiron/sqlx.Get", "db", 1, []string{"database/sql.Scanner"}}, + {"github.com/jmoiron/sqlx.GetContext", "db", 2, []string{"database/sql.Scanner"}}, + {"github.com/jmoiron/sqlx.Select", "db", 1, []string{"database/sql.Scanner"}}, + {"github.com/jmoiron/sqlx.SelectContext", "db", 2, []string{"database/sql.Scanner"}}, + {"github.com/jmoiron/sqlx.StructScan", "db", 1, []string{"database/sql.Scanner"}}, + {"(*github.com/jmoiron/sqlx.Conn).GetContext", "db", 1, []string{"database/sql.Scanner"}}, + {"(*github.com/jmoiron/sqlx.Conn).SelectContext", "db", 1, []string{"database/sql.Scanner"}}, + {"(*github.com/jmoiron/sqlx.DB).Get", "db", 0, []string{"database/sql.Scanner"}}, + {"(*github.com/jmoiron/sqlx.DB).GetContext", "db", 1, []string{"database/sql.Scanner"}}, + {"(*github.com/jmoiron/sqlx.DB).Select", "db", 0, []string{"database/sql.Scanner"}}, + {"(*github.com/jmoiron/sqlx.DB).SelectContext", "db", 1, []string{"database/sql.Scanner"}}, + {"(*github.com/jmoiron/sqlx.NamedStmt).Get", "db", 0, []string{"database/sql.Scanner"}}, + {"(*github.com/jmoiron/sqlx.NamedStmt).GetContext", "db", 1, []string{"database/sql.Scanner"}}, + {"(*github.com/jmoiron/sqlx.NamedStmt).Select", "db", 0, []string{"database/sql.Scanner"}}, + {"(*github.com/jmoiron/sqlx.NamedStmt).SelectContext", "db", 1, []string{"database/sql.Scanner"}}, + {"(*github.com/jmoiron/sqlx.Row).StructScan", "db", 0, []string{"database/sql.Scanner"}}, + {"(*github.com/jmoiron/sqlx.Rows).StructScan", "db", 0, []string{"database/sql.Scanner"}}, + {"(*github.com/jmoiron/sqlx.Stmt).Get", "db", 0, []string{"database/sql.Scanner"}}, + {"(*github.com/jmoiron/sqlx.Stmt).GetContext", "db", 1, 
[]string{"database/sql.Scanner"}}, + {"(*github.com/jmoiron/sqlx.Stmt).Select", "db", 0, []string{"database/sql.Scanner"}}, + {"(*github.com/jmoiron/sqlx.Stmt).SelectContext", "db", 1, []string{"database/sql.Scanner"}}, + {"(*github.com/jmoiron/sqlx.Tx).Get", "db", 0, []string{"database/sql.Scanner"}}, + {"(*github.com/jmoiron/sqlx.Tx).GetContext", "db", 1, []string{"database/sql.Scanner"}}, + {"(*github.com/jmoiron/sqlx.Tx).Select", "db", 0, []string{"database/sql.Scanner"}}, + {"(*github.com/jmoiron/sqlx.Tx).SelectContext", "db", 1, []string{"database/sql.Scanner"}}, } diff --git a/vendor/go-simpler.org/musttag/musttag.go b/vendor/go-simpler.org/musttag/musttag.go index 70c84201b0..f8c0352d20 100644 --- a/vendor/go-simpler.org/musttag/musttag.go +++ b/vendor/go-simpler.org/musttag/musttag.go @@ -91,17 +91,17 @@ func run(pass *analysis.Pass, mainModule string, funcs map[string]Func) (_ any, call, ok := node.(*ast.CallExpr) if !ok { - return // not a function call. + return } callee := typeutil.StaticCallee(pass.TypesInfo, call) if callee == nil { - return // not a static call. + return } fn, ok := funcs[cutVendor(callee.FullName())] if !ok { - return // unsupported function. + return } if len(call.Args) <= fn.ArgPos { @@ -116,7 +116,7 @@ func run(pass *analysis.Pass, mainModule string, funcs map[string]Func) (_ any, typ := pass.TypesInfo.TypeOf(arg) if typ == nil { - return // no type info found. + return } checker := checker{ @@ -125,9 +125,8 @@ func run(pass *analysis.Pass, mainModule string, funcs map[string]Func) (_ any, ifaceWhitelist: fn.ifaceWhitelist, imports: pass.Pkg.Imports(), } - - if valid := checker.checkType(typ, fn.Tag); valid { - return // nothing to report. + if checker.isValidType(typ, fn.Tag) { + return } pass.Reportf(arg.Pos(), "the given struct should be annotated with the `%s` tag", fn.Tag) @@ -143,43 +142,32 @@ type checker struct { imports []*types.Package } -func (c *checker) checkType(typ types.Type, tag string) bool { +func (c *checker) isValidType(typ types.Type, tag string) bool { if _, ok := c.seenTypes[typ.String()]; ok { - return true // already checked. + return true } c.seenTypes[typ.String()] = struct{}{} styp, ok := c.parseStruct(typ) if !ok { - return true // not a struct. + return true } - return c.checkStruct(styp, tag) + return c.isValidStruct(styp, tag) } -// recursively unwrap a type until we get to an underlying -// raw struct type that should have its fields checked -// -// SomeStruct -> struct{SomeStructField: ... } -// []*SomeStruct -> struct{SomeStructField: ... } -// ... -// -// exits early if it hits a type that implements a whitelisted interface func (c *checker) parseStruct(typ types.Type) (*types.Struct, bool) { if implementsInterface(typ, c.ifaceWhitelist, c.imports) { - return nil, false // the type implements a Marshaler interface; see issue #64. 
+ return nil, false } switch typ := typ.(type) { case *types.Pointer: return c.parseStruct(typ.Elem()) - case *types.Array: return c.parseStruct(typ.Elem()) - case *types.Slice: return c.parseStruct(typ.Elem()) - case *types.Map: return c.parseStruct(typ.Elem()) @@ -205,7 +193,7 @@ func (c *checker) parseStruct(typ types.Type) (*types.Struct, bool) { } } -func (c *checker) checkStruct(styp *types.Struct, tag string) (valid bool) { +func (c *checker) isValidStruct(styp *types.Struct, tag string) bool { for i := 0; i < styp.NumFields(); i++ { field := styp.Field(i) if !field.Exported() { @@ -214,18 +202,18 @@ func (c *checker) checkStruct(styp *types.Struct, tag string) (valid bool) { tagValue, ok := reflect.StructTag(styp.Tag(i)).Lookup(tag) if !ok { - // tag is not required for embedded types; see issue #12. + // tag is not required for embedded types. if !field.Embedded() { return false } } - // Do not recurse into ignored fields. + // the field is explicitly ignored. if tagValue == "-" { continue } - if valid := c.checkType(field.Type(), tag); !valid { + if !c.isValidType(field.Type(), tag) { return false } } @@ -254,25 +242,29 @@ func implementsInterface(typ types.Type, ifaces []string, imports []*types.Packa } for _, ifacePath := range ifaces { - // "encoding/json.Marshaler" -> "encoding/json" + "Marshaler" + // e.g. "encoding/json.Marshaler" -> "encoding/json" + "Marshaler". idx := strings.LastIndex(ifacePath, ".") if idx == -1 { continue } + pkgName, ifaceName := ifacePath[:idx], ifacePath[idx+1:] scope, ok := findScope(pkgName) if !ok { continue } + obj := scope.Lookup(ifaceName) if obj == nil { continue } + iface, ok := obj.Type().Underlying().(*types.Interface) if !ok { continue } + if types.Implements(typ, iface) || types.Implements(types.NewPointer(typ), iface) { return true } diff --git a/vendor/go.uber.org/automaxprocs/internal/runtime/cpu_quota_linux.go b/vendor/go.uber.org/automaxprocs/internal/runtime/cpu_quota_linux.go index 3b974754c3..f9057fd273 100644 --- a/vendor/go.uber.org/automaxprocs/internal/runtime/cpu_quota_linux.go +++ b/vendor/go.uber.org/automaxprocs/internal/runtime/cpu_quota_linux.go @@ -25,15 +25,18 @@ package runtime import ( "errors" - "math" cg "go.uber.org/automaxprocs/internal/cgroups" ) // CPUQuotaToGOMAXPROCS converts the CPU quota applied to the calling process -// to a valid GOMAXPROCS value. -func CPUQuotaToGOMAXPROCS(minValue int) (int, CPUQuotaStatus, error) { - cgroups, err := newQueryer() +// to a valid GOMAXPROCS value. The quota is converted from float to int using round. +// If round == nil, DefaultRoundFunc is used. 
+func CPUQuotaToGOMAXPROCS(minValue int, round func(v float64) int) (int, CPUQuotaStatus, error) { + if round == nil { + round = DefaultRoundFunc + } + cgroups, err := _newQueryer() if err != nil { return -1, CPUQuotaUndefined, err } @@ -43,7 +46,7 @@ func CPUQuotaToGOMAXPROCS(minValue int) (int, CPUQuotaStatus, error) { return -1, CPUQuotaUndefined, err } - maxProcs := int(math.Floor(quota)) + maxProcs := round(quota) if minValue > 0 && maxProcs < minValue { return minValue, CPUQuotaMinUsed, nil } @@ -57,6 +60,7 @@ type queryer interface { var ( _newCgroups2 = cg.NewCGroups2ForCurrentProcess _newCgroups = cg.NewCGroupsForCurrentProcess + _newQueryer = newQueryer ) func newQueryer() (queryer, error) { diff --git a/vendor/go.uber.org/automaxprocs/internal/runtime/cpu_quota_unsupported.go b/vendor/go.uber.org/automaxprocs/internal/runtime/cpu_quota_unsupported.go index 6922554484..e74701508e 100644 --- a/vendor/go.uber.org/automaxprocs/internal/runtime/cpu_quota_unsupported.go +++ b/vendor/go.uber.org/automaxprocs/internal/runtime/cpu_quota_unsupported.go @@ -26,6 +26,6 @@ package runtime // CPUQuotaToGOMAXPROCS converts the CPU quota applied to the calling process // to a valid GOMAXPROCS value. This is Linux-specific and not supported in the // current OS. -func CPUQuotaToGOMAXPROCS(_ int) (int, CPUQuotaStatus, error) { +func CPUQuotaToGOMAXPROCS(_ int, _ func(v float64) int) (int, CPUQuotaStatus, error) { return -1, CPUQuotaUndefined, nil } diff --git a/vendor/go.uber.org/automaxprocs/internal/runtime/runtime.go b/vendor/go.uber.org/automaxprocs/internal/runtime/runtime.go index df6eacf053..f8a2834ac0 100644 --- a/vendor/go.uber.org/automaxprocs/internal/runtime/runtime.go +++ b/vendor/go.uber.org/automaxprocs/internal/runtime/runtime.go @@ -20,6 +20,8 @@ package runtime +import "math" + // CPUQuotaStatus presents the status of how CPU quota is used type CPUQuotaStatus int @@ -31,3 +33,8 @@ const ( // CPUQuotaMinUsed is returned when CPU quota is smaller than the min value CPUQuotaMinUsed ) + +// DefaultRoundFunc is the default function to convert CPU quota from float to int. It rounds the value down (floor). +func DefaultRoundFunc(v float64) int { + return int(math.Floor(v)) +} diff --git a/vendor/go.uber.org/automaxprocs/maxprocs/maxprocs.go b/vendor/go.uber.org/automaxprocs/maxprocs/maxprocs.go index 98176d6457..e561fe60b2 100644 --- a/vendor/go.uber.org/automaxprocs/maxprocs/maxprocs.go +++ b/vendor/go.uber.org/automaxprocs/maxprocs/maxprocs.go @@ -37,9 +37,10 @@ func currentMaxProcs() int { } type config struct { - printf func(string, ...interface{}) - procs func(int) (int, iruntime.CPUQuotaStatus, error) - minGOMAXPROCS int + printf func(string, ...interface{}) + procs func(int, func(v float64) int) (int, iruntime.CPUQuotaStatus, error) + minGOMAXPROCS int + roundQuotaFunc func(v float64) int } func (c *config) log(fmt string, args ...interface{}) { @@ -71,6 +72,13 @@ func Min(n int) Option { }) } +// RoundQuotaFunc sets the function that will be used to covert the CPU quota from float to int. +func RoundQuotaFunc(rf func(v float64) int) Option { + return optionFunc(func(cfg *config) { + cfg.roundQuotaFunc = rf + }) +} + type optionFunc func(*config) func (of optionFunc) apply(cfg *config) { of(cfg) } @@ -82,8 +90,9 @@ func (of optionFunc) apply(cfg *config) { of(cfg) } // configured CPU quota. 
func Set(opts ...Option) (func(), error) { cfg := &config{ - procs: iruntime.CPUQuotaToGOMAXPROCS, - minGOMAXPROCS: 1, + procs: iruntime.CPUQuotaToGOMAXPROCS, + roundQuotaFunc: iruntime.DefaultRoundFunc, + minGOMAXPROCS: 1, } for _, o := range opts { o.apply(cfg) @@ -102,7 +111,7 @@ func Set(opts ...Option) (func(), error) { return undoNoop, nil } - maxProcs, status, err := cfg.procs(cfg.minGOMAXPROCS) + maxProcs, status, err := cfg.procs(cfg.minGOMAXPROCS, cfg.roundQuotaFunc) if err != nil { return undoNoop, err } diff --git a/vendor/go.uber.org/automaxprocs/maxprocs/version.go b/vendor/go.uber.org/automaxprocs/maxprocs/version.go index 108a95535e..cc7fc5aee1 100644 --- a/vendor/go.uber.org/automaxprocs/maxprocs/version.go +++ b/vendor/go.uber.org/automaxprocs/maxprocs/version.go @@ -21,4 +21,4 @@ package maxprocs // Version is the current package version. -const Version = "1.5.2" +const Version = "1.6.0" diff --git a/vendor/golang.org/x/crypto/chacha20/chacha_noasm.go b/vendor/golang.org/x/crypto/chacha20/chacha_noasm.go index db42e6676a..c709b72847 100644 --- a/vendor/golang.org/x/crypto/chacha20/chacha_noasm.go +++ b/vendor/golang.org/x/crypto/chacha20/chacha_noasm.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build (!arm64 && !s390x && !ppc64le) || !gc || purego +//go:build (!arm64 && !s390x && !ppc64 && !ppc64le) || !gc || purego package chacha20 diff --git a/vendor/golang.org/x/crypto/chacha20/chacha_ppc64le.go b/vendor/golang.org/x/crypto/chacha20/chacha_ppc64x.go similarity index 89% rename from vendor/golang.org/x/crypto/chacha20/chacha_ppc64le.go rename to vendor/golang.org/x/crypto/chacha20/chacha_ppc64x.go index 3a4287f990..bd183d9ba1 100644 --- a/vendor/golang.org/x/crypto/chacha20/chacha_ppc64le.go +++ b/vendor/golang.org/x/crypto/chacha20/chacha_ppc64x.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build gc && !purego +//go:build gc && !purego && (ppc64 || ppc64le) package chacha20 diff --git a/vendor/golang.org/x/crypto/chacha20/chacha_ppc64le.s b/vendor/golang.org/x/crypto/chacha20/chacha_ppc64x.s similarity index 76% rename from vendor/golang.org/x/crypto/chacha20/chacha_ppc64le.s rename to vendor/golang.org/x/crypto/chacha20/chacha_ppc64x.s index c672ccf698..a660b4112f 100644 --- a/vendor/golang.org/x/crypto/chacha20/chacha_ppc64le.s +++ b/vendor/golang.org/x/crypto/chacha20/chacha_ppc64x.s @@ -19,7 +19,7 @@ // The differences in this and the original implementation are // due to the calling conventions and initialization of constants. 
-//go:build gc && !purego +//go:build gc && !purego && (ppc64 || ppc64le) #include "textflag.h" @@ -36,32 +36,68 @@ // for VPERMXOR #define MASK R18 -DATA consts<>+0x00(SB)/8, $0x3320646e61707865 -DATA consts<>+0x08(SB)/8, $0x6b20657479622d32 -DATA consts<>+0x10(SB)/8, $0x0000000000000001 -DATA consts<>+0x18(SB)/8, $0x0000000000000000 -DATA consts<>+0x20(SB)/8, $0x0000000000000004 -DATA consts<>+0x28(SB)/8, $0x0000000000000000 -DATA consts<>+0x30(SB)/8, $0x0a0b08090e0f0c0d -DATA consts<>+0x38(SB)/8, $0x0203000106070405 -DATA consts<>+0x40(SB)/8, $0x090a0b080d0e0f0c -DATA consts<>+0x48(SB)/8, $0x0102030005060704 -DATA consts<>+0x50(SB)/8, $0x6170786561707865 -DATA consts<>+0x58(SB)/8, $0x6170786561707865 -DATA consts<>+0x60(SB)/8, $0x3320646e3320646e -DATA consts<>+0x68(SB)/8, $0x3320646e3320646e -DATA consts<>+0x70(SB)/8, $0x79622d3279622d32 -DATA consts<>+0x78(SB)/8, $0x79622d3279622d32 -DATA consts<>+0x80(SB)/8, $0x6b2065746b206574 -DATA consts<>+0x88(SB)/8, $0x6b2065746b206574 -DATA consts<>+0x90(SB)/8, $0x0000000100000000 -DATA consts<>+0x98(SB)/8, $0x0000000300000002 -DATA consts<>+0xa0(SB)/8, $0x5566774411223300 -DATA consts<>+0xa8(SB)/8, $0xddeeffcc99aabb88 -DATA consts<>+0xb0(SB)/8, $0x6677445522330011 -DATA consts<>+0xb8(SB)/8, $0xeeffccddaabb8899 +DATA consts<>+0x00(SB)/4, $0x61707865 +DATA consts<>+0x04(SB)/4, $0x3320646e +DATA consts<>+0x08(SB)/4, $0x79622d32 +DATA consts<>+0x0c(SB)/4, $0x6b206574 +DATA consts<>+0x10(SB)/4, $0x00000001 +DATA consts<>+0x14(SB)/4, $0x00000000 +DATA consts<>+0x18(SB)/4, $0x00000000 +DATA consts<>+0x1c(SB)/4, $0x00000000 +DATA consts<>+0x20(SB)/4, $0x00000004 +DATA consts<>+0x24(SB)/4, $0x00000000 +DATA consts<>+0x28(SB)/4, $0x00000000 +DATA consts<>+0x2c(SB)/4, $0x00000000 +DATA consts<>+0x30(SB)/4, $0x0e0f0c0d +DATA consts<>+0x34(SB)/4, $0x0a0b0809 +DATA consts<>+0x38(SB)/4, $0x06070405 +DATA consts<>+0x3c(SB)/4, $0x02030001 +DATA consts<>+0x40(SB)/4, $0x0d0e0f0c +DATA consts<>+0x44(SB)/4, $0x090a0b08 +DATA consts<>+0x48(SB)/4, $0x05060704 +DATA consts<>+0x4c(SB)/4, $0x01020300 +DATA consts<>+0x50(SB)/4, $0x61707865 +DATA consts<>+0x54(SB)/4, $0x61707865 +DATA consts<>+0x58(SB)/4, $0x61707865 +DATA consts<>+0x5c(SB)/4, $0x61707865 +DATA consts<>+0x60(SB)/4, $0x3320646e +DATA consts<>+0x64(SB)/4, $0x3320646e +DATA consts<>+0x68(SB)/4, $0x3320646e +DATA consts<>+0x6c(SB)/4, $0x3320646e +DATA consts<>+0x70(SB)/4, $0x79622d32 +DATA consts<>+0x74(SB)/4, $0x79622d32 +DATA consts<>+0x78(SB)/4, $0x79622d32 +DATA consts<>+0x7c(SB)/4, $0x79622d32 +DATA consts<>+0x80(SB)/4, $0x6b206574 +DATA consts<>+0x84(SB)/4, $0x6b206574 +DATA consts<>+0x88(SB)/4, $0x6b206574 +DATA consts<>+0x8c(SB)/4, $0x6b206574 +DATA consts<>+0x90(SB)/4, $0x00000000 +DATA consts<>+0x94(SB)/4, $0x00000001 +DATA consts<>+0x98(SB)/4, $0x00000002 +DATA consts<>+0x9c(SB)/4, $0x00000003 +DATA consts<>+0xa0(SB)/4, $0x11223300 +DATA consts<>+0xa4(SB)/4, $0x55667744 +DATA consts<>+0xa8(SB)/4, $0x99aabb88 +DATA consts<>+0xac(SB)/4, $0xddeeffcc +DATA consts<>+0xb0(SB)/4, $0x22330011 +DATA consts<>+0xb4(SB)/4, $0x66774455 +DATA consts<>+0xb8(SB)/4, $0xaabb8899 +DATA consts<>+0xbc(SB)/4, $0xeeffccdd GLOBL consts<>(SB), RODATA, $0xc0 +#ifdef GOARCH_ppc64 +#define BE_XXBRW_INIT() \ + LVSL (R0)(R0), V24 \ + VSPLTISB $3, V25 \ + VXOR V24, V25, V24 \ + +#define BE_XXBRW(vr) VPERM vr, vr, V24, vr +#else +#define BE_XXBRW_INIT() +#define BE_XXBRW(vr) +#endif + //func chaCha20_ctr32_vsx(out, inp *byte, len int, key *[8]uint32, counter *uint32) TEXT ·chaCha20_ctr32_vsx(SB),NOSPLIT,$64-40 MOVD out+0(FP), 
OUT @@ -94,6 +130,8 @@ TEXT ·chaCha20_ctr32_vsx(SB),NOSPLIT,$64-40 // Clear V27 VXOR V27, V27, V27 + BE_XXBRW_INIT() + // V28 LXVW4X (CONSTBASE)(R11), VS60 @@ -299,6 +337,11 @@ loop_vsx: VADDUWM V8, V18, V8 VADDUWM V12, V19, V12 + BE_XXBRW(V0) + BE_XXBRW(V4) + BE_XXBRW(V8) + BE_XXBRW(V12) + CMPU LEN, $64 BLT tail_vsx @@ -327,6 +370,11 @@ loop_vsx: VADDUWM V9, V18, V8 VADDUWM V13, V19, V12 + BE_XXBRW(V0) + BE_XXBRW(V4) + BE_XXBRW(V8) + BE_XXBRW(V12) + CMPU LEN, $64 BLT tail_vsx @@ -334,8 +382,8 @@ loop_vsx: LXVW4X (INP)(R8), VS60 LXVW4X (INP)(R9), VS61 LXVW4X (INP)(R10), VS62 - VXOR V27, V0, V27 + VXOR V27, V0, V27 VXOR V28, V4, V28 VXOR V29, V8, V29 VXOR V30, V12, V30 @@ -354,6 +402,11 @@ loop_vsx: VADDUWM V10, V18, V8 VADDUWM V14, V19, V12 + BE_XXBRW(V0) + BE_XXBRW(V4) + BE_XXBRW(V8) + BE_XXBRW(V12) + CMPU LEN, $64 BLT tail_vsx @@ -381,6 +434,11 @@ loop_vsx: VADDUWM V11, V18, V8 VADDUWM V15, V19, V12 + BE_XXBRW(V0) + BE_XXBRW(V4) + BE_XXBRW(V8) + BE_XXBRW(V12) + CMPU LEN, $64 BLT tail_vsx @@ -408,9 +466,9 @@ loop_vsx: done_vsx: // Increment counter by number of 64 byte blocks - MOVD (CNT), R14 + MOVWZ (CNT), R14 ADD BLOCKS, R14 - MOVD R14, (CNT) + MOVWZ R14, (CNT) RET tail_vsx: diff --git a/vendor/golang.org/x/crypto/internal/poly1305/mac_noasm.go b/vendor/golang.org/x/crypto/internal/poly1305/mac_noasm.go index 333da285b3..bd896bdc76 100644 --- a/vendor/golang.org/x/crypto/internal/poly1305/mac_noasm.go +++ b/vendor/golang.org/x/crypto/internal/poly1305/mac_noasm.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build (!amd64 && !ppc64le && !s390x) || !gc || purego +//go:build (!amd64 && !ppc64le && !ppc64 && !s390x) || !gc || purego package poly1305 diff --git a/vendor/golang.org/x/crypto/internal/poly1305/sum_ppc64le.go b/vendor/golang.org/x/crypto/internal/poly1305/sum_ppc64x.go similarity index 95% rename from vendor/golang.org/x/crypto/internal/poly1305/sum_ppc64le.go rename to vendor/golang.org/x/crypto/internal/poly1305/sum_ppc64x.go index 4aec4874b5..1a1679aaad 100644 --- a/vendor/golang.org/x/crypto/internal/poly1305/sum_ppc64le.go +++ b/vendor/golang.org/x/crypto/internal/poly1305/sum_ppc64x.go @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build gc && !purego +//go:build gc && !purego && (ppc64 || ppc64le) package poly1305 diff --git a/vendor/golang.org/x/crypto/internal/poly1305/sum_ppc64le.s b/vendor/golang.org/x/crypto/internal/poly1305/sum_ppc64x.s similarity index 89% rename from vendor/golang.org/x/crypto/internal/poly1305/sum_ppc64le.s rename to vendor/golang.org/x/crypto/internal/poly1305/sum_ppc64x.s index b3c1699bff..6899a1dabc 100644 --- a/vendor/golang.org/x/crypto/internal/poly1305/sum_ppc64le.s +++ b/vendor/golang.org/x/crypto/internal/poly1305/sum_ppc64x.s @@ -2,15 +2,25 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build gc && !purego +//go:build gc && !purego && (ppc64 || ppc64le) #include "textflag.h" // This was ported from the amd64 implementation. 
+#ifdef GOARCH_ppc64le +#define LE_MOVD MOVD +#define LE_MOVWZ MOVWZ +#define LE_MOVHZ MOVHZ +#else +#define LE_MOVD MOVDBR +#define LE_MOVWZ MOVWBR +#define LE_MOVHZ MOVHBR +#endif + #define POLY1305_ADD(msg, h0, h1, h2, t0, t1, t2) \ - MOVD (msg), t0; \ - MOVD 8(msg), t1; \ + LE_MOVD (msg)( R0), t0; \ + LE_MOVD (msg)(R24), t1; \ MOVD $1, t2; \ ADDC t0, h0, h0; \ ADDE t1, h1, h1; \ @@ -50,10 +60,6 @@ ADDE t3, h1, h1; \ ADDZE h2 -DATA ·poly1305Mask<>+0x00(SB)/8, $0x0FFFFFFC0FFFFFFF -DATA ·poly1305Mask<>+0x08(SB)/8, $0x0FFFFFFC0FFFFFFC -GLOBL ·poly1305Mask<>(SB), RODATA, $16 - // func update(state *[7]uint64, msg []byte) TEXT ·update(SB), $0-32 MOVD state+0(FP), R3 @@ -66,6 +72,8 @@ TEXT ·update(SB), $0-32 MOVD 24(R3), R11 // r0 MOVD 32(R3), R12 // r1 + MOVD $8, R24 + CMP R5, $16 BLT bytes_between_0_and_15 @@ -94,7 +102,7 @@ flush_buffer: // Greater than 8 -- load the rightmost remaining bytes in msg // and put into R17 (h1) - MOVD (R4)(R21), R17 + LE_MOVD (R4)(R21), R17 MOVD $16, R22 // Find the offset to those bytes @@ -118,7 +126,7 @@ just1: BLT less8 // Exactly 8 - MOVD (R4), R16 + LE_MOVD (R4), R16 CMP R17, $0 @@ -133,7 +141,7 @@ less8: MOVD $0, R22 // shift count CMP R5, $4 BLT less4 - MOVWZ (R4), R16 + LE_MOVWZ (R4), R16 ADD $4, R4 ADD $-4, R5 MOVD $32, R22 @@ -141,7 +149,7 @@ less8: less4: CMP R5, $2 BLT less2 - MOVHZ (R4), R21 + LE_MOVHZ (R4), R21 SLD R22, R21, R21 OR R16, R21, R16 ADD $16, R22 diff --git a/vendor/golang.org/x/crypto/sha3/doc.go b/vendor/golang.org/x/crypto/sha3/doc.go index 7e02309070..bbf391fe6e 100644 --- a/vendor/golang.org/x/crypto/sha3/doc.go +++ b/vendor/golang.org/x/crypto/sha3/doc.go @@ -5,6 +5,10 @@ // Package sha3 implements the SHA-3 fixed-output-length hash functions and // the SHAKE variable-output-length hash functions defined by FIPS-202. // +// All types in this package also implement [encoding.BinaryMarshaler], +// [encoding.BinaryAppender] and [encoding.BinaryUnmarshaler] to marshal and +// unmarshal the internal state of the hash. +// // Both types of hash function use the "sponge" construction and the Keccak // permutation. For a detailed specification see http://keccak.noekeon.org/ // diff --git a/vendor/golang.org/x/crypto/sha3/hashes.go b/vendor/golang.org/x/crypto/sha3/hashes.go index c544b29e5f..31fffbe044 100644 --- a/vendor/golang.org/x/crypto/sha3/hashes.go +++ b/vendor/golang.org/x/crypto/sha3/hashes.go @@ -48,33 +48,52 @@ func init() { crypto.RegisterHash(crypto.SHA3_512, New512) } +const ( + dsbyteSHA3 = 0b00000110 + dsbyteKeccak = 0b00000001 + dsbyteShake = 0b00011111 + dsbyteCShake = 0b00000100 + + // rateK[c] is the rate in bytes for Keccak[c] where c is the capacity in + // bits. Given the sponge size is 1600 bits, the rate is 1600 - c bits. 
+ rateK256 = (1600 - 256) / 8 + rateK448 = (1600 - 448) / 8 + rateK512 = (1600 - 512) / 8 + rateK768 = (1600 - 768) / 8 + rateK1024 = (1600 - 1024) / 8 +) + func new224Generic() *state { - return &state{rate: 144, outputLen: 28, dsbyte: 0x06} + return &state{rate: rateK448, outputLen: 28, dsbyte: dsbyteSHA3} } func new256Generic() *state { - return &state{rate: 136, outputLen: 32, dsbyte: 0x06} + return &state{rate: rateK512, outputLen: 32, dsbyte: dsbyteSHA3} } func new384Generic() *state { - return &state{rate: 104, outputLen: 48, dsbyte: 0x06} + return &state{rate: rateK768, outputLen: 48, dsbyte: dsbyteSHA3} } func new512Generic() *state { - return &state{rate: 72, outputLen: 64, dsbyte: 0x06} + return &state{rate: rateK1024, outputLen: 64, dsbyte: dsbyteSHA3} } // NewLegacyKeccak256 creates a new Keccak-256 hash. // // Only use this function if you require compatibility with an existing cryptosystem // that uses non-standard padding. All other users should use New256 instead. -func NewLegacyKeccak256() hash.Hash { return &state{rate: 136, outputLen: 32, dsbyte: 0x01} } +func NewLegacyKeccak256() hash.Hash { + return &state{rate: rateK512, outputLen: 32, dsbyte: dsbyteKeccak} +} // NewLegacyKeccak512 creates a new Keccak-512 hash. // // Only use this function if you require compatibility with an existing cryptosystem // that uses non-standard padding. All other users should use New512 instead. -func NewLegacyKeccak512() hash.Hash { return &state{rate: 72, outputLen: 64, dsbyte: 0x01} } +func NewLegacyKeccak512() hash.Hash { + return &state{rate: rateK1024, outputLen: 64, dsbyte: dsbyteKeccak} +} // Sum224 returns the SHA3-224 digest of the data. func Sum224(data []byte) (digest [28]byte) { diff --git a/vendor/golang.org/x/crypto/sha3/sha3.go b/vendor/golang.org/x/crypto/sha3/sha3.go index afedde5abf..6658c44479 100644 --- a/vendor/golang.org/x/crypto/sha3/sha3.go +++ b/vendor/golang.org/x/crypto/sha3/sha3.go @@ -4,6 +4,15 @@ package sha3 +import ( + "crypto/subtle" + "encoding/binary" + "errors" + "unsafe" + + "golang.org/x/sys/cpu" +) + // spongeDirection indicates the direction bytes are flowing through the sponge. type spongeDirection int @@ -14,16 +23,13 @@ const ( spongeSqueezing ) -const ( - // maxRate is the maximum size of the internal buffer. SHAKE-256 - // currently needs the largest buffer. - maxRate = 168 -) - type state struct { - // Generic sponge components. - a [25]uint64 // main state of the hash - rate int // the number of bytes of state to use + a [1600 / 8]byte // main state of the hash + + // a[n:rate] is the buffer. If absorbing, it's the remaining space to XOR + // into before running the permutation. If squeezing, it's the remaining + // output to produce before running the permutation. + n, rate int // dsbyte contains the "domain separation" bits and the first bit of // the padding. Sections 6.1 and 6.2 of [1] separate the outputs of the @@ -39,10 +45,6 @@ type state struct { // Extendable-Output Functions (May 2014)" dsbyte byte - i, n int // storage[i:n] is the buffer, i is only used while squeezing - storage [maxRate]byte - - // Specific to SHA-3 and SHAKE. outputLen int // the default output size in bytes state spongeDirection // whether the sponge is absorbing or squeezing } @@ -61,7 +63,7 @@ func (d *state) Reset() { d.a[i] = 0 } d.state = spongeAbsorbing - d.i, d.n = 0, 0 + d.n = 0 } func (d *state) clone() *state { @@ -69,22 +71,25 @@ func (d *state) clone() *state { return &ret } -// permute applies the KeccakF-1600 permutation. 
It handles -// any input-output buffering. +// permute applies the KeccakF-1600 permutation. func (d *state) permute() { - switch d.state { - case spongeAbsorbing: - // If we're absorbing, we need to xor the input into the state - // before applying the permutation. - xorIn(d, d.storage[:d.rate]) - d.n = 0 - keccakF1600(&d.a) - case spongeSqueezing: - // If we're squeezing, we need to apply the permutation before - // copying more output. - keccakF1600(&d.a) - d.i = 0 - copyOut(d, d.storage[:d.rate]) + var a *[25]uint64 + if cpu.IsBigEndian { + a = new([25]uint64) + for i := range a { + a[i] = binary.LittleEndian.Uint64(d.a[i*8:]) + } + } else { + a = (*[25]uint64)(unsafe.Pointer(&d.a)) + } + + keccakF1600(a) + d.n = 0 + + if cpu.IsBigEndian { + for i := range a { + binary.LittleEndian.PutUint64(d.a[i*8:], a[i]) + } } } @@ -92,53 +97,36 @@ func (d *state) permute() { // the multi-bitrate 10..1 padding rule, and permutes the state. func (d *state) padAndPermute() { // Pad with this instance's domain-separator bits. We know that there's - // at least one byte of space in d.buf because, if it were full, + // at least one byte of space in the sponge because, if it were full, // permute would have been called to empty it. dsbyte also contains the // first one bit for the padding. See the comment in the state struct. - d.storage[d.n] = d.dsbyte - d.n++ - for d.n < d.rate { - d.storage[d.n] = 0 - d.n++ - } + d.a[d.n] ^= d.dsbyte // This adds the final one bit for the padding. Because of the way that // bits are numbered from the LSB upwards, the final bit is the MSB of // the last byte. - d.storage[d.rate-1] ^= 0x80 + d.a[d.rate-1] ^= 0x80 // Apply the permutation d.permute() d.state = spongeSqueezing - d.n = d.rate - copyOut(d, d.storage[:d.rate]) } // Write absorbs more data into the hash's state. It panics if any // output has already been read. -func (d *state) Write(p []byte) (written int, err error) { +func (d *state) Write(p []byte) (n int, err error) { if d.state != spongeAbsorbing { panic("sha3: Write after Read") } - written = len(p) + + n = len(p) for len(p) > 0 { - if d.n == 0 && len(p) >= d.rate { - // The fast path; absorb a full "rate" bytes of input and apply the permutation. - xorIn(d, p[:d.rate]) - p = p[d.rate:] - keccakF1600(&d.a) - } else { - // The slow path; buffer the input until we can fill the sponge, and then xor it in. - todo := d.rate - d.n - if todo > len(p) { - todo = len(p) - } - d.n += copy(d.storage[d.n:], p[:todo]) - p = p[todo:] - - // If the sponge is full, apply the permutation. - if d.n == d.rate { - d.permute() - } + x := subtle.XORBytes(d.a[d.n:d.rate], d.a[d.n:d.rate], p) + d.n += x + p = p[x:] + + // If the sponge is full, apply the permutation. + if d.n == d.rate { + d.permute() } } @@ -156,14 +144,14 @@ func (d *state) Read(out []byte) (n int, err error) { // Now, do the squeezing. for len(out) > 0 { - n := copy(out, d.storage[d.i:d.n]) - d.i += n - out = out[n:] - // Apply the permutation if we've squeezed the sponge dry. - if d.i == d.rate { + if d.n == d.rate { d.permute() } + + x := copy(out, d.a[d.n:d.rate]) + d.n += x + out = out[x:] } return @@ -183,3 +171,74 @@ func (d *state) Sum(in []byte) []byte { dup.Read(hash) return append(in, hash...) 
} + +const ( + magicSHA3 = "sha\x08" + magicShake = "sha\x09" + magicCShake = "sha\x0a" + magicKeccak = "sha\x0b" + // magic || rate || main state || n || sponge direction + marshaledSize = len(magicSHA3) + 1 + 200 + 1 + 1 +) + +func (d *state) MarshalBinary() ([]byte, error) { + return d.AppendBinary(make([]byte, 0, marshaledSize)) +} + +func (d *state) AppendBinary(b []byte) ([]byte, error) { + switch d.dsbyte { + case dsbyteSHA3: + b = append(b, magicSHA3...) + case dsbyteShake: + b = append(b, magicShake...) + case dsbyteCShake: + b = append(b, magicCShake...) + case dsbyteKeccak: + b = append(b, magicKeccak...) + default: + panic("unknown dsbyte") + } + // rate is at most 168, and n is at most rate. + b = append(b, byte(d.rate)) + b = append(b, d.a[:]...) + b = append(b, byte(d.n), byte(d.state)) + return b, nil +} + +func (d *state) UnmarshalBinary(b []byte) error { + if len(b) != marshaledSize { + return errors.New("sha3: invalid hash state") + } + + magic := string(b[:len(magicSHA3)]) + b = b[len(magicSHA3):] + switch { + case magic == magicSHA3 && d.dsbyte == dsbyteSHA3: + case magic == magicShake && d.dsbyte == dsbyteShake: + case magic == magicCShake && d.dsbyte == dsbyteCShake: + case magic == magicKeccak && d.dsbyte == dsbyteKeccak: + default: + return errors.New("sha3: invalid hash state identifier") + } + + rate := int(b[0]) + b = b[1:] + if rate != d.rate { + return errors.New("sha3: invalid hash state function") + } + + copy(d.a[:], b) + b = b[len(d.a):] + + n, state := int(b[0]), spongeDirection(b[1]) + if n > d.rate { + return errors.New("sha3: invalid hash state") + } + d.n = n + if state != spongeAbsorbing && state != spongeSqueezing { + return errors.New("sha3: invalid hash state") + } + d.state = state + + return nil +} diff --git a/vendor/golang.org/x/crypto/sha3/shake.go b/vendor/golang.org/x/crypto/sha3/shake.go index a01ef43577..a6b3a4281f 100644 --- a/vendor/golang.org/x/crypto/sha3/shake.go +++ b/vendor/golang.org/x/crypto/sha3/shake.go @@ -16,9 +16,12 @@ package sha3 // [2] https://doi.org/10.6028/NIST.SP.800-185 import ( + "bytes" "encoding/binary" + "errors" "hash" "io" + "math/bits" ) // ShakeHash defines the interface to hash functions that support @@ -50,41 +53,33 @@ type cshakeState struct { initBlock []byte } -// Consts for configuring initial SHA-3 state -const ( - dsbyteShake = 0x1f - dsbyteCShake = 0x04 - rate128 = 168 - rate256 = 136 -) +func bytepad(data []byte, rate int) []byte { + out := make([]byte, 0, 9+len(data)+rate-1) + out = append(out, leftEncode(uint64(rate))...) + out = append(out, data...) + if padlen := rate - len(out)%rate; padlen < rate { + out = append(out, make([]byte, padlen)...) + } + return out +} -func bytepad(input []byte, w int) []byte { - // leftEncode always returns max 9 bytes - buf := make([]byte, 0, 9+len(input)+w) - buf = append(buf, leftEncode(uint64(w))...) - buf = append(buf, input...) - padlen := w - (len(buf) % w) - return append(buf, make([]byte, padlen)...) -} - -func leftEncode(value uint64) []byte { - var b [9]byte - binary.BigEndian.PutUint64(b[1:], value) - // Trim all but last leading zero bytes - i := byte(1) - for i < 8 && b[i] == 0 { - i++ +func leftEncode(x uint64) []byte { + // Let n be the smallest positive integer for which 2^(8n) > x. + n := (bits.Len64(x) + 7) / 8 + if n == 0 { + n = 1 } - // Prepend number of encoded bytes - b[i-1] = 9 - i - return b[i-1:] + // Return n || x with n as a byte and x an n bytes in big-endian order. 
+ b := make([]byte, 9) + binary.BigEndian.PutUint64(b[1:], x) + b = b[9-n-1:] + b[0] = byte(n) + return b } func newCShake(N, S []byte, rate, outputLen int, dsbyte byte) ShakeHash { c := cshakeState{state: &state{rate: rate, outputLen: outputLen, dsbyte: dsbyte}} - - // leftEncode returns max 9 bytes - c.initBlock = make([]byte, 0, 9*2+len(N)+len(S)) + c.initBlock = make([]byte, 0, 9+len(N)+9+len(S)) // leftEncode returns max 9 bytes c.initBlock = append(c.initBlock, leftEncode(uint64(len(N))*8)...) c.initBlock = append(c.initBlock, N...) c.initBlock = append(c.initBlock, leftEncode(uint64(len(S))*8)...) @@ -111,6 +106,30 @@ func (c *state) Clone() ShakeHash { return c.clone() } +func (c *cshakeState) MarshalBinary() ([]byte, error) { + return c.AppendBinary(make([]byte, 0, marshaledSize+len(c.initBlock))) +} + +func (c *cshakeState) AppendBinary(b []byte) ([]byte, error) { + b, err := c.state.AppendBinary(b) + if err != nil { + return nil, err + } + b = append(b, c.initBlock...) + return b, nil +} + +func (c *cshakeState) UnmarshalBinary(b []byte) error { + if len(b) <= marshaledSize { + return errors.New("sha3: invalid hash state") + } + if err := c.state.UnmarshalBinary(b[:marshaledSize]); err != nil { + return err + } + c.initBlock = bytes.Clone(b[marshaledSize:]) + return nil +} + // NewShake128 creates a new SHAKE128 variable-output-length ShakeHash. // Its generic security strength is 128 bits against all attacks if at // least 32 bytes of its output are used. @@ -126,11 +145,11 @@ func NewShake256() ShakeHash { } func newShake128Generic() *state { - return &state{rate: rate128, outputLen: 32, dsbyte: dsbyteShake} + return &state{rate: rateK256, outputLen: 32, dsbyte: dsbyteShake} } func newShake256Generic() *state { - return &state{rate: rate256, outputLen: 64, dsbyte: dsbyteShake} + return &state{rate: rateK512, outputLen: 64, dsbyte: dsbyteShake} } // NewCShake128 creates a new instance of cSHAKE128 variable-output-length ShakeHash, @@ -143,7 +162,7 @@ func NewCShake128(N, S []byte) ShakeHash { if len(N) == 0 && len(S) == 0 { return NewShake128() } - return newCShake(N, S, rate128, 32, dsbyteCShake) + return newCShake(N, S, rateK256, 32, dsbyteCShake) } // NewCShake256 creates a new instance of cSHAKE256 variable-output-length ShakeHash, @@ -156,7 +175,7 @@ func NewCShake256(N, S []byte) ShakeHash { if len(N) == 0 && len(S) == 0 { return NewShake256() } - return newCShake(N, S, rate256, 64, dsbyteCShake) + return newCShake(N, S, rateK512, 64, dsbyteCShake) } // ShakeSum128 writes an arbitrary-length digest of data into hash. diff --git a/vendor/golang.org/x/crypto/sha3/xor.go b/vendor/golang.org/x/crypto/sha3/xor.go deleted file mode 100644 index 6ada5c9574..0000000000 --- a/vendor/golang.org/x/crypto/sha3/xor.go +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright 2015 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package sha3 - -import ( - "crypto/subtle" - "encoding/binary" - "unsafe" - - "golang.org/x/sys/cpu" -) - -// xorIn xors the bytes in buf into the state. -func xorIn(d *state, buf []byte) { - if cpu.IsBigEndian { - for i := 0; len(buf) >= 8; i++ { - a := binary.LittleEndian.Uint64(buf) - d.a[i] ^= a - buf = buf[8:] - } - } else { - ab := (*[25 * 64 / 8]byte)(unsafe.Pointer(&d.a)) - subtle.XORBytes(ab[:], ab[:], buf) - } -} - -// copyOut copies uint64s to a byte buffer. 
-func copyOut(d *state, b []byte) { - if cpu.IsBigEndian { - for i := 0; len(b) >= 8; i++ { - binary.LittleEndian.PutUint64(b, d.a[i]) - b = b[8:] - } - } else { - ab := (*[25 * 64 / 8]byte)(unsafe.Pointer(&d.a)) - copy(b, ab[:]) - } -} diff --git a/vendor/golang.org/x/crypto/ssh/client_auth.go b/vendor/golang.org/x/crypto/ssh/client_auth.go index b93961010d..b86dde151d 100644 --- a/vendor/golang.org/x/crypto/ssh/client_auth.go +++ b/vendor/golang.org/x/crypto/ssh/client_auth.go @@ -555,6 +555,7 @@ func (cb KeyboardInteractiveChallenge) auth(session []byte, user string, c packe } gotMsgExtInfo := false + gotUserAuthInfoRequest := false for { packet, err := c.readPacket() if err != nil { @@ -585,6 +586,9 @@ func (cb KeyboardInteractiveChallenge) auth(session []byte, user string, c packe if msg.PartialSuccess { return authPartialSuccess, msg.Methods, nil } + if !gotUserAuthInfoRequest { + return authFailure, msg.Methods, unexpectedMessageError(msgUserAuthInfoRequest, packet[0]) + } return authFailure, msg.Methods, nil case msgUserAuthSuccess: return authSuccess, nil, nil @@ -596,6 +600,7 @@ func (cb KeyboardInteractiveChallenge) auth(session []byte, user string, c packe if err := Unmarshal(packet, &msg); err != nil { return authFailure, nil, err } + gotUserAuthInfoRequest = true // Manually unpack the prompt/echo pairs. rest := msg.Prompts diff --git a/vendor/golang.org/x/exp/typeparams/LICENSE b/vendor/golang.org/x/exp/typeparams/LICENSE index 6a66aea5ea..2a7cf70da6 100644 --- a/vendor/golang.org/x/exp/typeparams/LICENSE +++ b/vendor/golang.org/x/exp/typeparams/LICENSE @@ -1,4 +1,4 @@ -Copyright (c) 2009 The Go Authors. All rights reserved. +Copyright 2009 The Go Authors. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are @@ -10,7 +10,7 @@ notice, this list of conditions and the following disclaimer. copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - * Neither the name of Google Inc. nor the names of its + * Neither the name of Google LLC nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. diff --git a/vendor/golang.org/x/net/http2/client_conn_pool.go b/vendor/golang.org/x/net/http2/client_conn_pool.go index 780968d6c1..e81b73e6a7 100644 --- a/vendor/golang.org/x/net/http2/client_conn_pool.go +++ b/vendor/golang.org/x/net/http2/client_conn_pool.go @@ -8,8 +8,8 @@ package http2 import ( "context" - "crypto/tls" "errors" + "net" "net/http" "sync" ) @@ -158,7 +158,7 @@ func (c *dialCall) dial(ctx context.Context, addr string) { // This code decides which ones live or die. // The return value used is whether c was used. // c is never closed. 
-func (p *clientConnPool) addConnIfNeeded(key string, t *Transport, c *tls.Conn) (used bool, err error) { +func (p *clientConnPool) addConnIfNeeded(key string, t *Transport, c net.Conn) (used bool, err error) { p.mu.Lock() for _, cc := range p.conns[key] { if cc.CanTakeNewRequest() { @@ -194,8 +194,8 @@ type addConnCall struct { err error } -func (c *addConnCall) run(t *Transport, key string, tc *tls.Conn) { - cc, err := t.NewClientConn(tc) +func (c *addConnCall) run(t *Transport, key string, nc net.Conn) { + cc, err := t.NewClientConn(nc) p := c.p p.mu.Lock() diff --git a/vendor/golang.org/x/net/http2/server.go b/vendor/golang.org/x/net/http2/server.go index 617b4a4762..832414b450 100644 --- a/vendor/golang.org/x/net/http2/server.go +++ b/vendor/golang.org/x/net/http2/server.go @@ -306,7 +306,7 @@ func ConfigureServer(s *http.Server, conf *Server) error { if s.TLSNextProto == nil { s.TLSNextProto = map[string]func(*http.Server, *tls.Conn, http.Handler){} } - protoHandler := func(hs *http.Server, c *tls.Conn, h http.Handler) { + protoHandler := func(hs *http.Server, c net.Conn, h http.Handler, sawClientPreface bool) { if testHookOnConn != nil { testHookOnConn() } @@ -323,12 +323,31 @@ func ConfigureServer(s *http.Server, conf *Server) error { ctx = bc.BaseContext() } conf.ServeConn(c, &ServeConnOpts{ - Context: ctx, - Handler: h, - BaseConfig: hs, + Context: ctx, + Handler: h, + BaseConfig: hs, + SawClientPreface: sawClientPreface, }) } - s.TLSNextProto[NextProtoTLS] = protoHandler + s.TLSNextProto[NextProtoTLS] = func(hs *http.Server, c *tls.Conn, h http.Handler) { + protoHandler(hs, c, h, false) + } + // The "unencrypted_http2" TLSNextProto key is used to pass off non-TLS HTTP/2 conns. + // + // A connection passed in this method has already had the HTTP/2 preface read from it. + s.TLSNextProto[nextProtoUnencryptedHTTP2] = func(hs *http.Server, c *tls.Conn, h http.Handler) { + nc, err := unencryptedNetConnFromTLSConn(c) + if err != nil { + if lg := hs.ErrorLog; lg != nil { + lg.Print(err) + } else { + log.Print(err) + } + go c.Close() + return + } + protoHandler(hs, nc, h, true) + } return nil } @@ -2880,6 +2899,11 @@ func (w *responseWriter) SetWriteDeadline(deadline time.Time) error { return nil } +func (w *responseWriter) EnableFullDuplex() error { + // We always support full duplex responses, so this is a no-op. + return nil +} + func (w *responseWriter) Flush() { w.FlushError() } diff --git a/vendor/golang.org/x/net/http2/transport.go b/vendor/golang.org/x/net/http2/transport.go index 0c5f64aa8b..f5968f4407 100644 --- a/vendor/golang.org/x/net/http2/transport.go +++ b/vendor/golang.org/x/net/http2/transport.go @@ -202,6 +202,20 @@ func (t *Transport) markNewGoroutine() { } } +func (t *Transport) now() time.Time { + if t != nil && t.transportTestHooks != nil { + return t.transportTestHooks.group.Now() + } + return time.Now() +} + +func (t *Transport) timeSince(when time.Time) time.Duration { + if t != nil && t.transportTestHooks != nil { + return t.now().Sub(when) + } + return time.Since(when) +} + // newTimer creates a new time.Timer, or a synthetic timer in tests. 
func (t *Transport) newTimer(d time.Duration) timer { if t.transportTestHooks != nil { @@ -281,8 +295,8 @@ func configureTransports(t1 *http.Transport) (*Transport, error) { if !strSliceContains(t1.TLSClientConfig.NextProtos, "http/1.1") { t1.TLSClientConfig.NextProtos = append(t1.TLSClientConfig.NextProtos, "http/1.1") } - upgradeFn := func(authority string, c *tls.Conn) http.RoundTripper { - addr := authorityAddr("https", authority) + upgradeFn := func(scheme, authority string, c net.Conn) http.RoundTripper { + addr := authorityAddr(scheme, authority) if used, err := connPool.addConnIfNeeded(addr, t2, c); err != nil { go c.Close() return erringRoundTripper{err} @@ -293,18 +307,37 @@ func configureTransports(t1 *http.Transport) (*Transport, error) { // was unknown) go c.Close() } + if scheme == "http" { + return (*unencryptedTransport)(t2) + } return t2 } - if m := t1.TLSNextProto; len(m) == 0 { - t1.TLSNextProto = map[string]func(string, *tls.Conn) http.RoundTripper{ - "h2": upgradeFn, + if t1.TLSNextProto == nil { + t1.TLSNextProto = make(map[string]func(string, *tls.Conn) http.RoundTripper) + } + t1.TLSNextProto[NextProtoTLS] = func(authority string, c *tls.Conn) http.RoundTripper { + return upgradeFn("https", authority, c) + } + // The "unencrypted_http2" TLSNextProto key is used to pass off non-TLS HTTP/2 conns. + t1.TLSNextProto[nextProtoUnencryptedHTTP2] = func(authority string, c *tls.Conn) http.RoundTripper { + nc, err := unencryptedNetConnFromTLSConn(c) + if err != nil { + go c.Close() + return erringRoundTripper{err} } - } else { - m["h2"] = upgradeFn + return upgradeFn("http", authority, nc) } return t2, nil } +// unencryptedTransport is a Transport with a RoundTrip method that +// always permits http:// URLs. +type unencryptedTransport Transport + +func (t *unencryptedTransport) RoundTrip(req *http.Request) (*http.Response, error) { + return (*Transport)(t).RoundTripOpt(req, RoundTripOpt{allowHTTP: true}) +} + func (t *Transport) connPool() ClientConnPool { t.connPoolOnce.Do(t.initConnPool) return t.connPoolOrDef @@ -324,7 +357,7 @@ type ClientConn struct { t *Transport tconn net.Conn // usually *tls.Conn, except specialized impls tlsState *tls.ConnectionState // nil only for specialized impls - reused uint32 // whether conn is being reused; atomic + atomicReused uint32 // whether conn is being reused; atomic singleUse bool // whether being used for a single http.Request getConnCalled bool // used by clientConnPool @@ -364,6 +397,14 @@ type ClientConn struct { readIdleTimeout time.Duration pingTimeout time.Duration + // pendingResets is the number of RST_STREAM frames we have sent to the peer, + // without confirming that the peer has received them. When we send a RST_STREAM, + // we bundle it with a PING frame, unless a PING is already in flight. We count + // the reset stream against the connection's concurrency limit until we get + // a PING response. This limits the number of requests we'll try to send to a + // completely unresponsive connection. + pendingResets int + // reqHeaderMu is a 1-element semaphore channel controlling access to sending new requests. // Write to reqHeaderMu to lock it, read from it to unlock. // Lock reqmu BEFORE mu or wmu. 
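The unencrypted_http2 TLSNextProto key added above is internal plumbing for handing plain-TCP HTTP/2 ("h2c") connections from net/http to this Transport. For comparison, here is a minimal client sketch using the package's existing public knobs (AllowHTTP plus a DialTLSContext that dials plain TCP and ignores the TLS config); this is an illustration only, and the endpoint URL is hypothetical:

package main

import (
	"context"
	"crypto/tls"
	"fmt"
	"net"
	"net/http"

	"golang.org/x/net/http2"
)

func main() {
	tr := &http2.Transport{
		AllowHTTP: true, // permit http:// URLs on this Transport
		// The field name says TLS, but for h2c we dial plain TCP and ignore the config.
		DialTLSContext: func(ctx context.Context, network, addr string, _ *tls.Config) (net.Conn, error) {
			var d net.Dialer
			return d.DialContext(ctx, network, addr)
		},
	}
	client := &http.Client{Transport: tr}
	resp, err := client.Get("http://h2c-server.internal:8080/") // hypothetical h2c endpoint
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("negotiated protocol:", resp.Proto)
}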
@@ -420,12 +461,12 @@ type clientStream struct { sentHeaders bool // owned by clientConnReadLoop: - firstByte bool // got the first response byte - pastHeaders bool // got first MetaHeadersFrame (actual headers) - pastTrailers bool // got optional second MetaHeadersFrame (trailers) - num1xx uint8 // number of 1xx responses seen - readClosed bool // peer sent an END_STREAM flag - readAborted bool // read loop reset the stream + firstByte bool // got the first response byte + pastHeaders bool // got first MetaHeadersFrame (actual headers) + pastTrailers bool // got optional second MetaHeadersFrame (trailers) + readClosed bool // peer sent an END_STREAM flag + readAborted bool // read loop reset the stream + totalHeaderSize int64 // total size of 1xx headers seen trailer http.Header // accumulated trailers resTrailer *http.Header // client's Response.Trailer @@ -530,6 +571,8 @@ type RoundTripOpt struct { // no cached connection is available, RoundTripOpt // will return ErrNoCachedConn. OnlyCachedConn bool + + allowHTTP bool // allow http:// URLs } func (t *Transport) RoundTrip(req *http.Request) (*http.Response, error) { @@ -562,7 +605,14 @@ func authorityAddr(scheme string, authority string) (addr string) { // RoundTripOpt is like RoundTrip, but takes options. func (t *Transport) RoundTripOpt(req *http.Request, opt RoundTripOpt) (*http.Response, error) { - if !(req.URL.Scheme == "https" || (req.URL.Scheme == "http" && t.AllowHTTP)) { + switch req.URL.Scheme { + case "https": + // Always okay. + case "http": + if !t.AllowHTTP && !opt.allowHTTP { + return nil, errors.New("http2: unencrypted HTTP/2 not enabled") + } + default: return nil, errors.New("http2: unsupported scheme") } @@ -573,7 +623,7 @@ func (t *Transport) RoundTripOpt(req *http.Request, opt RoundTripOpt) (*http.Res t.vlogf("http2: Transport failed to get client conn for %s: %v", addr, err) return nil, err } - reused := !atomic.CompareAndSwapUint32(&cc.reused, 0, 1) + reused := !atomic.CompareAndSwapUint32(&cc.atomicReused, 0, 1) traceGotConn(req, cc, reused) res, err := cc.RoundTrip(req) if err != nil && retry <= 6 { @@ -598,6 +648,22 @@ func (t *Transport) RoundTripOpt(req *http.Request, opt RoundTripOpt) (*http.Res } } } + if err == errClientConnNotEstablished { + // This ClientConn was created recently, + // this is the first request to use it, + // and the connection is closed and not usable. + // + // In this state, cc.idleTimer will remove the conn from the pool + // when it fires. Stop the timer and remove it here so future requests + // won't try to use this connection. + // + // If the timer has already fired and we're racing it, the redundant + // call to MarkDead is harmless. 
+ if cc.idleTimer != nil { + cc.idleTimer.Stop() + } + t.connPool().MarkDead(cc) + } if err != nil { t.vlogf("RoundTrip failure: %v", err) return nil, err @@ -616,9 +682,10 @@ func (t *Transport) CloseIdleConnections() { } var ( - errClientConnClosed = errors.New("http2: client conn is closed") - errClientConnUnusable = errors.New("http2: client conn not usable") - errClientConnGotGoAway = errors.New("http2: Transport received Server's graceful shutdown GOAWAY") + errClientConnClosed = errors.New("http2: client conn is closed") + errClientConnUnusable = errors.New("http2: client conn not usable") + errClientConnNotEstablished = errors.New("http2: client conn could not be established") + errClientConnGotGoAway = errors.New("http2: Transport received Server's graceful shutdown GOAWAY") ) // shouldRetryRequest is called by RoundTrip when a request fails to get @@ -757,6 +824,7 @@ func (t *Transport) newClientConn(c net.Conn, singleUse bool) (*ClientConn, erro pingTimeout: conf.PingTimeout, pings: make(map[[8]byte]chan struct{}), reqHeaderMu: make(chan struct{}, 1), + lastActive: t.now(), } var group synctestGroupInterface if t.transportTestHooks != nil { @@ -960,7 +1028,7 @@ func (cc *ClientConn) State() ClientConnState { return ClientConnState{ Closed: cc.closed, Closing: cc.closing || cc.singleUse || cc.doNotReuse || cc.goAway != nil, - StreamsActive: len(cc.streams), + StreamsActive: len(cc.streams) + cc.pendingResets, StreamsReserved: cc.streamsReserved, StreamsPending: cc.pendingRequests, LastIdle: cc.lastIdle, @@ -992,16 +1060,38 @@ func (cc *ClientConn) idleStateLocked() (st clientConnIdleState) { // writing it. maxConcurrentOkay = true } else { - maxConcurrentOkay = int64(len(cc.streams)+cc.streamsReserved+1) <= int64(cc.maxConcurrentStreams) + // We can take a new request if the total of + // - active streams; + // - reservation slots for new streams; and + // - streams for which we have sent a RST_STREAM and a PING, + // but received no subsequent frame + // is less than the concurrency limit. + maxConcurrentOkay = cc.currentRequestCountLocked() < int(cc.maxConcurrentStreams) } st.canTakeNewRequest = cc.goAway == nil && !cc.closed && !cc.closing && maxConcurrentOkay && !cc.doNotReuse && int64(cc.nextStreamID)+2*int64(cc.pendingRequests) < math.MaxInt32 && !cc.tooIdleLocked() + + // If this connection has never been used for a request and is closed, + // then let it take a request (which will fail). + // + // This avoids a situation where an error early in a connection's lifetime + // goes unreported. + if cc.nextStreamID == 1 && cc.streamsReserved == 0 && cc.closed { + st.canTakeNewRequest = true + } + return } +// currentRequestCountLocked reports the number of concurrency slots currently in use, +// including active streams, reserved slots, and reset streams waiting for acknowledgement. +func (cc *ClientConn) currentRequestCountLocked() int { + return len(cc.streams) + cc.streamsReserved + cc.pendingResets +} + func (cc *ClientConn) canTakeNewRequestLocked() bool { st := cc.idleStateLocked() return st.canTakeNewRequest @@ -1014,7 +1104,7 @@ func (cc *ClientConn) tooIdleLocked() bool { // times are compared based on their wall time. We don't want // to reuse a connection that's been sitting idle during // VM/laptop suspend if monotonic time was also frozen. 
- return cc.idleTimeout != 0 && !cc.lastIdle.IsZero() && time.Since(cc.lastIdle.Round(0)) > cc.idleTimeout + return cc.idleTimeout != 0 && !cc.lastIdle.IsZero() && cc.t.timeSince(cc.lastIdle.Round(0)) > cc.idleTimeout } // onIdleTimeout is called from a time.AfterFunc goroutine. It will @@ -1578,6 +1668,7 @@ func (cs *clientStream) cleanupWriteRequest(err error) { cs.reqBodyClosed = make(chan struct{}) } bodyClosed := cs.reqBodyClosed + closeOnIdle := cc.singleUse || cc.doNotReuse || cc.t.disableKeepAlives() || cc.goAway != nil cc.mu.Unlock() if mustCloseBody { cs.reqBody.Close() @@ -1602,16 +1693,40 @@ func (cs *clientStream) cleanupWriteRequest(err error) { if cs.sentHeaders { if se, ok := err.(StreamError); ok { if se.Cause != errFromPeer { - cc.writeStreamReset(cs.ID, se.Code, err) + cc.writeStreamReset(cs.ID, se.Code, false, err) } } else { - cc.writeStreamReset(cs.ID, ErrCodeCancel, err) + // We're cancelling an in-flight request. + // + // This could be due to the server becoming unresponsive. + // To avoid sending too many requests on a dead connection, + // we let the request continue to consume a concurrency slot + // until we can confirm the server is still responding. + // We do this by sending a PING frame along with the RST_STREAM + // (unless a ping is already in flight). + // + // For simplicity, we don't bother tracking the PING payload: + // We reset cc.pendingResets any time we receive a PING ACK. + // + // We skip this if the conn is going to be closed on idle, + // because it's short lived and will probably be closed before + // we get the ping response. + ping := false + if !closeOnIdle { + cc.mu.Lock() + if cc.pendingResets == 0 { + ping = true + } + cc.pendingResets++ + cc.mu.Unlock() + } + cc.writeStreamReset(cs.ID, ErrCodeCancel, ping, err) } } cs.bufPipe.CloseWithError(err) // no-op if already closed } else { if cs.sentHeaders && !cs.sentEndStream { - cc.writeStreamReset(cs.ID, ErrCodeNo, nil) + cc.writeStreamReset(cs.ID, ErrCodeNo, false, nil) } cs.bufPipe.CloseWithError(errRequestCanceled) } @@ -1633,12 +1748,17 @@ func (cs *clientStream) cleanupWriteRequest(err error) { // Must hold cc.mu. func (cc *ClientConn) awaitOpenSlotForStreamLocked(cs *clientStream) error { for { - cc.lastActive = time.Now() + if cc.closed && cc.nextStreamID == 1 && cc.streamsReserved == 0 { + // This is the very first request sent to this connection. + // Return a fatal error which aborts the retry loop. + return errClientConnNotEstablished + } + cc.lastActive = cc.t.now() if cc.closed || !cc.canTakeNewRequestLocked() { return errClientConnUnusable } cc.lastIdle = time.Time{} - if int64(len(cc.streams)) < int64(cc.maxConcurrentStreams) { + if cc.currentRequestCountLocked() < int(cc.maxConcurrentStreams) { return nil } cc.pendingRequests++ @@ -2180,10 +2300,10 @@ func (cc *ClientConn) forgetStreamID(id uint32) { if len(cc.streams) != slen-1 { panic("forgetting unknown stream id") } - cc.lastActive = time.Now() + cc.lastActive = cc.t.now() if len(cc.streams) == 0 && cc.idleTimer != nil { cc.idleTimer.Reset(cc.idleTimeout) - cc.lastIdle = time.Now() + cc.lastIdle = cc.t.now() } // Wake up writeRequestBody via clientStream.awaitFlowControl and // wake up RoundTrip if there is a pending request. 
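The hunks above replace the bare len(cc.streams) check with currentRequestCountLocked, which also counts reserved slots and streams that were reset with an accompanying PING. A minimal standalone sketch of that accounting follows; the slotTracker type is hypothetical and not part of x/net/http2, it only illustrates why a cancelled request keeps holding a concurrency slot until a PING ACK arrives:

package main

import (
	"fmt"
	"sync"
)

// slotTracker mirrors the three quantities the transport sums when deciding
// whether a connection can take another request.
type slotTracker struct {
	mu            sync.Mutex
	streams       int // active streams
	reserved      int // reservation slots for new streams
	pendingResets int // RST_STREAM+PING sent, no subsequent frame received yet
	maxConcurrent int
}

func (t *slotTracker) canTakeNewRequest() bool {
	t.mu.Lock()
	defer t.mu.Unlock()
	return t.streams+t.reserved+t.pendingResets < t.maxConcurrent
}

// resetStream models cancelling an in-flight request: the stream is gone,
// but its slot moves to pendingResets instead of being freed.
func (t *slotTracker) resetStream() {
	t.mu.Lock()
	t.streams--
	t.pendingResets++
	t.mu.Unlock()
}

// pingAck models receiving any PING ACK: all pending resets are released at once.
func (t *slotTracker) pingAck() {
	t.mu.Lock()
	t.pendingResets = 0
	t.mu.Unlock()
}

func main() {
	t := &slotTracker{streams: 3, maxConcurrent: 3}
	t.resetStream()
	fmt.Println(t.canTakeNewRequest()) // false: the reset stream still holds a slot
	t.pingAck()
	fmt.Println(t.canTakeNewRequest()) // true: slots are freed once the PING is acknowledged
}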
@@ -2243,7 +2363,6 @@ func isEOFOrNetReadError(err error) bool { func (rl *clientConnReadLoop) cleanup() { cc := rl.cc - cc.t.connPool().MarkDead(cc) defer cc.closeConn() defer close(cc.readerDone) @@ -2267,6 +2386,24 @@ func (rl *clientConnReadLoop) cleanup() { } cc.closed = true + // If the connection has never been used, and has been open for only a short time, + // leave it in the connection pool for a little while. + // + // This avoids a situation where new connections are constantly created, + // added to the pool, fail, and are removed from the pool, without any error + // being surfaced to the user. + const unusedWaitTime = 5 * time.Second + idleTime := cc.t.now().Sub(cc.lastActive) + if atomic.LoadUint32(&cc.atomicReused) == 0 && idleTime < unusedWaitTime { + cc.idleTimer = cc.t.afterFunc(unusedWaitTime-idleTime, func() { + cc.t.connPool().MarkDead(cc) + }) + } else { + cc.mu.Unlock() // avoid any deadlocks in MarkDead + cc.t.connPool().MarkDead(cc) + cc.mu.Lock() + } + for _, cs := range cc.streams { select { case <-cs.peerClosed: @@ -2494,15 +2631,34 @@ func (rl *clientConnReadLoop) handleResponse(cs *clientStream, f *MetaHeadersFra if f.StreamEnded() { return nil, errors.New("1xx informational response with END_STREAM flag") } - cs.num1xx++ - const max1xxResponses = 5 // arbitrary bound on number of informational responses, same as net/http - if cs.num1xx > max1xxResponses { - return nil, errors.New("http2: too many 1xx informational responses") - } if fn := cs.get1xxTraceFunc(); fn != nil { + // If the 1xx response is being delivered to the user, + // then they're responsible for limiting the number + // of responses. if err := fn(statusCode, textproto.MIMEHeader(header)); err != nil { return nil, err } + } else { + // If the user didn't examine the 1xx response, then we + // limit the size of all 1xx headers. + // + // This differs a bit from the HTTP/1 implementation, which + // limits the size of all 1xx headers plus the final response. + // Use the larger limit of MaxHeaderListSize and + // net/http.Transport.MaxResponseHeaderBytes. + limit := int64(cs.cc.t.maxHeaderListSize()) + if t1 := cs.cc.t.t1; t1 != nil && t1.MaxResponseHeaderBytes > limit { + limit = t1.MaxResponseHeaderBytes + } + for _, h := range f.Fields { + cs.totalHeaderSize += int64(h.Size()) + } + if cs.totalHeaderSize > limit { + if VerboseLogs { + log.Printf("http2: 1xx informational responses too large") + } + return nil, errors.New("header list too large") + } } if statusCode == 100 { traceGot100Continue(cs.trace) @@ -3046,6 +3202,11 @@ func (rl *clientConnReadLoop) processPing(f *PingFrame) error { close(c) delete(cc.pings, f.Data) } + if cc.pendingResets > 0 { + // See clientStream.cleanupWriteRequest. + cc.pendingResets = 0 + cc.cond.Broadcast() + } return nil } cc := rl.cc @@ -3068,13 +3229,20 @@ func (rl *clientConnReadLoop) processPushPromise(f *PushPromiseFrame) error { return ConnectionError(ErrCodeProtocol) } -func (cc *ClientConn) writeStreamReset(streamID uint32, code ErrCode, err error) { +// writeStreamReset sends a RST_STREAM frame. +// When ping is true, it also sends a PING frame with a random payload. +func (cc *ClientConn) writeStreamReset(streamID uint32, code ErrCode, ping bool, err error) { // TODO: map err to more interesting error codes, once the // HTTP community comes up with some. But currently for // RST_STREAM there's no equivalent to GOAWAY frame's debug // data, and the error codes are all pretty vague ("cancel"). 
cc.wmu.Lock() cc.fr.WriteRSTStream(streamID, code) + if ping { + var payload [8]byte + rand.Read(payload[:]) + cc.fr.WritePing(false, payload) + } cc.bw.Flush() cc.wmu.Unlock() } @@ -3228,7 +3396,7 @@ func traceGotConn(req *http.Request, cc *ClientConn, reused bool) { cc.mu.Lock() ci.WasIdle = len(cc.streams) == 0 && reused if ci.WasIdle && !cc.lastActive.IsZero() { - ci.IdleTime = time.Since(cc.lastActive) + ci.IdleTime = cc.t.timeSince(cc.lastActive) } cc.mu.Unlock() diff --git a/vendor/golang.org/x/net/http2/unencrypted.go b/vendor/golang.org/x/net/http2/unencrypted.go new file mode 100644 index 0000000000..b2de211613 --- /dev/null +++ b/vendor/golang.org/x/net/http2/unencrypted.go @@ -0,0 +1,32 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package http2 + +import ( + "crypto/tls" + "errors" + "net" +) + +const nextProtoUnencryptedHTTP2 = "unencrypted_http2" + +// unencryptedNetConnFromTLSConn retrieves a net.Conn wrapped in a *tls.Conn. +// +// TLSNextProto functions accept a *tls.Conn. +// +// When passing an unencrypted HTTP/2 connection to a TLSNextProto function, +// we pass a *tls.Conn with an underlying net.Conn containing the unencrypted connection. +// To be extra careful about mistakes (accidentally dropping TLS encryption in a place +// where we want it), the tls.Conn contains a net.Conn with an UnencryptedNetConn method +// that returns the actual connection we want to use. +func unencryptedNetConnFromTLSConn(tc *tls.Conn) (net.Conn, error) { + conner, ok := tc.NetConn().(interface { + UnencryptedNetConn() net.Conn + }) + if !ok { + return nil, errors.New("http2: TLS conn unexpectedly found in unencrypted handoff") + } + return conner.UnencryptedNetConn(), nil +} diff --git a/vendor/golang.org/x/sys/cpu/asm_darwin_x86_gc.s b/vendor/golang.org/x/sys/cpu/asm_darwin_x86_gc.s new file mode 100644 index 0000000000..ec2acfe540 --- /dev/null +++ b/vendor/golang.org/x/sys/cpu/asm_darwin_x86_gc.s @@ -0,0 +1,17 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build darwin && amd64 && gc + +#include "textflag.h" + +TEXT libc_sysctl_trampoline<>(SB),NOSPLIT,$0-0 + JMP libc_sysctl(SB) +GLOBL ·libc_sysctl_trampoline_addr(SB), RODATA, $8 +DATA ·libc_sysctl_trampoline_addr(SB)/8, $libc_sysctl_trampoline<>(SB) + +TEXT libc_sysctlbyname_trampoline<>(SB),NOSPLIT,$0-0 + JMP libc_sysctlbyname(SB) +GLOBL ·libc_sysctlbyname_trampoline_addr(SB), RODATA, $8 +DATA ·libc_sysctlbyname_trampoline_addr(SB)/8, $libc_sysctlbyname_trampoline<>(SB) diff --git a/vendor/golang.org/x/sys/cpu/cpu_darwin_x86.go b/vendor/golang.org/x/sys/cpu/cpu_darwin_x86.go new file mode 100644 index 0000000000..b838cb9e95 --- /dev/null +++ b/vendor/golang.org/x/sys/cpu/cpu_darwin_x86.go @@ -0,0 +1,61 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build darwin && amd64 && gc + +package cpu + +// darwinSupportsAVX512 checks Darwin kernel for AVX512 support via sysctl +// call (see issue 43089). It also restricts AVX512 support for Darwin to +// kernel version 21.3.0 (MacOS 12.2.0) or later (see issue 49233). +// +// Background: +// Darwin implements a special mechanism to economize on thread state when +// AVX512 specific registers are not in use. 
This scheme minimizes state when +// preempting threads that haven't yet used any AVX512 instructions, but adds +// special requirements to check for AVX512 hardware support at runtime (e.g. +// via sysctl call or commpage inspection). See issue 43089 and link below for +// full background: +// https://github.com/apple-oss-distributions/xnu/blob/xnu-11215.1.10/osfmk/i386/fpu.c#L214-L240 +// +// Additionally, all versions of the Darwin kernel from 19.6.0 through 21.2.0 +// (corresponding to MacOS 10.15.6 - 12.1) have a bug that can cause corruption +// of the AVX512 mask registers (K0-K7) upon signal return. For this reason +// AVX512 is considered unsafe to use on Darwin for kernel versions prior to +// 21.3.0, where a fix has been confirmed. See issue 49233 for full background. +func darwinSupportsAVX512() bool { + return darwinSysctlEnabled([]byte("hw.optional.avx512f\x00")) && darwinKernelVersionCheck(21, 3, 0) +} + +// Ensure Darwin kernel version is at least major.minor.patch, avoiding dependencies +func darwinKernelVersionCheck(major, minor, patch int) bool { + var release [256]byte + err := darwinOSRelease(&release) + if err != nil { + return false + } + + var mmp [3]int + c := 0 +Loop: + for _, b := range release[:] { + switch { + case b >= '0' && b <= '9': + mmp[c] = 10*mmp[c] + int(b-'0') + case b == '.': + c++ + if c > 2 { + return false + } + case b == 0: + break Loop + default: + return false + } + } + if c != 2 { + return false + } + return mmp[0] > major || mmp[0] == major && (mmp[1] > minor || mmp[1] == minor && mmp[2] >= patch) +} diff --git a/vendor/golang.org/x/sys/cpu/cpu_gc_x86.go b/vendor/golang.org/x/sys/cpu/cpu_gc_x86.go index 910728fb16..32a44514e2 100644 --- a/vendor/golang.org/x/sys/cpu/cpu_gc_x86.go +++ b/vendor/golang.org/x/sys/cpu/cpu_gc_x86.go @@ -6,10 +6,10 @@ package cpu -// cpuid is implemented in cpu_x86.s for gc compiler +// cpuid is implemented in cpu_gc_x86.s for gc compiler // and in cpu_gccgo.c for gccgo. func cpuid(eaxArg, ecxArg uint32) (eax, ebx, ecx, edx uint32) -// xgetbv with ecx = 0 is implemented in cpu_x86.s for gc compiler +// xgetbv with ecx = 0 is implemented in cpu_gc_x86.s for gc compiler // and in cpu_gccgo.c for gccgo. 
func xgetbv() (eax, edx uint32) diff --git a/vendor/golang.org/x/sys/cpu/cpu_x86.s b/vendor/golang.org/x/sys/cpu/cpu_gc_x86.s similarity index 94% rename from vendor/golang.org/x/sys/cpu/cpu_x86.s rename to vendor/golang.org/x/sys/cpu/cpu_gc_x86.s index 7d7ba33efb..ce208ce6d6 100644 --- a/vendor/golang.org/x/sys/cpu/cpu_x86.s +++ b/vendor/golang.org/x/sys/cpu/cpu_gc_x86.s @@ -18,7 +18,7 @@ TEXT ·cpuid(SB), NOSPLIT, $0-24 RET // func xgetbv() (eax, edx uint32) -TEXT ·xgetbv(SB),NOSPLIT,$0-8 +TEXT ·xgetbv(SB), NOSPLIT, $0-8 MOVL $0, CX XGETBV MOVL AX, eax+0(FP) diff --git a/vendor/golang.org/x/sys/cpu/cpu_gccgo_x86.go b/vendor/golang.org/x/sys/cpu/cpu_gccgo_x86.go index 99c60fe9f9..170d21ddfd 100644 --- a/vendor/golang.org/x/sys/cpu/cpu_gccgo_x86.go +++ b/vendor/golang.org/x/sys/cpu/cpu_gccgo_x86.go @@ -23,9 +23,3 @@ func xgetbv() (eax, edx uint32) { gccgoXgetbv(&a, &d) return a, d } - -// gccgo doesn't build on Darwin, per: -// https://github.com/Homebrew/homebrew-core/blob/HEAD/Formula/gcc.rb#L76 -func darwinSupportsAVX512() bool { - return false -} diff --git a/vendor/golang.org/x/sys/cpu/cpu_linux_arm64.go b/vendor/golang.org/x/sys/cpu/cpu_linux_arm64.go index 08f35ea177..f1caf0f78e 100644 --- a/vendor/golang.org/x/sys/cpu/cpu_linux_arm64.go +++ b/vendor/golang.org/x/sys/cpu/cpu_linux_arm64.go @@ -110,7 +110,6 @@ func doinit() { ARM64.HasASIMDFHM = isSet(hwCap, hwcap_ASIMDFHM) ARM64.HasDIT = isSet(hwCap, hwcap_DIT) - // HWCAP2 feature bits ARM64.HasSVE2 = isSet(hwCap2, hwcap2_SVE2) ARM64.HasI8MM = isSet(hwCap2, hwcap2_I8MM) diff --git a/vendor/golang.org/x/tools/internal/versions/toolchain_go120.go b/vendor/golang.org/x/sys/cpu/cpu_other_x86.go similarity index 50% rename from vendor/golang.org/x/tools/internal/versions/toolchain_go120.go rename to vendor/golang.org/x/sys/cpu/cpu_other_x86.go index 1a9efa126c..a0fd7e2f75 100644 --- a/vendor/golang.org/x/tools/internal/versions/toolchain_go120.go +++ b/vendor/golang.org/x/sys/cpu/cpu_other_x86.go @@ -2,13 +2,10 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build go1.20 -// +build go1.20 +//go:build 386 || amd64p32 || (amd64 && (!darwin || !gc)) -package versions +package cpu -func init() { - if Compare(toolchain, Go1_20) < 0 { - toolchain = Go1_20 - } +func darwinSupportsAVX512() bool { + panic("only implemented for gc && amd64 && darwin") } diff --git a/vendor/golang.org/x/sys/cpu/cpu_x86.go b/vendor/golang.org/x/sys/cpu/cpu_x86.go index c29f5e4c5a..600a680786 100644 --- a/vendor/golang.org/x/sys/cpu/cpu_x86.go +++ b/vendor/golang.org/x/sys/cpu/cpu_x86.go @@ -92,10 +92,8 @@ func archInit() { osSupportsAVX = isSet(1, eax) && isSet(2, eax) if runtime.GOOS == "darwin" { - // Darwin doesn't save/restore AVX-512 mask registers correctly across signal handlers. - // Since users can't rely on mask register contents, let's not advertise AVX-512 support. - // See issue 49233. - osSupportsAVX512 = false + // Darwin requires special AVX512 checks, see cpu_darwin_x86.go + osSupportsAVX512 = osSupportsAVX && darwinSupportsAVX512() } else { // Check if OPMASK and ZMM registers have OS support. osSupportsAVX512 = osSupportsAVX && isSet(5, eax) && isSet(6, eax) && isSet(7, eax) diff --git a/vendor/golang.org/x/sys/cpu/syscall_darwin_x86_gc.go b/vendor/golang.org/x/sys/cpu/syscall_darwin_x86_gc.go new file mode 100644 index 0000000000..4d0888b0c0 --- /dev/null +++ b/vendor/golang.org/x/sys/cpu/syscall_darwin_x86_gc.go @@ -0,0 +1,98 @@ +// Copyright 2024 The Go Authors. 
All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Minimal copy of x/sys/unix so the cpu package can make a +// system call on Darwin without depending on x/sys/unix. + +//go:build darwin && amd64 && gc + +package cpu + +import ( + "syscall" + "unsafe" +) + +type _C_int int32 + +// adapted from unix.Uname() at x/sys/unix/syscall_darwin.go L419 +func darwinOSRelease(release *[256]byte) error { + // from x/sys/unix/zerrors_openbsd_amd64.go + const ( + CTL_KERN = 0x1 + KERN_OSRELEASE = 0x2 + ) + + mib := []_C_int{CTL_KERN, KERN_OSRELEASE} + n := unsafe.Sizeof(*release) + + return sysctl(mib, &release[0], &n, nil, 0) +} + +type Errno = syscall.Errno + +var _zero uintptr // Single-word zero for use when we need a valid pointer to 0 bytes. + +// from x/sys/unix/zsyscall_darwin_amd64.go L791-807 +func sysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) error { + var _p0 unsafe.Pointer + if len(mib) > 0 { + _p0 = unsafe.Pointer(&mib[0]) + } else { + _p0 = unsafe.Pointer(&_zero) + } + if _, _, err := syscall_syscall6( + libc_sysctl_trampoline_addr, + uintptr(_p0), + uintptr(len(mib)), + uintptr(unsafe.Pointer(old)), + uintptr(unsafe.Pointer(oldlen)), + uintptr(unsafe.Pointer(new)), + uintptr(newlen), + ); err != 0 { + return err + } + + return nil +} + +var libc_sysctl_trampoline_addr uintptr + +// adapted from internal/cpu/cpu_arm64_darwin.go +func darwinSysctlEnabled(name []byte) bool { + out := int32(0) + nout := unsafe.Sizeof(out) + if ret := sysctlbyname(&name[0], (*byte)(unsafe.Pointer(&out)), &nout, nil, 0); ret != nil { + return false + } + return out > 0 +} + +//go:cgo_import_dynamic libc_sysctl sysctl "/usr/lib/libSystem.B.dylib" + +var libc_sysctlbyname_trampoline_addr uintptr + +// adapted from runtime/sys_darwin.go in the pattern of sysctl() above, as defined in x/sys/unix +func sysctlbyname(name *byte, old *byte, oldlen *uintptr, new *byte, newlen uintptr) error { + if _, _, err := syscall_syscall6( + libc_sysctlbyname_trampoline_addr, + uintptr(unsafe.Pointer(name)), + uintptr(unsafe.Pointer(old)), + uintptr(unsafe.Pointer(oldlen)), + uintptr(unsafe.Pointer(new)), + uintptr(newlen), + 0, + ); err != 0 { + return err + } + + return nil +} + +//go:cgo_import_dynamic libc_sysctlbyname sysctlbyname "/usr/lib/libSystem.B.dylib" + +// Implemented in the runtime package (runtime/sys_darwin.go) +func syscall_syscall6(fn, a1, a2, a3, a4, a5, a6 uintptr) (r1, r2 uintptr, err Errno) + +//go:linkname syscall_syscall6 syscall.syscall6 diff --git a/vendor/golang.org/x/sys/unix/ioctl_linux.go b/vendor/golang.org/x/sys/unix/ioctl_linux.go index dbe680eab8..7ca4fa12aa 100644 --- a/vendor/golang.org/x/sys/unix/ioctl_linux.go +++ b/vendor/golang.org/x/sys/unix/ioctl_linux.go @@ -58,6 +58,102 @@ func IoctlGetEthtoolDrvinfo(fd int, ifname string) (*EthtoolDrvinfo, error) { return &value, err } +// IoctlGetEthtoolTsInfo fetches ethtool timestamping and PHC +// association for the network device specified by ifname. +func IoctlGetEthtoolTsInfo(fd int, ifname string) (*EthtoolTsInfo, error) { + ifr, err := NewIfreq(ifname) + if err != nil { + return nil, err + } + + value := EthtoolTsInfo{Cmd: ETHTOOL_GET_TS_INFO} + ifrd := ifr.withData(unsafe.Pointer(&value)) + + err = ioctlIfreqData(fd, SIOCETHTOOL, &ifrd) + return &value, err +} + +// IoctlGetHwTstamp retrieves the hardware timestamping configuration +// for the network device specified by ifname. 
+func IoctlGetHwTstamp(fd int, ifname string) (*HwTstampConfig, error) { + ifr, err := NewIfreq(ifname) + if err != nil { + return nil, err + } + + value := HwTstampConfig{} + ifrd := ifr.withData(unsafe.Pointer(&value)) + + err = ioctlIfreqData(fd, SIOCGHWTSTAMP, &ifrd) + return &value, err +} + +// IoctlSetHwTstamp updates the hardware timestamping configuration for +// the network device specified by ifname. +func IoctlSetHwTstamp(fd int, ifname string, cfg *HwTstampConfig) error { + ifr, err := NewIfreq(ifname) + if err != nil { + return err + } + ifrd := ifr.withData(unsafe.Pointer(cfg)) + return ioctlIfreqData(fd, SIOCSHWTSTAMP, &ifrd) +} + +// FdToClockID derives the clock ID from the file descriptor number +// - see clock_gettime(3), FD_TO_CLOCKID macros. The resulting ID is +// suitable for system calls like ClockGettime. +func FdToClockID(fd int) int32 { return int32((int(^fd) << 3) | 3) } + +// IoctlPtpClockGetcaps returns the description of a given PTP device. +func IoctlPtpClockGetcaps(fd int) (*PtpClockCaps, error) { + var value PtpClockCaps + err := ioctlPtr(fd, PTP_CLOCK_GETCAPS2, unsafe.Pointer(&value)) + return &value, err +} + +// IoctlPtpSysOffsetPrecise returns a description of the clock +// offset compared to the system clock. +func IoctlPtpSysOffsetPrecise(fd int) (*PtpSysOffsetPrecise, error) { + var value PtpSysOffsetPrecise + err := ioctlPtr(fd, PTP_SYS_OFFSET_PRECISE2, unsafe.Pointer(&value)) + return &value, err +} + +// IoctlPtpSysOffsetExtended returns an extended description of the +// clock offset compared to the system clock. The samples parameter +// specifies the desired number of measurements. +func IoctlPtpSysOffsetExtended(fd int, samples uint) (*PtpSysOffsetExtended, error) { + value := PtpSysOffsetExtended{Samples: uint32(samples)} + err := ioctlPtr(fd, PTP_SYS_OFFSET_EXTENDED2, unsafe.Pointer(&value)) + return &value, err +} + +// IoctlPtpPinGetfunc returns the configuration of the specified +// I/O pin on given PTP device. +func IoctlPtpPinGetfunc(fd int, index uint) (*PtpPinDesc, error) { + value := PtpPinDesc{Index: uint32(index)} + err := ioctlPtr(fd, PTP_PIN_GETFUNC2, unsafe.Pointer(&value)) + return &value, err +} + +// IoctlPtpPinSetfunc updates configuration of the specified PTP +// I/O pin. +func IoctlPtpPinSetfunc(fd int, pd *PtpPinDesc) error { + return ioctlPtr(fd, PTP_PIN_SETFUNC2, unsafe.Pointer(pd)) +} + +// IoctlPtpPeroutRequest configures the periodic output mode of the +// PTP I/O pins. +func IoctlPtpPeroutRequest(fd int, r *PtpPeroutRequest) error { + return ioctlPtr(fd, PTP_PEROUT_REQUEST2, unsafe.Pointer(r)) +} + +// IoctlPtpExttsRequest configures the external timestamping mode +// of the PTP I/O pins. +func IoctlPtpExttsRequest(fd int, r *PtpExttsRequest) error { + return ioctlPtr(fd, PTP_EXTTS_REQUEST2, unsafe.Pointer(r)) +} + // IoctlGetWatchdogInfo fetches information about a watchdog device from the // Linux watchdog API. For more information, see: // https://www.kernel.org/doc/html/latest/watchdog/watchdog-api.html. 
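The ioctl wrappers above are thin pass-throughs to the Linux PTP character-device interface. A usage sketch, under the assumption that a PTP clock is exposed as /dev/ptp0 and the process is allowed to open it (the device path and printed fields are illustrative choices, not requirements of the API):

package main

import (
	"fmt"
	"log"

	"golang.org/x/sys/unix"
)

func main() {
	fd, err := unix.Open("/dev/ptp0", unix.O_RDWR, 0)
	if err != nil {
		log.Fatal(err)
	}
	defer unix.Close(fd)

	// Query the clock's capabilities via the new wrapper.
	caps, err := unix.IoctlPtpClockGetcaps(fd)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("PTP clock: %d pins, max adjustment %d ppb\n", caps.N_pins, caps.Max_adj)

	// FdToClockID turns the descriptor into a clock ID usable with ClockGettime.
	var ts unix.Timespec
	if err := unix.ClockGettime(unix.FdToClockID(fd), &ts); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("PTP time: %d.%09d\n", ts.Sec, ts.Nsec)
}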
diff --git a/vendor/golang.org/x/sys/unix/mkerrors.sh b/vendor/golang.org/x/sys/unix/mkerrors.sh index ac54ecaba0..6ab02b6c31 100644 --- a/vendor/golang.org/x/sys/unix/mkerrors.sh +++ b/vendor/golang.org/x/sys/unix/mkerrors.sh @@ -158,6 +158,16 @@ includes_Linux=' #endif #define _GNU_SOURCE +// See the description in unix/linux/types.go +#if defined(__ARM_EABI__) || \ + (defined(__mips__) && (_MIPS_SIM == _ABIO32)) || \ + (defined(__powerpc__) && (!defined(__powerpc64__))) +# ifdef _TIME_BITS +# undef _TIME_BITS +# endif +# define _TIME_BITS 32 +#endif + // is broken on powerpc64, as it fails to include definitions of // these structures. We just include them copied from . #if defined(__powerpc__) @@ -256,6 +266,7 @@ struct ltchars { #include #include #include +#include #include #include #include @@ -527,6 +538,7 @@ ccflags="$@" $2 ~ /^(AF|SOCK|SO|SOL|IPPROTO|IP|IPV6|TCP|MCAST|EVFILT|NOTE|SHUT|PROT|MAP|MREMAP|MFD|T?PACKET|MSG|SCM|MCL|DT|MADV|PR|LOCAL|TCPOPT|UDP)_/ || $2 ~ /^NFC_(GENL|PROTO|COMM|RF|SE|DIRECTION|LLCP|SOCKPROTO)_/ || $2 ~ /^NFC_.*_(MAX)?SIZE$/ || + $2 ~ /^PTP_/ || $2 ~ /^RAW_PAYLOAD_/ || $2 ~ /^[US]F_/ || $2 ~ /^TP_STATUS_/ || diff --git a/vendor/golang.org/x/sys/unix/syscall_linux.go b/vendor/golang.org/x/sys/unix/syscall_linux.go index f08abd434f..230a94549a 100644 --- a/vendor/golang.org/x/sys/unix/syscall_linux.go +++ b/vendor/golang.org/x/sys/unix/syscall_linux.go @@ -1860,6 +1860,7 @@ func Sendfile(outfd int, infd int, offset *int64, count int) (written int, err e //sys ClockAdjtime(clockid int32, buf *Timex) (state int, err error) //sys ClockGetres(clockid int32, res *Timespec) (err error) //sys ClockGettime(clockid int32, time *Timespec) (err error) +//sys ClockSettime(clockid int32, time *Timespec) (err error) //sys ClockNanosleep(clockid int32, flags int, request *Timespec, remain *Timespec) (err error) //sys Close(fd int) (err error) //sys CloseRange(first uint, last uint, flags uint) (err error) diff --git a/vendor/golang.org/x/sys/unix/syscall_zos_s390x.go b/vendor/golang.org/x/sys/unix/syscall_zos_s390x.go index 312ae6ac1d..7bf5c04bb0 100644 --- a/vendor/golang.org/x/sys/unix/syscall_zos_s390x.go +++ b/vendor/golang.org/x/sys/unix/syscall_zos_s390x.go @@ -768,6 +768,15 @@ func Munmap(b []byte) (err error) { return mapper.Munmap(b) } +func MmapPtr(fd int, offset int64, addr unsafe.Pointer, length uintptr, prot int, flags int) (ret unsafe.Pointer, err error) { + xaddr, err := mapper.mmap(uintptr(addr), length, prot, flags, fd, offset) + return unsafe.Pointer(xaddr), err +} + +func MunmapPtr(addr unsafe.Pointer, length uintptr) (err error) { + return mapper.munmap(uintptr(addr), length) +} + //sys Gethostname(buf []byte) (err error) = SYS___GETHOSTNAME_A //sysnb Getgid() (gid int) //sysnb Getpid() (pid int) @@ -816,10 +825,10 @@ func Lstat(path string, stat *Stat_t) (err error) { // for checking symlinks begins with $VERSION/ $SYSNAME/ $SYSSYMR/ $SYSSYMA/ func isSpecialPath(path []byte) (v bool) { var special = [4][8]byte{ - [8]byte{'V', 'E', 'R', 'S', 'I', 'O', 'N', '/'}, - [8]byte{'S', 'Y', 'S', 'N', 'A', 'M', 'E', '/'}, - [8]byte{'S', 'Y', 'S', 'S', 'Y', 'M', 'R', '/'}, - [8]byte{'S', 'Y', 'S', 'S', 'Y', 'M', 'A', '/'}} + {'V', 'E', 'R', 'S', 'I', 'O', 'N', '/'}, + {'S', 'Y', 'S', 'N', 'A', 'M', 'E', '/'}, + {'S', 'Y', 'S', 'S', 'Y', 'M', 'R', '/'}, + {'S', 'Y', 'S', 'S', 'Y', 'M', 'A', '/'}} var i, j int for i = 0; i < len(special); i++ { @@ -3115,3 +3124,90 @@ func legacy_Mkfifoat(dirfd int, path string, mode uint32) (err error) { //sys Posix_openpt(oflag int) 
(fd int, err error) = SYS_POSIX_OPENPT //sys Grantpt(fildes int) (rc int, err error) = SYS_GRANTPT //sys Unlockpt(fildes int) (rc int, err error) = SYS_UNLOCKPT + +func fcntlAsIs(fd uintptr, cmd int, arg uintptr) (val int, err error) { + runtime.EnterSyscall() + r0, e2, e1 := CallLeFuncWithErr(GetZosLibVec()+SYS_FCNTL<<4, uintptr(fd), uintptr(cmd), arg) + runtime.ExitSyscall() + val = int(r0) + if int64(r0) == -1 { + err = errnoErr2(e1, e2) + } + return +} + +func Fcntl(fd uintptr, cmd int, op interface{}) (ret int, err error) { + switch op.(type) { + case *Flock_t: + err = FcntlFlock(fd, cmd, op.(*Flock_t)) + if err != nil { + ret = -1 + } + return + case int: + return FcntlInt(fd, cmd, op.(int)) + case *F_cnvrt: + return fcntlAsIs(fd, cmd, uintptr(unsafe.Pointer(op.(*F_cnvrt)))) + case unsafe.Pointer: + return fcntlAsIs(fd, cmd, uintptr(op.(unsafe.Pointer))) + default: + return -1, EINVAL + } + return +} + +func Sendfile(outfd int, infd int, offset *int64, count int) (written int, err error) { + if raceenabled { + raceReleaseMerge(unsafe.Pointer(&ioSync)) + } + return sendfile(outfd, infd, offset, count) +} + +func sendfile(outfd int, infd int, offset *int64, count int) (written int, err error) { + // TODO: use LE call instead if the call is implemented + originalOffset, err := Seek(infd, 0, SEEK_CUR) + if err != nil { + return -1, err + } + //start reading data from in_fd + if offset != nil { + _, err := Seek(infd, *offset, SEEK_SET) + if err != nil { + return -1, err + } + } + + buf := make([]byte, count) + readBuf := make([]byte, 0) + var n int = 0 + for i := 0; i < count; i += n { + n, err := Read(infd, buf) + if n == 0 { + if err != nil { + return -1, err + } else { // EOF + break + } + } + readBuf = append(readBuf, buf...) + buf = buf[0:0] + } + + n2, err := Write(outfd, readBuf) + if err != nil { + return -1, err + } + + //When sendfile() returns, this variable will be set to the + // offset of the byte following the last byte that was read. 
+ if offset != nil { + *offset = *offset + int64(n) + // If offset is not NULL, then sendfile() does not modify the file + // offset of in_fd + _, err := Seek(infd, originalOffset, SEEK_SET) + if err != nil { + return -1, err + } + } + return n2, nil +} diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux.go b/vendor/golang.org/x/sys/unix/zerrors_linux.go index de3b462489..ccba391c9f 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux.go @@ -2625,6 +2625,28 @@ const ( PR_UNALIGN_NOPRINT = 0x1 PR_UNALIGN_SIGBUS = 0x2 PSTOREFS_MAGIC = 0x6165676c + PTP_CLK_MAGIC = '=' + PTP_ENABLE_FEATURE = 0x1 + PTP_EXTTS_EDGES = 0x6 + PTP_EXTTS_EVENT_VALID = 0x1 + PTP_EXTTS_V1_VALID_FLAGS = 0x7 + PTP_EXTTS_VALID_FLAGS = 0x1f + PTP_EXT_OFFSET = 0x10 + PTP_FALLING_EDGE = 0x4 + PTP_MAX_SAMPLES = 0x19 + PTP_PEROUT_DUTY_CYCLE = 0x2 + PTP_PEROUT_ONE_SHOT = 0x1 + PTP_PEROUT_PHASE = 0x4 + PTP_PEROUT_V1_VALID_FLAGS = 0x0 + PTP_PEROUT_VALID_FLAGS = 0x7 + PTP_PIN_GETFUNC = 0xc0603d06 + PTP_PIN_GETFUNC2 = 0xc0603d0f + PTP_RISING_EDGE = 0x2 + PTP_STRICT_FLAGS = 0x8 + PTP_SYS_OFFSET_EXTENDED = 0xc4c03d09 + PTP_SYS_OFFSET_EXTENDED2 = 0xc4c03d12 + PTP_SYS_OFFSET_PRECISE = 0xc0403d08 + PTP_SYS_OFFSET_PRECISE2 = 0xc0403d11 PTRACE_ATTACH = 0x10 PTRACE_CONT = 0x7 PTRACE_DETACH = 0x11 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_386.go b/vendor/golang.org/x/sys/unix/zerrors_linux_386.go index 8aa6d77c01..0c00cb3f3a 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_386.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_386.go @@ -237,6 +237,20 @@ const ( PPPIOCUNBRIDGECHAN = 0x7434 PPPIOCXFERUNIT = 0x744e PR_SET_PTRACER_ANY = 0xffffffff + PTP_CLOCK_GETCAPS = 0x80503d01 + PTP_CLOCK_GETCAPS2 = 0x80503d0a + PTP_ENABLE_PPS = 0x40043d04 + PTP_ENABLE_PPS2 = 0x40043d0d + PTP_EXTTS_REQUEST = 0x40103d02 + PTP_EXTTS_REQUEST2 = 0x40103d0b + PTP_MASK_CLEAR_ALL = 0x3d13 + PTP_MASK_EN_SINGLE = 0x40043d14 + PTP_PEROUT_REQUEST = 0x40383d03 + PTP_PEROUT_REQUEST2 = 0x40383d0c + PTP_PIN_SETFUNC = 0x40603d07 + PTP_PIN_SETFUNC2 = 0x40603d10 + PTP_SYS_OFFSET = 0x43403d05 + PTP_SYS_OFFSET2 = 0x43403d0e PTRACE_GETFPREGS = 0xe PTRACE_GETFPXREGS = 0x12 PTRACE_GET_THREAD_AREA = 0x19 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_amd64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_amd64.go index da428f4253..dfb364554d 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_amd64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_amd64.go @@ -237,6 +237,20 @@ const ( PPPIOCUNBRIDGECHAN = 0x7434 PPPIOCXFERUNIT = 0x744e PR_SET_PTRACER_ANY = 0xffffffffffffffff + PTP_CLOCK_GETCAPS = 0x80503d01 + PTP_CLOCK_GETCAPS2 = 0x80503d0a + PTP_ENABLE_PPS = 0x40043d04 + PTP_ENABLE_PPS2 = 0x40043d0d + PTP_EXTTS_REQUEST = 0x40103d02 + PTP_EXTTS_REQUEST2 = 0x40103d0b + PTP_MASK_CLEAR_ALL = 0x3d13 + PTP_MASK_EN_SINGLE = 0x40043d14 + PTP_PEROUT_REQUEST = 0x40383d03 + PTP_PEROUT_REQUEST2 = 0x40383d0c + PTP_PIN_SETFUNC = 0x40603d07 + PTP_PIN_SETFUNC2 = 0x40603d10 + PTP_SYS_OFFSET = 0x43403d05 + PTP_SYS_OFFSET2 = 0x43403d0e PTRACE_ARCH_PRCTL = 0x1e PTRACE_GETFPREGS = 0xe PTRACE_GETFPXREGS = 0x12 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_arm.go b/vendor/golang.org/x/sys/unix/zerrors_linux_arm.go index bf45bfec78..d46dcf78ab 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_arm.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_arm.go @@ -234,6 +234,20 @@ const ( PPPIOCUNBRIDGECHAN = 0x7434 PPPIOCXFERUNIT = 0x744e PR_SET_PTRACER_ANY = 0xffffffff + PTP_CLOCK_GETCAPS = 
0x80503d01 + PTP_CLOCK_GETCAPS2 = 0x80503d0a + PTP_ENABLE_PPS = 0x40043d04 + PTP_ENABLE_PPS2 = 0x40043d0d + PTP_EXTTS_REQUEST = 0x40103d02 + PTP_EXTTS_REQUEST2 = 0x40103d0b + PTP_MASK_CLEAR_ALL = 0x3d13 + PTP_MASK_EN_SINGLE = 0x40043d14 + PTP_PEROUT_REQUEST = 0x40383d03 + PTP_PEROUT_REQUEST2 = 0x40383d0c + PTP_PIN_SETFUNC = 0x40603d07 + PTP_PIN_SETFUNC2 = 0x40603d10 + PTP_SYS_OFFSET = 0x43403d05 + PTP_SYS_OFFSET2 = 0x43403d0e PTRACE_GETCRUNCHREGS = 0x19 PTRACE_GETFDPIC = 0x1f PTRACE_GETFDPIC_EXEC = 0x0 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_arm64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_arm64.go index 71c67162b7..3af3248a7f 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_arm64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_arm64.go @@ -240,6 +240,20 @@ const ( PROT_BTI = 0x10 PROT_MTE = 0x20 PR_SET_PTRACER_ANY = 0xffffffffffffffff + PTP_CLOCK_GETCAPS = 0x80503d01 + PTP_CLOCK_GETCAPS2 = 0x80503d0a + PTP_ENABLE_PPS = 0x40043d04 + PTP_ENABLE_PPS2 = 0x40043d0d + PTP_EXTTS_REQUEST = 0x40103d02 + PTP_EXTTS_REQUEST2 = 0x40103d0b + PTP_MASK_CLEAR_ALL = 0x3d13 + PTP_MASK_EN_SINGLE = 0x40043d14 + PTP_PEROUT_REQUEST = 0x40383d03 + PTP_PEROUT_REQUEST2 = 0x40383d0c + PTP_PIN_SETFUNC = 0x40603d07 + PTP_PIN_SETFUNC2 = 0x40603d10 + PTP_SYS_OFFSET = 0x43403d05 + PTP_SYS_OFFSET2 = 0x43403d0e PTRACE_PEEKMTETAGS = 0x21 PTRACE_POKEMTETAGS = 0x22 PTRACE_SYSEMU = 0x1f diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_loong64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_loong64.go index 9476628fa0..292bcf0283 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_loong64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_loong64.go @@ -238,6 +238,20 @@ const ( PPPIOCUNBRIDGECHAN = 0x7434 PPPIOCXFERUNIT = 0x744e PR_SET_PTRACER_ANY = 0xffffffffffffffff + PTP_CLOCK_GETCAPS = 0x80503d01 + PTP_CLOCK_GETCAPS2 = 0x80503d0a + PTP_ENABLE_PPS = 0x40043d04 + PTP_ENABLE_PPS2 = 0x40043d0d + PTP_EXTTS_REQUEST = 0x40103d02 + PTP_EXTTS_REQUEST2 = 0x40103d0b + PTP_MASK_CLEAR_ALL = 0x3d13 + PTP_MASK_EN_SINGLE = 0x40043d14 + PTP_PEROUT_REQUEST = 0x40383d03 + PTP_PEROUT_REQUEST2 = 0x40383d0c + PTP_PIN_SETFUNC = 0x40603d07 + PTP_PIN_SETFUNC2 = 0x40603d10 + PTP_SYS_OFFSET = 0x43403d05 + PTP_SYS_OFFSET2 = 0x43403d0e PTRACE_SYSEMU = 0x1f PTRACE_SYSEMU_SINGLESTEP = 0x20 RLIMIT_AS = 0x9 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_mips.go b/vendor/golang.org/x/sys/unix/zerrors_linux_mips.go index b9e85f3cf0..782b7110fa 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_mips.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_mips.go @@ -234,6 +234,20 @@ const ( PPPIOCUNBRIDGECHAN = 0x20007434 PPPIOCXFERUNIT = 0x2000744e PR_SET_PTRACER_ANY = 0xffffffff + PTP_CLOCK_GETCAPS = 0x40503d01 + PTP_CLOCK_GETCAPS2 = 0x40503d0a + PTP_ENABLE_PPS = 0x80043d04 + PTP_ENABLE_PPS2 = 0x80043d0d + PTP_EXTTS_REQUEST = 0x80103d02 + PTP_EXTTS_REQUEST2 = 0x80103d0b + PTP_MASK_CLEAR_ALL = 0x20003d13 + PTP_MASK_EN_SINGLE = 0x80043d14 + PTP_PEROUT_REQUEST = 0x80383d03 + PTP_PEROUT_REQUEST2 = 0x80383d0c + PTP_PIN_SETFUNC = 0x80603d07 + PTP_PIN_SETFUNC2 = 0x80603d10 + PTP_SYS_OFFSET = 0x83403d05 + PTP_SYS_OFFSET2 = 0x83403d0e PTRACE_GETFPREGS = 0xe PTRACE_GET_THREAD_AREA = 0x19 PTRACE_GET_THREAD_AREA_3264 = 0xc4 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_mips64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_mips64.go index a48b68a764..84973fd927 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_mips64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_mips64.go @@ -234,6 +234,20 @@ 
const ( PPPIOCUNBRIDGECHAN = 0x20007434 PPPIOCXFERUNIT = 0x2000744e PR_SET_PTRACER_ANY = 0xffffffffffffffff + PTP_CLOCK_GETCAPS = 0x40503d01 + PTP_CLOCK_GETCAPS2 = 0x40503d0a + PTP_ENABLE_PPS = 0x80043d04 + PTP_ENABLE_PPS2 = 0x80043d0d + PTP_EXTTS_REQUEST = 0x80103d02 + PTP_EXTTS_REQUEST2 = 0x80103d0b + PTP_MASK_CLEAR_ALL = 0x20003d13 + PTP_MASK_EN_SINGLE = 0x80043d14 + PTP_PEROUT_REQUEST = 0x80383d03 + PTP_PEROUT_REQUEST2 = 0x80383d0c + PTP_PIN_SETFUNC = 0x80603d07 + PTP_PIN_SETFUNC2 = 0x80603d10 + PTP_SYS_OFFSET = 0x83403d05 + PTP_SYS_OFFSET2 = 0x83403d0e PTRACE_GETFPREGS = 0xe PTRACE_GET_THREAD_AREA = 0x19 PTRACE_GET_THREAD_AREA_3264 = 0xc4 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_mips64le.go b/vendor/golang.org/x/sys/unix/zerrors_linux_mips64le.go index ea00e8522a..6d9cbc3b27 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_mips64le.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_mips64le.go @@ -234,6 +234,20 @@ const ( PPPIOCUNBRIDGECHAN = 0x20007434 PPPIOCXFERUNIT = 0x2000744e PR_SET_PTRACER_ANY = 0xffffffffffffffff + PTP_CLOCK_GETCAPS = 0x40503d01 + PTP_CLOCK_GETCAPS2 = 0x40503d0a + PTP_ENABLE_PPS = 0x80043d04 + PTP_ENABLE_PPS2 = 0x80043d0d + PTP_EXTTS_REQUEST = 0x80103d02 + PTP_EXTTS_REQUEST2 = 0x80103d0b + PTP_MASK_CLEAR_ALL = 0x20003d13 + PTP_MASK_EN_SINGLE = 0x80043d14 + PTP_PEROUT_REQUEST = 0x80383d03 + PTP_PEROUT_REQUEST2 = 0x80383d0c + PTP_PIN_SETFUNC = 0x80603d07 + PTP_PIN_SETFUNC2 = 0x80603d10 + PTP_SYS_OFFSET = 0x83403d05 + PTP_SYS_OFFSET2 = 0x83403d0e PTRACE_GETFPREGS = 0xe PTRACE_GET_THREAD_AREA = 0x19 PTRACE_GET_THREAD_AREA_3264 = 0xc4 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_mipsle.go b/vendor/golang.org/x/sys/unix/zerrors_linux_mipsle.go index 91c6468717..5f9fedbce0 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_mipsle.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_mipsle.go @@ -234,6 +234,20 @@ const ( PPPIOCUNBRIDGECHAN = 0x20007434 PPPIOCXFERUNIT = 0x2000744e PR_SET_PTRACER_ANY = 0xffffffff + PTP_CLOCK_GETCAPS = 0x40503d01 + PTP_CLOCK_GETCAPS2 = 0x40503d0a + PTP_ENABLE_PPS = 0x80043d04 + PTP_ENABLE_PPS2 = 0x80043d0d + PTP_EXTTS_REQUEST = 0x80103d02 + PTP_EXTTS_REQUEST2 = 0x80103d0b + PTP_MASK_CLEAR_ALL = 0x20003d13 + PTP_MASK_EN_SINGLE = 0x80043d14 + PTP_PEROUT_REQUEST = 0x80383d03 + PTP_PEROUT_REQUEST2 = 0x80383d0c + PTP_PIN_SETFUNC = 0x80603d07 + PTP_PIN_SETFUNC2 = 0x80603d10 + PTP_SYS_OFFSET = 0x83403d05 + PTP_SYS_OFFSET2 = 0x83403d0e PTRACE_GETFPREGS = 0xe PTRACE_GET_THREAD_AREA = 0x19 PTRACE_GET_THREAD_AREA_3264 = 0xc4 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc.go b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc.go index 8cbf38d639..bb0026ee0c 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc.go @@ -237,6 +237,20 @@ const ( PPPIOCXFERUNIT = 0x2000744e PROT_SAO = 0x10 PR_SET_PTRACER_ANY = 0xffffffff + PTP_CLOCK_GETCAPS = 0x40503d01 + PTP_CLOCK_GETCAPS2 = 0x40503d0a + PTP_ENABLE_PPS = 0x80043d04 + PTP_ENABLE_PPS2 = 0x80043d0d + PTP_EXTTS_REQUEST = 0x80103d02 + PTP_EXTTS_REQUEST2 = 0x80103d0b + PTP_MASK_CLEAR_ALL = 0x20003d13 + PTP_MASK_EN_SINGLE = 0x80043d14 + PTP_PEROUT_REQUEST = 0x80383d03 + PTP_PEROUT_REQUEST2 = 0x80383d0c + PTP_PIN_SETFUNC = 0x80603d07 + PTP_PIN_SETFUNC2 = 0x80603d10 + PTP_SYS_OFFSET = 0x83403d05 + PTP_SYS_OFFSET2 = 0x83403d0e PTRACE_GETEVRREGS = 0x14 PTRACE_GETFPREGS = 0xe PTRACE_GETREGS64 = 0x16 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64.go 
b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64.go index a2df734191..46120db5c9 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64.go @@ -237,6 +237,20 @@ const ( PPPIOCXFERUNIT = 0x2000744e PROT_SAO = 0x10 PR_SET_PTRACER_ANY = 0xffffffffffffffff + PTP_CLOCK_GETCAPS = 0x40503d01 + PTP_CLOCK_GETCAPS2 = 0x40503d0a + PTP_ENABLE_PPS = 0x80043d04 + PTP_ENABLE_PPS2 = 0x80043d0d + PTP_EXTTS_REQUEST = 0x80103d02 + PTP_EXTTS_REQUEST2 = 0x80103d0b + PTP_MASK_CLEAR_ALL = 0x20003d13 + PTP_MASK_EN_SINGLE = 0x80043d14 + PTP_PEROUT_REQUEST = 0x80383d03 + PTP_PEROUT_REQUEST2 = 0x80383d0c + PTP_PIN_SETFUNC = 0x80603d07 + PTP_PIN_SETFUNC2 = 0x80603d10 + PTP_SYS_OFFSET = 0x83403d05 + PTP_SYS_OFFSET2 = 0x83403d0e PTRACE_GETEVRREGS = 0x14 PTRACE_GETFPREGS = 0xe PTRACE_GETREGS64 = 0x16 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64le.go b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64le.go index 2479137923..5c951634fb 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64le.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_ppc64le.go @@ -237,6 +237,20 @@ const ( PPPIOCXFERUNIT = 0x2000744e PROT_SAO = 0x10 PR_SET_PTRACER_ANY = 0xffffffffffffffff + PTP_CLOCK_GETCAPS = 0x40503d01 + PTP_CLOCK_GETCAPS2 = 0x40503d0a + PTP_ENABLE_PPS = 0x80043d04 + PTP_ENABLE_PPS2 = 0x80043d0d + PTP_EXTTS_REQUEST = 0x80103d02 + PTP_EXTTS_REQUEST2 = 0x80103d0b + PTP_MASK_CLEAR_ALL = 0x20003d13 + PTP_MASK_EN_SINGLE = 0x80043d14 + PTP_PEROUT_REQUEST = 0x80383d03 + PTP_PEROUT_REQUEST2 = 0x80383d0c + PTP_PIN_SETFUNC = 0x80603d07 + PTP_PIN_SETFUNC2 = 0x80603d10 + PTP_SYS_OFFSET = 0x83403d05 + PTP_SYS_OFFSET2 = 0x83403d0e PTRACE_GETEVRREGS = 0x14 PTRACE_GETFPREGS = 0xe PTRACE_GETREGS64 = 0x16 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_riscv64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_riscv64.go index d265f146ee..11a84d5af2 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_riscv64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_riscv64.go @@ -234,6 +234,20 @@ const ( PPPIOCUNBRIDGECHAN = 0x7434 PPPIOCXFERUNIT = 0x744e PR_SET_PTRACER_ANY = 0xffffffffffffffff + PTP_CLOCK_GETCAPS = 0x80503d01 + PTP_CLOCK_GETCAPS2 = 0x80503d0a + PTP_ENABLE_PPS = 0x40043d04 + PTP_ENABLE_PPS2 = 0x40043d0d + PTP_EXTTS_REQUEST = 0x40103d02 + PTP_EXTTS_REQUEST2 = 0x40103d0b + PTP_MASK_CLEAR_ALL = 0x3d13 + PTP_MASK_EN_SINGLE = 0x40043d14 + PTP_PEROUT_REQUEST = 0x40383d03 + PTP_PEROUT_REQUEST2 = 0x40383d0c + PTP_PIN_SETFUNC = 0x40603d07 + PTP_PIN_SETFUNC2 = 0x40603d10 + PTP_SYS_OFFSET = 0x43403d05 + PTP_SYS_OFFSET2 = 0x43403d0e PTRACE_GETFDPIC = 0x21 PTRACE_GETFDPIC_EXEC = 0x0 PTRACE_GETFDPIC_INTERP = 0x1 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_s390x.go b/vendor/golang.org/x/sys/unix/zerrors_linux_s390x.go index 3f2d644396..f78c4617ca 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_s390x.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_s390x.go @@ -234,6 +234,20 @@ const ( PPPIOCUNBRIDGECHAN = 0x7434 PPPIOCXFERUNIT = 0x744e PR_SET_PTRACER_ANY = 0xffffffffffffffff + PTP_CLOCK_GETCAPS = 0x80503d01 + PTP_CLOCK_GETCAPS2 = 0x80503d0a + PTP_ENABLE_PPS = 0x40043d04 + PTP_ENABLE_PPS2 = 0x40043d0d + PTP_EXTTS_REQUEST = 0x40103d02 + PTP_EXTTS_REQUEST2 = 0x40103d0b + PTP_MASK_CLEAR_ALL = 0x3d13 + PTP_MASK_EN_SINGLE = 0x40043d14 + PTP_PEROUT_REQUEST = 0x40383d03 + PTP_PEROUT_REQUEST2 = 0x40383d0c + PTP_PIN_SETFUNC = 0x40603d07 + PTP_PIN_SETFUNC2 = 0x40603d10 + PTP_SYS_OFFSET = 0x43403d05 + PTP_SYS_OFFSET2 = 0x43403d0e 
PTRACE_DISABLE_TE = 0x5010 PTRACE_ENABLE_TE = 0x5009 PTRACE_GET_LAST_BREAK = 0x5006 diff --git a/vendor/golang.org/x/sys/unix/zerrors_linux_sparc64.go b/vendor/golang.org/x/sys/unix/zerrors_linux_sparc64.go index 5d8b727a1c..aeb777c344 100644 --- a/vendor/golang.org/x/sys/unix/zerrors_linux_sparc64.go +++ b/vendor/golang.org/x/sys/unix/zerrors_linux_sparc64.go @@ -239,6 +239,20 @@ const ( PPPIOCUNBRIDGECHAN = 0x20007434 PPPIOCXFERUNIT = 0x2000744e PR_SET_PTRACER_ANY = 0xffffffffffffffff + PTP_CLOCK_GETCAPS = 0x40503d01 + PTP_CLOCK_GETCAPS2 = 0x40503d0a + PTP_ENABLE_PPS = 0x80043d04 + PTP_ENABLE_PPS2 = 0x80043d0d + PTP_EXTTS_REQUEST = 0x80103d02 + PTP_EXTTS_REQUEST2 = 0x80103d0b + PTP_MASK_CLEAR_ALL = 0x20003d13 + PTP_MASK_EN_SINGLE = 0x80043d14 + PTP_PEROUT_REQUEST = 0x80383d03 + PTP_PEROUT_REQUEST2 = 0x80383d0c + PTP_PIN_SETFUNC = 0x80603d07 + PTP_PIN_SETFUNC2 = 0x80603d10 + PTP_SYS_OFFSET = 0x83403d05 + PTP_SYS_OFFSET2 = 0x83403d0e PTRACE_GETFPAREGS = 0x14 PTRACE_GETFPREGS = 0xe PTRACE_GETFPREGS64 = 0x19 diff --git a/vendor/golang.org/x/sys/unix/zsyscall_linux.go b/vendor/golang.org/x/sys/unix/zsyscall_linux.go index af30da5578..5cc1e8eb2f 100644 --- a/vendor/golang.org/x/sys/unix/zsyscall_linux.go +++ b/vendor/golang.org/x/sys/unix/zsyscall_linux.go @@ -592,6 +592,16 @@ func ClockGettime(clockid int32, time *Timespec) (err error) { // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT +func ClockSettime(clockid int32, time *Timespec) (err error) { + _, _, e1 := Syscall(SYS_CLOCK_SETTIME, uintptr(clockid), uintptr(unsafe.Pointer(time)), 0) + if e1 != 0 { + err = errnoErr(e1) + } + return +} + +// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT + func ClockNanosleep(clockid int32, flags int, request *Timespec, remain *Timespec) (err error) { _, _, e1 := Syscall6(SYS_CLOCK_NANOSLEEP, uintptr(clockid), uintptr(flags), uintptr(unsafe.Pointer(request)), uintptr(unsafe.Pointer(remain)), 0, 0) if e1 != 0 { diff --git a/vendor/golang.org/x/sys/unix/ztypes_linux.go b/vendor/golang.org/x/sys/unix/ztypes_linux.go index 3a69e45496..8daaf3faf4 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_linux.go +++ b/vendor/golang.org/x/sys/unix/ztypes_linux.go @@ -1752,12 +1752,6 @@ const ( IFLA_IPVLAN_UNSPEC = 0x0 IFLA_IPVLAN_MODE = 0x1 IFLA_IPVLAN_FLAGS = 0x2 - NETKIT_NEXT = -0x1 - NETKIT_PASS = 0x0 - NETKIT_DROP = 0x2 - NETKIT_REDIRECT = 0x7 - NETKIT_L2 = 0x0 - NETKIT_L3 = 0x1 IFLA_NETKIT_UNSPEC = 0x0 IFLA_NETKIT_PEER_INFO = 0x1 IFLA_NETKIT_PRIMARY = 0x2 @@ -1796,6 +1790,7 @@ const ( IFLA_VXLAN_DF = 0x1d IFLA_VXLAN_VNIFILTER = 0x1e IFLA_VXLAN_LOCALBYPASS = 0x1f + IFLA_VXLAN_LABEL_POLICY = 0x20 IFLA_GENEVE_UNSPEC = 0x0 IFLA_GENEVE_ID = 0x1 IFLA_GENEVE_REMOTE = 0x2 @@ -1825,6 +1820,8 @@ const ( IFLA_GTP_ROLE = 0x4 IFLA_GTP_CREATE_SOCKETS = 0x5 IFLA_GTP_RESTART_COUNT = 0x6 + IFLA_GTP_LOCAL = 0x7 + IFLA_GTP_LOCAL6 = 0x8 IFLA_BOND_UNSPEC = 0x0 IFLA_BOND_MODE = 0x1 IFLA_BOND_ACTIVE_SLAVE = 0x2 @@ -1857,6 +1854,7 @@ const ( IFLA_BOND_AD_LACP_ACTIVE = 0x1d IFLA_BOND_MISSED_MAX = 0x1e IFLA_BOND_NS_IP6_TARGET = 0x1f + IFLA_BOND_COUPLED_CONTROL = 0x20 IFLA_BOND_AD_INFO_UNSPEC = 0x0 IFLA_BOND_AD_INFO_AGGREGATOR = 0x1 IFLA_BOND_AD_INFO_NUM_PORTS = 0x2 @@ -1925,6 +1923,7 @@ const ( IFLA_HSR_SEQ_NR = 0x5 IFLA_HSR_VERSION = 0x6 IFLA_HSR_PROTOCOL = 0x7 + IFLA_HSR_INTERLINK = 0x8 IFLA_STATS_UNSPEC = 0x0 IFLA_STATS_LINK_64 = 0x1 IFLA_STATS_LINK_XSTATS = 0x2 @@ -1977,6 +1976,15 @@ const ( IFLA_DSA_MASTER = 0x1 ) +const ( + NETKIT_NEXT = -0x1 + NETKIT_PASS = 0x0 + NETKIT_DROP = 0x2 + NETKIT_REDIRECT = 
0x7 + NETKIT_L2 = 0x0 + NETKIT_L3 = 0x1 +) + const ( NF_INET_PRE_ROUTING = 0x0 NF_INET_LOCAL_IN = 0x1 @@ -4110,6 +4118,106 @@ type EthtoolDrvinfo struct { Regdump_len uint32 } +type EthtoolTsInfo struct { + Cmd uint32 + So_timestamping uint32 + Phc_index int32 + Tx_types uint32 + Tx_reserved [3]uint32 + Rx_filters uint32 + Rx_reserved [3]uint32 +} + +type HwTstampConfig struct { + Flags int32 + Tx_type int32 + Rx_filter int32 +} + +const ( + HWTSTAMP_FILTER_NONE = 0x0 + HWTSTAMP_FILTER_ALL = 0x1 + HWTSTAMP_FILTER_SOME = 0x2 + HWTSTAMP_FILTER_PTP_V1_L4_EVENT = 0x3 + HWTSTAMP_FILTER_PTP_V2_L4_EVENT = 0x6 + HWTSTAMP_FILTER_PTP_V2_L2_EVENT = 0x9 + HWTSTAMP_FILTER_PTP_V2_EVENT = 0xc +) + +const ( + HWTSTAMP_TX_OFF = 0x0 + HWTSTAMP_TX_ON = 0x1 + HWTSTAMP_TX_ONESTEP_SYNC = 0x2 +) + +type ( + PtpClockCaps struct { + Max_adj int32 + N_alarm int32 + N_ext_ts int32 + N_per_out int32 + Pps int32 + N_pins int32 + Cross_timestamping int32 + Adjust_phase int32 + Max_phase_adj int32 + Rsv [11]int32 + } + PtpClockTime struct { + Sec int64 + Nsec uint32 + Reserved uint32 + } + PtpExttsEvent struct { + T PtpClockTime + Index uint32 + Flags uint32 + Rsv [2]uint32 + } + PtpExttsRequest struct { + Index uint32 + Flags uint32 + Rsv [2]uint32 + } + PtpPeroutRequest struct { + StartOrPhase PtpClockTime + Period PtpClockTime + Index uint32 + Flags uint32 + On PtpClockTime + } + PtpPinDesc struct { + Name [64]byte + Index uint32 + Func uint32 + Chan uint32 + Rsv [5]uint32 + } + PtpSysOffset struct { + Samples uint32 + Rsv [3]uint32 + Ts [51]PtpClockTime + } + PtpSysOffsetExtended struct { + Samples uint32 + Rsv [3]uint32 + Ts [25][3]PtpClockTime + } + PtpSysOffsetPrecise struct { + Device PtpClockTime + Realtime PtpClockTime + Monoraw PtpClockTime + Rsv [4]uint32 + } +) + +const ( + PTP_PF_NONE = 0x0 + PTP_PF_EXTTS = 0x1 + PTP_PF_PEROUT = 0x2 + PTP_PF_PHYSYNC = 0x3 +) + type ( HIDRawReportDescriptor struct { Size uint32 diff --git a/vendor/golang.org/x/sys/unix/ztypes_zos_s390x.go b/vendor/golang.org/x/sys/unix/ztypes_zos_s390x.go index d9a13af468..2e5d5a4435 100644 --- a/vendor/golang.org/x/sys/unix/ztypes_zos_s390x.go +++ b/vendor/golang.org/x/sys/unix/ztypes_zos_s390x.go @@ -377,6 +377,12 @@ type Flock_t struct { Pid int32 } +type F_cnvrt struct { + Cvtcmd int32 + Pccsid int16 + Fccsid int16 +} + type Termios struct { Cflag uint32 Iflag uint32 diff --git a/vendor/golang.org/x/sys/windows/syscall_windows.go b/vendor/golang.org/x/sys/windows/syscall_windows.go index 5cee9a3143..4510bfc3f5 100644 --- a/vendor/golang.org/x/sys/windows/syscall_windows.go +++ b/vendor/golang.org/x/sys/windows/syscall_windows.go @@ -725,20 +725,12 @@ func DurationSinceBoot() time.Duration { } func Ftruncate(fd Handle, length int64) (err error) { - curoffset, e := Seek(fd, 0, 1) - if e != nil { - return e - } - defer Seek(fd, curoffset, 0) - _, e = Seek(fd, length, 0) - if e != nil { - return e + type _FILE_END_OF_FILE_INFO struct { + EndOfFile int64 } - e = SetEndOfFile(fd) - if e != nil { - return e - } - return nil + var info _FILE_END_OF_FILE_INFO + info.EndOfFile = length + return SetFileInformationByHandle(fd, FileEndOfFileInfo, (*byte)(unsafe.Pointer(&info)), uint32(unsafe.Sizeof(info))) } func Gettimeofday(tv *Timeval) (err error) { @@ -894,6 +886,11 @@ const socket_error = uintptr(^uint32(0)) //sys GetACP() (acp uint32) = kernel32.GetACP //sys MultiByteToWideChar(codePage uint32, dwFlags uint32, str *byte, nstr int32, wchar *uint16, nwchar int32) (nwrite int32, err error) = kernel32.MultiByteToWideChar //sys 
getBestInterfaceEx(sockaddr unsafe.Pointer, pdwBestIfIndex *uint32) (errcode error) = iphlpapi.GetBestInterfaceEx +//sys GetIfEntry2Ex(level uint32, row *MibIfRow2) (errcode error) = iphlpapi.GetIfEntry2Ex +//sys GetUnicastIpAddressEntry(row *MibUnicastIpAddressRow) (errcode error) = iphlpapi.GetUnicastIpAddressEntry +//sys NotifyIpInterfaceChange(family uint16, callback uintptr, callerContext unsafe.Pointer, initialNotification bool, notificationHandle *Handle) (errcode error) = iphlpapi.NotifyIpInterfaceChange +//sys NotifyUnicastIpAddressChange(family uint16, callback uintptr, callerContext unsafe.Pointer, initialNotification bool, notificationHandle *Handle) (errcode error) = iphlpapi.NotifyUnicastIpAddressChange +//sys CancelMibChangeNotify2(notificationHandle Handle) (errcode error) = iphlpapi.CancelMibChangeNotify2 // For testing: clients can set this flag to force // creation of IPv6 sockets to return EAFNOSUPPORT. @@ -1685,13 +1682,16 @@ func (s NTStatus) Error() string { // do not use NTUnicodeString, and instead UTF16PtrFromString should be used for // the more common *uint16 string type. func NewNTUnicodeString(s string) (*NTUnicodeString, error) { - var u NTUnicodeString - s16, err := UTF16PtrFromString(s) + s16, err := UTF16FromString(s) if err != nil { return nil, err } - RtlInitUnicodeString(&u, s16) - return &u, nil + n := uint16(len(s16) * 2) + return &NTUnicodeString{ + Length: n - 2, // subtract 2 bytes for the NULL terminator + MaximumLength: n, + Buffer: &s16[0], + }, nil } // Slice returns a uint16 slice that aliases the data in the NTUnicodeString. diff --git a/vendor/golang.org/x/sys/windows/types_windows.go b/vendor/golang.org/x/sys/windows/types_windows.go index 7b97a154c9..51311e205f 100644 --- a/vendor/golang.org/x/sys/windows/types_windows.go +++ b/vendor/golang.org/x/sys/windows/types_windows.go @@ -2203,6 +2203,132 @@ const ( IfOperStatusLowerLayerDown = 7 ) +const ( + IF_MAX_PHYS_ADDRESS_LENGTH = 32 + IF_MAX_STRING_SIZE = 256 +) + +// MIB_IF_ENTRY_LEVEL enumeration from netioapi.h or +// https://learn.microsoft.com/en-us/windows/win32/api/netioapi/nf-netioapi-getifentry2ex. +const ( + MibIfEntryNormal = 0 + MibIfEntryNormalWithoutStatistics = 2 +) + +// MIB_NOTIFICATION_TYPE enumeration from netioapi.h or +// https://learn.microsoft.com/en-us/windows/win32/api/netioapi/ne-netioapi-mib_notification_type. +const ( + MibParameterNotification = 0 + MibAddInstance = 1 + MibDeleteInstance = 2 + MibInitialNotification = 3 +) + +// MibIfRow2 stores information about a particular interface. See +// https://learn.microsoft.com/en-us/windows/win32/api/netioapi/ns-netioapi-mib_if_row2. 
+type MibIfRow2 struct { + InterfaceLuid uint64 + InterfaceIndex uint32 + InterfaceGuid GUID + Alias [IF_MAX_STRING_SIZE + 1]uint16 + Description [IF_MAX_STRING_SIZE + 1]uint16 + PhysicalAddressLength uint32 + PhysicalAddress [IF_MAX_PHYS_ADDRESS_LENGTH]uint8 + PermanentPhysicalAddress [IF_MAX_PHYS_ADDRESS_LENGTH]uint8 + Mtu uint32 + Type uint32 + TunnelType uint32 + MediaType uint32 + PhysicalMediumType uint32 + AccessType uint32 + DirectionType uint32 + InterfaceAndOperStatusFlags uint8 + OperStatus uint32 + AdminStatus uint32 + MediaConnectState uint32 + NetworkGuid GUID + ConnectionType uint32 + TransmitLinkSpeed uint64 + ReceiveLinkSpeed uint64 + InOctets uint64 + InUcastPkts uint64 + InNUcastPkts uint64 + InDiscards uint64 + InErrors uint64 + InUnknownProtos uint64 + InUcastOctets uint64 + InMulticastOctets uint64 + InBroadcastOctets uint64 + OutOctets uint64 + OutUcastPkts uint64 + OutNUcastPkts uint64 + OutDiscards uint64 + OutErrors uint64 + OutUcastOctets uint64 + OutMulticastOctets uint64 + OutBroadcastOctets uint64 + OutQLen uint64 +} + +// MIB_UNICASTIPADDRESS_ROW stores information about a unicast IP address. See +// https://learn.microsoft.com/en-us/windows/win32/api/netioapi/ns-netioapi-mib_unicastipaddress_row. +type MibUnicastIpAddressRow struct { + Address RawSockaddrInet6 // SOCKADDR_INET union + InterfaceLuid uint64 + InterfaceIndex uint32 + PrefixOrigin uint32 + SuffixOrigin uint32 + ValidLifetime uint32 + PreferredLifetime uint32 + OnLinkPrefixLength uint8 + SkipAsSource uint8 + DadState uint32 + ScopeId uint32 + CreationTimeStamp Filetime +} + +const ScopeLevelCount = 16 + +// MIB_IPINTERFACE_ROW stores interface management information for a particular IP address family on a network interface. +// See https://learn.microsoft.com/en-us/windows/win32/api/netioapi/ns-netioapi-mib_ipinterface_row. +type MibIpInterfaceRow struct { + Family uint16 + InterfaceLuid uint64 + InterfaceIndex uint32 + MaxReassemblySize uint32 + InterfaceIdentifier uint64 + MinRouterAdvertisementInterval uint32 + MaxRouterAdvertisementInterval uint32 + AdvertisingEnabled uint8 + ForwardingEnabled uint8 + WeakHostSend uint8 + WeakHostReceive uint8 + UseAutomaticMetric uint8 + UseNeighborUnreachabilityDetection uint8 + ManagedAddressConfigurationSupported uint8 + OtherStatefulConfigurationSupported uint8 + AdvertiseDefaultRoute uint8 + RouterDiscoveryBehavior uint32 + DadTransmits uint32 + BaseReachableTime uint32 + RetransmitTime uint32 + PathMtuDiscoveryTimeout uint32 + LinkLocalAddressBehavior uint32 + LinkLocalAddressTimeout uint32 + ZoneIndices [ScopeLevelCount]uint32 + SitePrefixLength uint32 + Metric uint32 + NlMtu uint32 + Connected uint8 + SupportsWakeUpPatterns uint8 + SupportsNeighborDiscovery uint8 + SupportsRouterDiscovery uint8 + ReachableTime uint32 + TransmitOffload uint32 + ReceiveOffload uint32 + DisableDefaultRoutes uint8 +} + // Console related constants used for the mode parameter to SetConsoleMode. See // https://docs.microsoft.com/en-us/windows/console/setconsolemode for details. 
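The MibIfRow2 type and MIB constants above back the GetIfEntry2Ex wrapper added in this change. A Windows-only sketch of querying a single interface; the interface index 12 is a placeholder and would normally come from net.Interfaces or GetAdaptersAddresses:

//go:build windows

package main

import (
	"fmt"
	"log"

	"golang.org/x/sys/windows"
)

func main() {
	// The row is keyed by LUID or index; only the index is set here.
	row := windows.MibIfRow2{InterfaceIndex: 12} // hypothetical interface index
	if err := windows.GetIfEntry2Ex(windows.MibIfEntryNormal, &row); err != nil {
		log.Fatal(err)
	}
	fmt.Println("alias:", windows.UTF16ToString(row.Alias[:]))
	fmt.Printf("MTU: %d, oper status: %d\n", row.Mtu, row.OperStatus)
}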
diff --git a/vendor/golang.org/x/sys/windows/zsyscall_windows.go b/vendor/golang.org/x/sys/windows/zsyscall_windows.go index 4c2e1bdc01..6f5252880c 100644 --- a/vendor/golang.org/x/sys/windows/zsyscall_windows.go +++ b/vendor/golang.org/x/sys/windows/zsyscall_windows.go @@ -181,10 +181,15 @@ var ( procDnsRecordListFree = moddnsapi.NewProc("DnsRecordListFree") procDwmGetWindowAttribute = moddwmapi.NewProc("DwmGetWindowAttribute") procDwmSetWindowAttribute = moddwmapi.NewProc("DwmSetWindowAttribute") + procCancelMibChangeNotify2 = modiphlpapi.NewProc("CancelMibChangeNotify2") procGetAdaptersAddresses = modiphlpapi.NewProc("GetAdaptersAddresses") procGetAdaptersInfo = modiphlpapi.NewProc("GetAdaptersInfo") procGetBestInterfaceEx = modiphlpapi.NewProc("GetBestInterfaceEx") procGetIfEntry = modiphlpapi.NewProc("GetIfEntry") + procGetIfEntry2Ex = modiphlpapi.NewProc("GetIfEntry2Ex") + procGetUnicastIpAddressEntry = modiphlpapi.NewProc("GetUnicastIpAddressEntry") + procNotifyIpInterfaceChange = modiphlpapi.NewProc("NotifyIpInterfaceChange") + procNotifyUnicastIpAddressChange = modiphlpapi.NewProc("NotifyUnicastIpAddressChange") procAddDllDirectory = modkernel32.NewProc("AddDllDirectory") procAssignProcessToJobObject = modkernel32.NewProc("AssignProcessToJobObject") procCancelIo = modkernel32.NewProc("CancelIo") @@ -1606,6 +1611,14 @@ func DwmSetWindowAttribute(hwnd HWND, attribute uint32, value unsafe.Pointer, si return } +func CancelMibChangeNotify2(notificationHandle Handle) (errcode error) { + r0, _, _ := syscall.SyscallN(procCancelMibChangeNotify2.Addr(), uintptr(notificationHandle)) + if r0 != 0 { + errcode = syscall.Errno(r0) + } + return +} + func GetAdaptersAddresses(family uint32, flags uint32, reserved uintptr, adapterAddresses *IpAdapterAddresses, sizePointer *uint32) (errcode error) { r0, _, _ := syscall.Syscall6(procGetAdaptersAddresses.Addr(), 5, uintptr(family), uintptr(flags), uintptr(reserved), uintptr(unsafe.Pointer(adapterAddresses)), uintptr(unsafe.Pointer(sizePointer)), 0) if r0 != 0 { @@ -1638,6 +1651,46 @@ func GetIfEntry(pIfRow *MibIfRow) (errcode error) { return } +func GetIfEntry2Ex(level uint32, row *MibIfRow2) (errcode error) { + r0, _, _ := syscall.SyscallN(procGetIfEntry2Ex.Addr(), uintptr(level), uintptr(unsafe.Pointer(row))) + if r0 != 0 { + errcode = syscall.Errno(r0) + } + return +} + +func GetUnicastIpAddressEntry(row *MibUnicastIpAddressRow) (errcode error) { + r0, _, _ := syscall.SyscallN(procGetUnicastIpAddressEntry.Addr(), uintptr(unsafe.Pointer(row))) + if r0 != 0 { + errcode = syscall.Errno(r0) + } + return +} + +func NotifyIpInterfaceChange(family uint16, callback uintptr, callerContext unsafe.Pointer, initialNotification bool, notificationHandle *Handle) (errcode error) { + var _p0 uint32 + if initialNotification { + _p0 = 1 + } + r0, _, _ := syscall.SyscallN(procNotifyIpInterfaceChange.Addr(), uintptr(family), uintptr(callback), uintptr(callerContext), uintptr(_p0), uintptr(unsafe.Pointer(notificationHandle))) + if r0 != 0 { + errcode = syscall.Errno(r0) + } + return +} + +func NotifyUnicastIpAddressChange(family uint16, callback uintptr, callerContext unsafe.Pointer, initialNotification bool, notificationHandle *Handle) (errcode error) { + var _p0 uint32 + if initialNotification { + _p0 = 1 + } + r0, _, _ := syscall.SyscallN(procNotifyUnicastIpAddressChange.Addr(), uintptr(family), uintptr(callback), uintptr(callerContext), uintptr(_p0), uintptr(unsafe.Pointer(notificationHandle))) + if r0 != 0 { + errcode = syscall.Errno(r0) + } + return +} + func 
AddDllDirectory(path *uint16) (cookie uintptr, err error) { r0, _, e1 := syscall.Syscall(procAddDllDirectory.Addr(), 1, uintptr(unsafe.Pointer(path)), 0, 0) cookie = uintptr(r0) diff --git a/vendor/golang.org/x/term/README.md b/vendor/golang.org/x/term/README.md index d03d0aefef..05ff623f94 100644 --- a/vendor/golang.org/x/term/README.md +++ b/vendor/golang.org/x/term/README.md @@ -4,16 +4,13 @@ This repository provides Go terminal and console support packages. -## Download/Install - -The easiest way to install is to run `go get -u golang.org/x/term`. You can -also manually git clone the repository to `$GOPATH/src/golang.org/x/term`. - ## Report Issues / Send Patches This repository uses Gerrit for code changes. To learn how to submit changes to -this repository, see https://golang.org/doc/contribute.html. +this repository, see https://go.dev/doc/contribute. + +The git repository is https://go.googlesource.com/term. The main issue tracker for the term repository is located at -https://github.com/golang/go/issues. Prefix your issue with "x/term:" in the +https://go.dev/issues. Prefix your issue with "x/term:" in the subject line, so it is easy to find. diff --git a/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go b/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go index c9ba1a375d..b622dfdf3a 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go @@ -57,6 +57,8 @@ type asmArch struct { // include the first integer register and first floating-point register. Accessing // any of them counts as writing to result. retRegs []string + // writeResult is a list of instructions that will change result register implicity. + writeResult []string // calculated during initialization sizes types.Sizes intSize int @@ -85,18 +87,18 @@ type asmVar struct { var ( asmArch386 = asmArch{name: "386", bigEndian: false, stack: "SP", lr: false} asmArchArm = asmArch{name: "arm", bigEndian: false, stack: "R13", lr: true} - asmArchArm64 = asmArch{name: "arm64", bigEndian: false, stack: "RSP", lr: true, retRegs: []string{"R0", "F0"}} - asmArchAmd64 = asmArch{name: "amd64", bigEndian: false, stack: "SP", lr: false, retRegs: []string{"AX", "X0"}} + asmArchArm64 = asmArch{name: "arm64", bigEndian: false, stack: "RSP", lr: true, retRegs: []string{"R0", "F0"}, writeResult: []string{"SVC"}} + asmArchAmd64 = asmArch{name: "amd64", bigEndian: false, stack: "SP", lr: false, retRegs: []string{"AX", "X0"}, writeResult: []string{"SYSCALL"}} asmArchMips = asmArch{name: "mips", bigEndian: true, stack: "R29", lr: true} asmArchMipsLE = asmArch{name: "mipsle", bigEndian: false, stack: "R29", lr: true} asmArchMips64 = asmArch{name: "mips64", bigEndian: true, stack: "R29", lr: true} asmArchMips64LE = asmArch{name: "mips64le", bigEndian: false, stack: "R29", lr: true} - asmArchPpc64 = asmArch{name: "ppc64", bigEndian: true, stack: "R1", lr: true, retRegs: []string{"R3", "F1"}} - asmArchPpc64LE = asmArch{name: "ppc64le", bigEndian: false, stack: "R1", lr: true, retRegs: []string{"R3", "F1"}} - asmArchRISCV64 = asmArch{name: "riscv64", bigEndian: false, stack: "SP", lr: true, retRegs: []string{"X10", "F10"}} + asmArchPpc64 = asmArch{name: "ppc64", bigEndian: true, stack: "R1", lr: true, retRegs: []string{"R3", "F1"}, writeResult: []string{"SYSCALL"}} + asmArchPpc64LE = asmArch{name: "ppc64le", bigEndian: false, stack: "R1", lr: true, retRegs: []string{"R3", "F1"}, writeResult: []string{"SYSCALL"}} + asmArchRISCV64 = 
asmArch{name: "riscv64", bigEndian: false, stack: "SP", lr: true, retRegs: []string{"X10", "F10"}, writeResult: []string{"ECALL"}} asmArchS390X = asmArch{name: "s390x", bigEndian: true, stack: "R15", lr: true} asmArchWasm = asmArch{name: "wasm", bigEndian: false, stack: "SP", lr: false} - asmArchLoong64 = asmArch{name: "loong64", bigEndian: false, stack: "R3", lr: true, retRegs: []string{"R4", "F0"}} + asmArchLoong64 = asmArch{name: "loong64", bigEndian: false, stack: "R3", lr: true, retRegs: []string{"R4", "F0"}, writeResult: []string{"SYSCALL"}} arches = []*asmArch{ &asmArch386, @@ -351,6 +353,12 @@ Files: } if abi == "ABIInternal" && !haveRetArg { + for _, ins := range archDef.writeResult { + if strings.Contains(line, ins) { + haveRetArg = true + break + } + } for _, reg := range archDef.retRegs { if strings.Contains(line, reg) { haveRetArg = true diff --git a/vendor/golang.org/x/tools/go/analysis/passes/assign/assign.go b/vendor/golang.org/x/tools/go/analysis/passes/assign/assign.go index 3bfd501226..0d95fefcb5 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/assign/assign.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/assign/assign.go @@ -18,7 +18,6 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/ast/inspector" ) @@ -78,7 +77,7 @@ func run(pass *analysis.Pass) (interface{}, error) { // isMapIndex returns true if e is a map index expression. func isMapIndex(info *types.Info, e ast.Expr) bool { - if idx, ok := astutil.Unparen(e).(*ast.IndexExpr); ok { + if idx, ok := ast.Unparen(e).(*ast.IndexExpr); ok { if typ := info.Types[idx.X].Type; typ != nil { _, ok := typ.Underlying().(*types.Map) return ok diff --git a/vendor/golang.org/x/tools/go/analysis/passes/bools/bools.go b/vendor/golang.org/x/tools/go/analysis/passes/bools/bools.go index 564329774e..8cec6e8224 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/bools/bools.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/bools/bools.go @@ -14,7 +14,6 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/ast/inspector" ) @@ -169,7 +168,7 @@ func (op boolOp) checkSuspect(pass *analysis.Pass, exprs []ast.Expr) { // seen[e] is already true; any newly processed exprs are added to seen. func (op boolOp) split(e ast.Expr, seen map[*ast.BinaryExpr]bool) (exprs []ast.Expr) { for { - e = astutil.Unparen(e) + e = ast.Unparen(e) if b, ok := e.(*ast.BinaryExpr); ok && b.Op == op.tok { seen[b] = true exprs = append(exprs, op.split(b.Y, seen)...) diff --git a/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall.go b/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall.go index 4e86439757..613583a1a6 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall.go @@ -19,7 +19,6 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/astutil" ) const debug = false @@ -65,7 +64,7 @@ func checkCgo(fset *token.FileSet, f *ast.File, info *types.Info, reportf func(t // Is this a C.f() call? 
var name string - if sel, ok := astutil.Unparen(call.Fun).(*ast.SelectorExpr); ok { + if sel, ok := ast.Unparen(call.Fun).(*ast.SelectorExpr); ok { if id, ok := sel.X.(*ast.Ident); ok && id.Name == "C" { name = sel.Sel.Name } @@ -180,7 +179,7 @@ func typeCheckCgoSourceFiles(fset *token.FileSet, pkg *types.Package, files []*a for _, raw := range files { // If f is a cgo-generated file, Position reports // the original file, honoring //line directives. - filename := fset.Position(raw.Pos()).Filename + filename := fset.Position(raw.Pos()).Filename // sic: Pos, not FileStart f, err := parser.ParseFile(fset, filename, nil, parser.SkipObjectResolution) if err != nil { return nil, nil, fmt.Errorf("can't parse raw cgo file: %v", err) diff --git a/vendor/golang.org/x/tools/go/analysis/passes/composite/composite.go b/vendor/golang.org/x/tools/go/analysis/passes/composite/composite.go index 8cc6c4a058..f56c3e622f 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/composite/composite.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/composite/composite.go @@ -15,7 +15,6 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/typeparams" ) @@ -72,7 +71,7 @@ func run(pass *analysis.Pass) (interface{}, error) { return } var structuralTypes []types.Type - switch typ := aliases.Unalias(typ).(type) { + switch typ := types.Unalias(typ).(type) { case *types.TypeParam: terms, err := typeparams.StructuralTerms(typ) if err != nil { @@ -146,7 +145,7 @@ func run(pass *analysis.Pass) (interface{}, error) { // isLocalType reports whether typ belongs to the same package as pass. // TODO(adonovan): local means "internal to a function"; rename to isSamePackageType. 
func isLocalType(pass *analysis.Pass, typ types.Type) bool { - switch x := aliases.Unalias(typ).(type) { + switch x := types.Unalias(typ).(type) { case *types.Struct: // struct literals are local types return true diff --git a/vendor/golang.org/x/tools/go/analysis/passes/copylock/copylock.go b/vendor/golang.org/x/tools/go/analysis/passes/copylock/copylock.go index 8f6e7db6a2..03496cb303 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/copylock/copylock.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/copylock/copylock.go @@ -16,10 +16,9 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/typeparams" + "golang.org/x/tools/internal/versions" ) const Doc = `check for locks erroneously passed by value @@ -40,18 +39,25 @@ var Analyzer = &analysis.Analyzer{ func run(pass *analysis.Pass) (interface{}, error) { inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector) + var goversion string // effective file version ("" => unknown) nodeFilter := []ast.Node{ (*ast.AssignStmt)(nil), (*ast.CallExpr)(nil), (*ast.CompositeLit)(nil), + (*ast.File)(nil), (*ast.FuncDecl)(nil), (*ast.FuncLit)(nil), (*ast.GenDecl)(nil), (*ast.RangeStmt)(nil), (*ast.ReturnStmt)(nil), } - inspect.Preorder(nodeFilter, func(node ast.Node) { + inspect.WithStack(nodeFilter, func(node ast.Node, push bool, stack []ast.Node) bool { + if !push { + return false + } switch node := node.(type) { + case *ast.File: + goversion = versions.FileVersion(pass.TypesInfo, node) case *ast.RangeStmt: checkCopyLocksRange(pass, node) case *ast.FuncDecl: @@ -61,7 +67,7 @@ func run(pass *analysis.Pass) (interface{}, error) { case *ast.CallExpr: checkCopyLocksCallExpr(pass, node) case *ast.AssignStmt: - checkCopyLocksAssign(pass, node) + checkCopyLocksAssign(pass, node, goversion, parent(stack)) case *ast.GenDecl: checkCopyLocksGenDecl(pass, node) case *ast.CompositeLit: @@ -69,16 +75,36 @@ func run(pass *analysis.Pass) (interface{}, error) { case *ast.ReturnStmt: checkCopyLocksReturnStmt(pass, node) } + return true }) return nil, nil } // checkCopyLocksAssign checks whether an assignment // copies a lock. -func checkCopyLocksAssign(pass *analysis.Pass, as *ast.AssignStmt) { - for i, x := range as.Rhs { +func checkCopyLocksAssign(pass *analysis.Pass, assign *ast.AssignStmt, goversion string, parent ast.Node) { + lhs := assign.Lhs + for i, x := range assign.Rhs { if path := lockPathRhs(pass, x); path != nil { - pass.ReportRangef(x, "assignment copies lock value to %v: %v", analysisutil.Format(pass.Fset, as.Lhs[i]), path) + pass.ReportRangef(x, "assignment copies lock value to %v: %v", analysisutil.Format(pass.Fset, assign.Lhs[i]), path) + lhs = nil // An lhs has been reported. We prefer the assignment warning and do not report twice. + } + } + + // After GoVersion 1.22, loop variables are implicitly copied on each iteration. + // So a for statement may inadvertently copy a lock when any of the + // iteration variables contain locks. 
+ if assign.Tok == token.DEFINE && versions.AtLeast(goversion, versions.Go1_22) { + if parent, _ := parent.(*ast.ForStmt); parent != nil && parent.Init == assign { + for _, l := range lhs { + if id, ok := l.(*ast.Ident); ok && id.Name != "_" { + if obj := pass.TypesInfo.Defs[id]; obj != nil && obj.Type() != nil { + if path := lockPath(pass.Pkg, obj.Type(), nil); path != nil { + pass.ReportRangef(l, "for loop iteration copies lock value to %v: %v", analysisutil.Format(pass.Fset, l), path) + } + } + } + } } } } @@ -225,7 +251,7 @@ func (path typePath) String() string { } func lockPathRhs(pass *analysis.Pass, x ast.Expr) typePath { - x = astutil.Unparen(x) // ignore parens on rhs + x = ast.Unparen(x) // ignore parens on rhs if _, ok := x.(*ast.CompositeLit); ok { return nil @@ -235,7 +261,7 @@ func lockPathRhs(pass *analysis.Pass, x ast.Expr) typePath { return nil } if star, ok := x.(*ast.StarExpr); ok { - if _, ok := astutil.Unparen(star.X).(*ast.CallExpr); ok { + if _, ok := ast.Unparen(star.X).(*ast.CallExpr); ok { // A call may return a pointer to a zero value. return nil } @@ -259,7 +285,7 @@ func lockPath(tpkg *types.Package, typ types.Type, seen map[types.Type]bool) typ } seen[typ] = true - if tpar, ok := aliases.Unalias(typ).(*types.TypeParam); ok { + if tpar, ok := types.Unalias(typ).(*types.TypeParam); ok { terms, err := typeparams.StructuralTerms(tpar) if err != nil { return nil // invalid type @@ -340,6 +366,14 @@ func lockPath(tpkg *types.Package, typ types.Type, seen map[types.Type]bool) typ return nil } +// parent returns the second from the last node on stack if it exists. +func parent(stack []ast.Node) ast.Node { + if len(stack) >= 2 { + return stack[len(stack)-2] + } + return nil +} + var lockerType *types.Interface // Construct a sync.Locker interface type. diff --git a/vendor/golang.org/x/tools/go/analysis/passes/deepequalerrors/deepequalerrors.go b/vendor/golang.org/x/tools/go/analysis/passes/deepequalerrors/deepequalerrors.go index 95cd9a061e..70b5e39ecf 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/deepequalerrors/deepequalerrors.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/deepequalerrors/deepequalerrors.go @@ -15,7 +15,6 @@ import ( "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/aliases" ) const Doc = `check for calls of reflect.DeepEqual on error values @@ -102,7 +101,7 @@ func containsError(typ types.Type) bool { return true } } - case *types.Named, *aliases.Alias: + case *types.Named, *types.Alias: return check(t.Underlying()) // We list the remaining valid type kinds for completeness. diff --git a/vendor/golang.org/x/tools/go/analysis/passes/fieldalignment/fieldalignment.go b/vendor/golang.org/x/tools/go/analysis/passes/fieldalignment/fieldalignment.go index 8af717b4c6..93fa39140e 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/fieldalignment/fieldalignment.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/fieldalignment/fieldalignment.go @@ -53,7 +53,7 @@ so the analyzer is not included in typical suites such as vet or gopls. 
Use this standalone command to run it on your code: $ go install golang.org/x/tools/go/analysis/passes/fieldalignment/cmd/fieldalignment@latest - $ go fieldalignment [packages] + $ fieldalignment [packages] ` diff --git a/vendor/golang.org/x/tools/go/analysis/passes/httpresponse/httpresponse.go b/vendor/golang.org/x/tools/go/analysis/passes/httpresponse/httpresponse.go index e1ca9b2f51..91ebe29de1 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/httpresponse/httpresponse.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/httpresponse/httpresponse.go @@ -14,7 +14,6 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/typesinternal" ) @@ -137,7 +136,7 @@ func isHTTPFuncOrMethodOnClient(info *types.Info, expr *ast.CallExpr) bool { if analysisutil.IsNamedType(typ, "net/http", "Client") { return true // method on http.Client. } - ptr, ok := aliases.Unalias(typ).(*types.Pointer) + ptr, ok := types.Unalias(typ).(*types.Pointer) return ok && analysisutil.IsNamedType(ptr.Elem(), "net/http", "Client") // method on *http.Client. } diff --git a/vendor/golang.org/x/tools/go/analysis/passes/internal/analysisutil/util.go b/vendor/golang.org/x/tools/go/analysis/passes/internal/analysisutil/util.go index f7f071dc8b..a4fa8d31c4 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/internal/analysisutil/util.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/internal/analysisutil/util.go @@ -15,7 +15,6 @@ import ( "os" "golang.org/x/tools/go/analysis" - "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/analysisinternal" ) @@ -121,7 +120,7 @@ func Imports(pkg *types.Package, path string) bool { // This function avoids allocating the concatenation of "pkg.Name", // which is important for the performance of syntax matching. func IsNamedType(t types.Type, pkgPath string, names ...string) bool { - n, ok := aliases.Unalias(t).(*types.Named) + n, ok := types.Unalias(t).(*types.Named) if !ok { return false } diff --git a/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/doc.go b/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/doc.go index 28bf6c7e26..f789bdc811 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/doc.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/doc.go @@ -10,7 +10,7 @@ // lostcancel: check cancel func returned by context.WithCancel is called // // The cancellation function returned by context.WithCancel, WithTimeout, -// and WithDeadline must be called or the new context will remain live -// until its parent context is cancelled. +// WithDeadline and variants such as WithCancelCause must be called, +// or the new context will remain live until its parent context is cancelled. // (The background context is never cancelled.) 
package lostcancel diff --git a/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/lostcancel.go b/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/lostcancel.go index bf56a5c06f..26fdc1206f 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/lostcancel.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/lostcancel.go @@ -198,7 +198,9 @@ func isContextWithCancel(info *types.Info, n ast.Node) bool { return false } switch sel.Sel.Name { - case "WithCancel", "WithTimeout", "WithDeadline": + case "WithCancel", "WithCancelCause", + "WithTimeout", "WithTimeoutCause", + "WithDeadline", "WithDeadlineCause": default: return false } diff --git a/vendor/golang.org/x/tools/go/analysis/passes/nilness/nilness.go b/vendor/golang.org/x/tools/go/analysis/passes/nilness/nilness.go index 0c7e9c5d06..faaf12a938 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/nilness/nilness.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/nilness/nilness.go @@ -14,7 +14,6 @@ import ( "golang.org/x/tools/go/analysis/passes/buildssa" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ssa" - "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/typeparams" ) @@ -300,7 +299,7 @@ func nilnessOf(stack []fact, v ssa.Value) nilness { } case *ssa.MakeInterface: // A MakeInterface is non-nil unless its operand is a type parameter. - tparam, ok := aliases.Unalias(v.X.Type()).(*types.TypeParam) + tparam, ok := types.Unalias(v.X.Type()).(*types.TypeParam) if !ok { return isnonnil } diff --git a/vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go b/vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go index b3453f8fc0..2d79d0b033 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go @@ -24,7 +24,6 @@ import ( "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/typeparams" ) @@ -159,10 +158,11 @@ func maybePrintfWrapper(info *types.Info, decl ast.Decl) *printfWrapper { params := sig.Params() nparams := params.Len() // variadic => nonzero + // Check final parameter is "args ...interface{}". args := params.At(nparams - 1) - iface, ok := args.Type().(*types.Slice).Elem().(*types.Interface) + iface, ok := types.Unalias(args.Type().(*types.Slice).Elem()).(*types.Interface) if !ok || !iface.Empty() { - return nil // final (args) param is not ...interface{} + return nil } // Is second last param 'format string'? @@ -514,7 +514,7 @@ func checkPrintf(pass *analysis.Pass, kind Kind, call *ast.CallExpr, fn *types.F // non-constant format string and no arguments: // if msg contains "%", misformatting occurs. // Report the problem and suggest a fix: fmt.Printf("%s", msg). - if idx == len(call.Args)-1 { + if !suppressNonconstants && idx == len(call.Args)-1 { pass.Report(analysis.Diagnostic{ Pos: formatArg.Pos(), End: formatArg.End(), @@ -1012,7 +1012,7 @@ func checkPrint(pass *analysis.Pass, call *ast.CallExpr, fn *types.Func) { typ := params.At(firstArg).Type() typ = typ.(*types.Slice).Elem() - it, ok := aliases.Unalias(typ).(*types.Interface) + it, ok := types.Unalias(typ).(*types.Interface) if !ok || !it.Empty() { // Skip variadic functions accepting non-interface{} args. 
return @@ -1100,3 +1100,12 @@ func (ss stringSet) Set(flag string) error { } return nil } + +// suppressNonconstants suppresses reporting printf calls with +// non-constant formatting strings (proposal #60529) when true. +// +// This variable is to allow for staging the transition to newer +// versions of x/tools by vendoring. +// +// Remove this after the 1.24 release. +var suppressNonconstants bool diff --git a/vendor/golang.org/x/tools/go/analysis/passes/printf/types.go b/vendor/golang.org/x/tools/go/analysis/passes/printf/types.go index 017c8a247e..f7e50f98a9 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/printf/types.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/printf/types.go @@ -10,7 +10,6 @@ import ( "go/types" "golang.org/x/tools/go/analysis" - "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/typeparams" ) @@ -73,7 +72,7 @@ func (m *argMatcher) match(typ types.Type, topLevel bool) bool { return true } - if typ, _ := aliases.Unalias(typ).(*types.TypeParam); typ != nil { + if typ, _ := types.Unalias(typ).(*types.TypeParam); typ != nil { // Avoid infinite recursion through type parameters. if m.seen[typ] { return true @@ -276,7 +275,7 @@ func (m *argMatcher) match(typ types.Type, topLevel bool) bool { } func isConvertibleToString(typ types.Type) bool { - if bt, ok := aliases.Unalias(typ).(*types.Basic); ok && bt.Kind() == types.UntypedNil { + if bt, ok := types.Unalias(typ).(*types.Basic); ok && bt.Kind() == types.UntypedNil { // We explicitly don't want untyped nil, which is // convertible to both of the interfaces below, as it // would just panic anyway. diff --git a/vendor/golang.org/x/tools/go/analysis/passes/shift/shift.go b/vendor/golang.org/x/tools/go/analysis/passes/shift/shift.go index d01eb1eebe..759ed0043f 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/shift/shift.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/shift/shift.go @@ -21,7 +21,6 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/typeparams" ) @@ -100,7 +99,7 @@ func checkLongShift(pass *analysis.Pass, node ast.Node, x, y ast.Expr) { return } var structuralTypes []types.Type - switch t := aliases.Unalias(t).(type) { + switch t := types.Unalias(t).(type) { case *types.TypeParam: terms, err := typeparams.StructuralTerms(t) if err != nil { diff --git a/vendor/golang.org/x/tools/go/analysis/passes/slog/slog.go b/vendor/golang.org/x/tools/go/analysis/passes/slog/slog.go index 0cade7bad7..0129102a33 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/slog/slog.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/slog/slog.go @@ -203,7 +203,7 @@ func kvFuncSkipArgs(fn *types.Func) (int, bool) { // order to get to the ones that match the ...any parameter. // The first key is the dereferenced receiver type name, or "" for a function. 
var kvFuncs = map[string]map[string]int{ - "": map[string]int{ + "": { "Debug": 1, "Info": 1, "Warn": 1, @@ -215,7 +215,7 @@ var kvFuncs = map[string]map[string]int{ "Log": 3, "Group": 1, }, - "Logger": map[string]int{ + "Logger": { "Debug": 1, "Info": 1, "Warn": 1, @@ -227,7 +227,7 @@ var kvFuncs = map[string]map[string]int{ "Log": 3, "With": 0, }, - "Record": map[string]int{ + "Record": { "Add": 0, }, } diff --git a/vendor/golang.org/x/tools/go/analysis/passes/stringintconv/string.go b/vendor/golang.org/x/tools/go/analysis/passes/stringintconv/string.go index a3afbf696e..108600a2ba 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/stringintconv/string.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/stringintconv/string.go @@ -15,7 +15,6 @@ import ( "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/analysisinternal" "golang.org/x/tools/internal/typeparams" ) @@ -248,7 +247,7 @@ func run(pass *analysis.Pass) (interface{}, error) { func structuralTypes(t types.Type) ([]types.Type, error) { var structuralTypes []types.Type - if tp, ok := aliases.Unalias(t).(*types.TypeParam); ok { + if tp, ok := types.Unalias(t).(*types.TypeParam); ok { terms, err := typeparams.StructuralTerms(tp) if err != nil { return nil, err diff --git a/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/testinggoroutine.go b/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/testinggoroutine.go index 828f95bc86..effcdc5700 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/testinggoroutine.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/testinggoroutine.go @@ -14,10 +14,8 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/aliases" ) //go:embed doc.go @@ -186,7 +184,7 @@ func withinScope(scope ast.Node, x *types.Var) bool { func goAsyncCall(info *types.Info, goStmt *ast.GoStmt, toDecl func(*types.Func) *ast.FuncDecl) *asyncCall { call := goStmt.Call - fun := astutil.Unparen(call.Fun) + fun := ast.Unparen(call.Fun) if id := funcIdent(fun); id != nil { if lit := funcLitInScope(id); lit != nil { return &asyncCall{region: lit, async: goStmt, scope: nil, fun: fun} @@ -213,7 +211,7 @@ func tRunAsyncCall(info *types.Info, call *ast.CallExpr) *asyncCall { return nil } - fun := astutil.Unparen(call.Args[1]) + fun := ast.Unparen(call.Args[1]) if lit, ok := fun.(*ast.FuncLit); ok { // function lit? return &asyncCall{region: lit, async: call, scope: lit, fun: fun} } @@ -243,7 +241,7 @@ var forbidden = []string{ // Returns (nil, nil, nil) if call is not of this form. func forbiddenMethod(info *types.Info, call *ast.CallExpr) (*types.Var, *types.Selection, *types.Func) { // Compare to typeutil.StaticCallee. 
- fun := astutil.Unparen(call.Fun) + fun := ast.Unparen(call.Fun) selExpr, ok := fun.(*ast.SelectorExpr) if !ok { return nil, nil, nil @@ -254,7 +252,7 @@ func forbiddenMethod(info *types.Info, call *ast.CallExpr) (*types.Var, *types.S } var x *types.Var - if id, ok := astutil.Unparen(selExpr.X).(*ast.Ident); ok { + if id, ok := ast.Unparen(selExpr.X).(*ast.Ident); ok { x, _ = info.Uses[id].(*types.Var) } if x == nil { @@ -271,7 +269,7 @@ func forbiddenMethod(info *types.Info, call *ast.CallExpr) (*types.Var, *types.S func formatMethod(sel *types.Selection, fn *types.Func) string { var ptr string rtype := sel.Recv() - if p, ok := aliases.Unalias(rtype).(*types.Pointer); ok { + if p, ok := types.Unalias(rtype).(*types.Pointer); ok { ptr = "*" rtype = p.Elem() } diff --git a/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/util.go b/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/util.go index ad815f1901..8c7a51ca52 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/util.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/testinggoroutine/util.go @@ -8,7 +8,6 @@ import ( "go/ast" "go/types" - "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/internal/typeparams" ) @@ -56,7 +55,7 @@ func isMethodNamed(f *types.Func, pkgPath string, names ...string) bool { } func funcIdent(fun ast.Expr) *ast.Ident { - switch fun := astutil.Unparen(fun).(type) { + switch fun := ast.Unparen(fun).(type) { case *ast.IndexExpr, *ast.IndexListExpr: x, _, _, _ := typeparams.UnpackIndexExpr(fun) // necessary? id, _ := x.(*ast.Ident) diff --git a/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go b/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go index 5b4598235c..36f2c43eb6 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go @@ -48,7 +48,7 @@ var acceptedFuzzTypes = []types.Type{ func run(pass *analysis.Pass) (interface{}, error) { for _, f := range pass.Files { - if !strings.HasSuffix(pass.Fset.File(f.Pos()).Name(), "_test.go") { + if !strings.HasSuffix(pass.Fset.File(f.FileStart).Name(), "_test.go") { continue } for _, decl := range f.Decls { diff --git a/vendor/golang.org/x/tools/go/analysis/passes/unsafeptr/unsafeptr.go b/vendor/golang.org/x/tools/go/analysis/passes/unsafeptr/unsafeptr.go index 14e4a6c1e4..272ae7fe04 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/unsafeptr/unsafeptr.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/unsafeptr/unsafeptr.go @@ -15,9 +15,7 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/ast/inspector" - "golang.org/x/tools/internal/aliases" ) //go:embed doc.go @@ -70,7 +68,7 @@ func isSafeUintptr(info *types.Info, x ast.Expr) bool { // Check unsafe.Pointer safety rules according to // https://golang.org/pkg/unsafe/#Pointer. - switch x := astutil.Unparen(x).(type) { + switch x := ast.Unparen(x).(type) { case *ast.SelectorExpr: // "(6) Conversion of a reflect.SliceHeader or // reflect.StringHeader Data field to or from Pointer." @@ -89,7 +87,7 @@ func isSafeUintptr(info *types.Info, x ast.Expr) bool { // by the time we get to the conversion at the end. // For now approximate by saying that *Header is okay // but Header is not. 
- pt, ok := aliases.Unalias(info.Types[x.X].Type).(*types.Pointer) + pt, ok := types.Unalias(info.Types[x.X].Type).(*types.Pointer) if ok && isReflectHeader(pt.Elem()) { return true } @@ -119,7 +117,7 @@ func isSafeUintptr(info *types.Info, x ast.Expr) bool { // isSafeArith reports whether x is a pointer arithmetic expression that is safe // to convert to unsafe.Pointer. func isSafeArith(info *types.Info, x ast.Expr) bool { - switch x := astutil.Unparen(x).(type) { + switch x := ast.Unparen(x).(type) { case *ast.CallExpr: // Base case: initial conversion from unsafe.Pointer to uintptr. return len(x.Args) == 1 && diff --git a/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/unusedresult.go b/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/unusedresult.go index 76f42b052e..c27d26dd6e 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/unusedresult.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/unusedresult/unusedresult.go @@ -24,7 +24,6 @@ import ( "golang.org/x/tools/go/analysis" "golang.org/x/tools/go/analysis/passes/inspect" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" - "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/ast/inspector" "golang.org/x/tools/go/types/typeutil" ) @@ -101,7 +100,7 @@ func run(pass *analysis.Pass) (interface{}, error) { (*ast.ExprStmt)(nil), } inspect.Preorder(nodeFilter, func(n ast.Node) { - call, ok := astutil.Unparen(n.(*ast.ExprStmt).X).(*ast.CallExpr) + call, ok := ast.Unparen(n.(*ast.ExprStmt).X).(*ast.CallExpr) if !ok { return // not a call statement } diff --git a/vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/unusedwrite.go b/vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/unusedwrite.go index a99c548335..2e209c8a6c 100644 --- a/vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/unusedwrite.go +++ b/vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/unusedwrite.go @@ -12,7 +12,6 @@ import ( "golang.org/x/tools/go/analysis/passes/buildssa" "golang.org/x/tools/go/analysis/passes/internal/analysisutil" "golang.org/x/tools/go/ssa" - "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/typeparams" ) @@ -29,10 +28,17 @@ var Analyzer = &analysis.Analyzer{ Run: run, } -func run(pass *analysis.Pass) (interface{}, error) { +func run(pass *analysis.Pass) (any, error) { + for _, pkg := range pass.Pkg.Imports() { + if pkg.Path() == "unsafe" { + // See golang/go#67684, or testdata/src/importsunsafe: the unusedwrite + // analyzer may have false positives when used with unsafe. + return nil, nil + } + } + ssainput := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA) for _, fn := range ssainput.SrcFuncs { - // TODO(taking): Iterate over fn._Instantiations() once exported. If so, have 1 report per Pos(). 
reports := checkStores(fn) for _, store := range reports { switch addr := store.Addr.(type) { @@ -143,7 +149,7 @@ func hasStructOrArrayType(v ssa.Value) bool { // func (t T) f() { ...} // the receiver object is of type *T: // t0 = local T (t) *T - if tp, ok := aliases.Unalias(alloc.Type()).(*types.Pointer); ok { + if tp, ok := types.Unalias(alloc.Type()).(*types.Pointer); ok { return isStructOrArray(tp.Elem()) } return false diff --git a/vendor/golang.org/x/tools/go/ast/astutil/imports.go b/vendor/golang.org/x/tools/go/ast/astutil/imports.go index 18d1adb05d..a6b5ed0a89 100644 --- a/vendor/golang.org/x/tools/go/ast/astutil/imports.go +++ b/vendor/golang.org/x/tools/go/ast/astutil/imports.go @@ -344,7 +344,12 @@ func RewriteImport(fset *token.FileSet, f *ast.File, oldPath, newPath string) (r } // UsesImport reports whether a given import is used. +// The provided File must have been parsed with syntactic object resolution +// (not using go/parser.SkipObjectResolution). func UsesImport(f *ast.File, path string) (used bool) { + if f.Scope == nil { + panic("file f was not parsed with syntactic object resolution") + } spec := importSpec(f, path) if spec == nil { return diff --git a/vendor/golang.org/x/tools/go/ast/astutil/util.go b/vendor/golang.org/x/tools/go/ast/astutil/util.go index 6bdcf70ac2..ca71e3e105 100644 --- a/vendor/golang.org/x/tools/go/ast/astutil/util.go +++ b/vendor/golang.org/x/tools/go/ast/astutil/util.go @@ -7,13 +7,5 @@ package astutil import "go/ast" // Unparen returns e with any enclosing parentheses stripped. -// TODO(adonovan): use go1.22's ast.Unparen. -func Unparen(e ast.Expr) ast.Expr { - for { - p, ok := e.(*ast.ParenExpr) - if !ok { - return e - } - e = p.X - } -} +// Deprecated: use [ast.Unparen]. +func Unparen(e ast.Expr) ast.Expr { return ast.Unparen(e) } diff --git a/vendor/golang.org/x/tools/go/ast/inspector/inspector.go b/vendor/golang.org/x/tools/go/ast/inspector/inspector.go index 1fc1de0bd1..958cf38deb 100644 --- a/vendor/golang.org/x/tools/go/ast/inspector/inspector.go +++ b/vendor/golang.org/x/tools/go/ast/inspector/inspector.go @@ -73,6 +73,15 @@ func (in *Inspector) Preorder(types []ast.Node, f func(ast.Node)) { // check, Preorder is almost twice as fast as Nodes. The two // features seem to contribute similar slowdowns (~1.4x each). + // This function is equivalent to the PreorderSeq call below, + // but to avoid the additional dynamic call (which adds 13-35% + // to the benchmarks), we expand it out. + // + // in.PreorderSeq(types...)(func(n ast.Node) bool { + // f(n) + // return true + // }) + mask := maskOf(types) for i := 0; i < len(in.events); { ev := in.events[i] @@ -171,7 +180,9 @@ func (in *Inspector) WithStack(types []ast.Node, f func(n ast.Node, push bool, s // traverse builds the table of events representing a traversal. func traverse(files []*ast.File) []event { // Preallocate approximate number of events - // based on source file extent. + // based on source file extent of the declarations. + // (We use End-Pos not FileStart-FileEnd to neglect + // the effect of long doc comments.) // This makes traverse faster by 4x (!). var extent int for _, f := range files { diff --git a/vendor/golang.org/x/tools/go/ast/inspector/iter.go b/vendor/golang.org/x/tools/go/ast/inspector/iter.go new file mode 100644 index 0000000000..b7e959114c --- /dev/null +++ b/vendor/golang.org/x/tools/go/ast/inspector/iter.go @@ -0,0 +1,85 @@ +// Copyright 2024 The Go Authors. All rights reserved. 
+// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.23 + +package inspector + +import ( + "go/ast" + "iter" +) + +// PreorderSeq returns an iterator that visits all the +// nodes of the files supplied to New in depth-first order. +// It visits each node n before n's children. +// The complete traversal sequence is determined by ast.Inspect. +// +// The types argument, if non-empty, enables type-based +// filtering of events: only nodes whose type matches an +// element of the types slice are included in the sequence. +func (in *Inspector) PreorderSeq(types ...ast.Node) iter.Seq[ast.Node] { + + // This implementation is identical to Preorder, + // except that it supports breaking out of the loop. + + return func(yield func(ast.Node) bool) { + mask := maskOf(types) + for i := 0; i < len(in.events); { + ev := in.events[i] + if ev.index > i { + // push + if ev.typ&mask != 0 { + if !yield(ev.node) { + break + } + } + pop := ev.index + if in.events[pop].typ&mask == 0 { + // Subtrees do not contain types: skip them and pop. + i = pop + 1 + continue + } + } + i++ + } + } +} + +// All[N] returns an iterator over all the nodes of type N. +// N must be a pointer-to-struct type that implements ast.Node. +// +// Example: +// +// for call := range All[*ast.CallExpr](in) { ... } +func All[N interface { + *S + ast.Node +}, S any](in *Inspector) iter.Seq[N] { + + // To avoid additional dynamic call overheads, + // we duplicate rather than call the logic of PreorderSeq. + + mask := typeOf((N)(nil)) + return func(yield func(N) bool) { + for i := 0; i < len(in.events); { + ev := in.events[i] + if ev.index > i { + // push + if ev.typ&mask != 0 { + if !yield(ev.node.(N)) { + break + } + } + pop := ev.index + if in.events[pop].typ&mask == 0 { + // Subtrees do not contain types: skip them and pop. + i = pop + 1 + continue + } + } + i++ + } + } +} diff --git a/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go b/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go index 137cc8df1d..f3ab0a2e12 100644 --- a/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go +++ b/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go @@ -2,22 +2,64 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Package gcexportdata provides functions for locating, reading, and -// writing export data files containing type information produced by the -// gc compiler. This package supports go1.7 export data format and all -// later versions. -// -// Although it might seem convenient for this package to live alongside -// go/types in the standard library, this would cause version skew -// problems for developer tools that use it, since they must be able to -// consume the outputs of the gc compiler both before and after a Go -// update such as from Go 1.7 to Go 1.8. Because this package lives in -// golang.org/x/tools, sites can update their version of this repo some -// time before the Go 1.8 release and rebuild and redeploy their -// developer tools, which will then be able to consume both Go 1.7 and -// Go 1.8 export data files, so they will work before and after the -// Go update. (See discussion at https://golang.org/issue/15651.) 
-package gcexportdata // import "golang.org/x/tools/go/gcexportdata" +// Package gcexportdata provides functions for reading and writing +// export data, which is a serialized description of the API of a Go +// package including the names, kinds, types, and locations of all +// exported declarations. +// +// The standard Go compiler (cmd/compile) writes an export data file +// for each package it compiles, which it later reads when compiling +// packages that import the earlier one. The compiler must thus +// contain logic to both write and read export data. +// (See the "Export" section in the cmd/compile/README file.) +// +// The [Read] function in this package can read files produced by the +// compiler, producing [go/types] data structures. As a matter of +// policy, Read supports export data files produced by only the last +// two Go releases plus tip; see https://go.dev/issue/68898. The +// export data files produced by the compiler contain additional +// details related to generics, inlining, and other optimizations that +// cannot be decoded by the [Read] function. +// +// In files written by the compiler, the export data is not at the +// start of the file. Before calling Read, use [NewReader] to locate +// the desired portion of the file. +// +// The [Write] function in this package encodes the exported API of a +// Go package ([types.Package]) as a file. Such files can be later +// decoded by Read, but cannot be consumed by the compiler. +// +// # Future changes +// +// Although Read supports the formats written by both Write and the +// compiler, the two are quite different, and there is an open +// proposal (https://go.dev/issue/69491) to separate these APIs. +// +// Under that proposal, this package would ultimately provide only the +// Read operation for compiler export data, which must be defined in +// this module (golang.org/x/tools), not in the standard library, to +// avoid version skew for developer tools that need to read compiler +// export data both before and after a Go release, such as from Go +// 1.23 to Go 1.24. Because this package lives in the tools module, +// clients can update their version of the module some time before the +// Go 1.24 release and rebuild and redeploy their tools, which will +// then be able to consume both Go 1.23 and Go 1.24 export data files, +// so they will work before and after the Go update. (See discussion +// at https://go.dev/issue/15651.) +// +// The operations to import and export [go/types] data structures +// would be defined in the go/types package as Import and Export. +// [Write] would (eventually) delegate to Export, +// and [Read], when it detects a file produced by Export, +// would delegate to Import. +// +// # Deprecations +// +// The [NewImporter] and [Find] functions are deprecated and should +// not be used in new code. The [WriteBundle] and [ReadBundle] +// functions are experimental, and there is an open proposal to +// deprecate them (https://go.dev/issue/69573). +package gcexportdata import ( "bufio" @@ -100,6 +142,11 @@ func readAll(r io.Reader) ([]byte, error) { // Read reads export data from in, decodes it, and returns type // information for the package. // +// Read is capable of reading export data produced by [Write] at the +// same source code version, or by the last two Go releases (plus tip) +// of the standard Go compiler. Reading files from older compilers may +// produce an error. 
+// // The package path (effectively its linker symbol prefix) is // specified by path, since unlike the package name, this information // may not be recorded in the export data. @@ -128,14 +175,26 @@ func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package, // (from "version"). Select appropriate importer. if len(data) > 0 { switch data[0] { - case 'v', 'c', 'd': // binary, till go1.10 + case 'v', 'c', 'd': + // binary, produced by cmd/compile till go1.10 return nil, fmt.Errorf("binary (%c) import format is no longer supported", data[0]) - case 'i': // indexed, till go1.19 + case 'i': + // indexed, produced by cmd/compile till go1.19, + // and also by [Write]. + // + // If proposal #69491 is accepted, go/types + // serialization will be implemented by + // types.Export, to which Write would eventually + // delegate (explicitly dropping any pretence at + // inter-version Write-Read compatibility). + // This [Read] function would delegate to types.Import + // when it detects that the file was produced by Export. _, pkg, err := gcimporter.IImportData(fset, imports, data[1:], path) return pkg, err - case 'u': // unified, from go1.20 + case 'u': + // unified, produced by cmd/compile since go1.20 _, pkg, err := gcimporter.UImportData(fset, imports, data[1:], path) return pkg, err diff --git a/vendor/golang.org/x/tools/go/loader/loader.go b/vendor/golang.org/x/tools/go/loader/loader.go index 013c0f505b..2d4865f664 100644 --- a/vendor/golang.org/x/tools/go/loader/loader.go +++ b/vendor/golang.org/x/tools/go/loader/loader.go @@ -23,7 +23,6 @@ import ( "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/internal/cgo" - "golang.org/x/tools/internal/versions" ) var ignoreVendor build.ImportMode @@ -341,13 +340,12 @@ func (conf *Config) addImport(path string, tests bool) { func (prog *Program) PathEnclosingInterval(start, end token.Pos) (pkg *PackageInfo, path []ast.Node, exact bool) { for _, info := range prog.AllPackages { for _, f := range info.Files { - if f.Pos() == token.NoPos { - // This can happen if the parser saw - // too many errors and bailed out. - // (Use parser.AllErrors to prevent that.) + if f.FileStart == token.NoPos { + // Workaround for #70162 (undefined FileStart). + // TODO(adonovan): delete once go1.24 is assured. continue } - if !tokenFileContainsPos(prog.Fset.File(f.Pos()), start) { + if !tokenFileContainsPos(prog.Fset.File(f.FileStart), start) { continue } if path, exact := astutil.PathEnclosingInterval(f, start, end); path != nil { @@ -1029,18 +1027,18 @@ func (imp *importer) newPackageInfo(path, dir string) *PackageInfo { info := &PackageInfo{ Pkg: pkg, Info: types.Info{ - Types: make(map[ast.Expr]types.TypeAndValue), - Defs: make(map[*ast.Ident]types.Object), - Uses: make(map[*ast.Ident]types.Object), - Implicits: make(map[ast.Node]types.Object), - Instances: make(map[*ast.Ident]types.Instance), - Scopes: make(map[ast.Node]*types.Scope), - Selections: make(map[*ast.SelectorExpr]*types.Selection), + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + Instances: make(map[*ast.Ident]types.Instance), + Scopes: make(map[ast.Node]*types.Scope), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + FileVersions: make(map[*ast.File]string), }, errorFunc: imp.conf.TypeChecker.Error, dir: dir, } - versions.InitFileVersions(&info.Info) // Copy the types.Config so we can vary it across PackageInfos. 
tc := imp.conf.TypeChecker diff --git a/vendor/golang.org/x/tools/go/packages/doc.go b/vendor/golang.org/x/tools/go/packages/doc.go index 3531ac8f5f..f1931d10ee 100644 --- a/vendor/golang.org/x/tools/go/packages/doc.go +++ b/vendor/golang.org/x/tools/go/packages/doc.go @@ -64,7 +64,7 @@ graph using the Imports fields. The Load function can be configured by passing a pointer to a Config as the first argument. A nil Config is equivalent to the zero Config, which -causes Load to run in LoadFiles mode, collecting minimal information. +causes Load to run in [LoadFiles] mode, collecting minimal information. See the documentation for type Config for details. As noted earlier, the Config.Mode controls the amount of detail @@ -72,14 +72,14 @@ reported about the loaded packages. See the documentation for type LoadMode for details. Most tools should pass their command-line arguments (after any flags) -uninterpreted to [Load], so that it can interpret them +uninterpreted to Load, so that it can interpret them according to the conventions of the underlying build system. See the Example function for typical usage. # The driver protocol -[Load] may be used to load Go packages even in Go projects that use +Load may be used to load Go packages even in Go projects that use alternative build systems, by installing an appropriate "driver" program for the build system and specifying its location in the GOPACKAGESDRIVER environment variable. @@ -97,6 +97,15 @@ JSON-encoded [DriverRequest] message providing additional information is written to the driver's standard input. The driver must write a JSON-encoded [DriverResponse] message to its standard output. (This message differs from the JSON schema produced by 'go list'.) + +The value of the PWD environment variable seen by the driver process +is the preferred name of its working directory. (The working directory +may have other aliases due to symbolic links; see the comment on the +Dir field of [exec.Cmd] for related information.) +When the driver process emits in its response the name of a file +that is a descendant of this directory, it must use an absolute path +that has the value of PWD as a prefix, to ensure that the returned +filenames satisfy the original query. */ package packages // import "golang.org/x/tools/go/packages" diff --git a/vendor/golang.org/x/tools/go/packages/external.go b/vendor/golang.org/x/tools/go/packages/external.go index c2b4b711b5..96db9daf31 100644 --- a/vendor/golang.org/x/tools/go/packages/external.go +++ b/vendor/golang.org/x/tools/go/packages/external.go @@ -79,10 +79,10 @@ type DriverResponse struct { // driver is the type for functions that query the build system for the // packages named by the patterns. -type driver func(cfg *Config, patterns ...string) (*DriverResponse, error) +type driver func(cfg *Config, patterns []string) (*DriverResponse, error) // findExternalDriver returns the file path of a tool that supplies -// the build system package structure, or "" if not found." +// the build system package structure, or "" if not found. // If GOPACKAGESDRIVER is set in the environment findExternalTool returns its // value, otherwise it searches for a binary named gopackagesdriver on the PATH. 
func findExternalDriver(cfg *Config) driver { @@ -103,7 +103,7 @@ func findExternalDriver(cfg *Config) driver { return nil } } - return func(cfg *Config, words ...string) (*DriverResponse, error) { + return func(cfg *Config, patterns []string) (*DriverResponse, error) { req, err := json.Marshal(DriverRequest{ Mode: cfg.Mode, Env: cfg.Env, @@ -117,7 +117,7 @@ func findExternalDriver(cfg *Config) driver { buf := new(bytes.Buffer) stderr := new(bytes.Buffer) - cmd := exec.CommandContext(cfg.Context, tool, words...) + cmd := exec.CommandContext(cfg.Context, tool, patterns...) cmd.Dir = cfg.Dir // The cwd gets resolved to the real path. On Darwin, where // /tmp is a symlink, this breaks anything that expects the diff --git a/vendor/golang.org/x/tools/go/packages/golist.go b/vendor/golang.org/x/tools/go/packages/golist.go index 1a3a5b44f5..76f910ecec 100644 --- a/vendor/golang.org/x/tools/go/packages/golist.go +++ b/vendor/golang.org/x/tools/go/packages/golist.go @@ -80,6 +80,12 @@ type golistState struct { cfg *Config ctx context.Context + runner *gocommand.Runner + + // overlay is the JSON file that encodes the Config.Overlay + // mapping, used by 'go list -overlay=...'. + overlay string + envOnce sync.Once goEnvError error goEnv map[string]string @@ -127,7 +133,10 @@ func (state *golistState) mustGetEnv() map[string]string { // goListDriver uses the go list command to interpret the patterns and produce // the build system package structure. // See driver for more details. -func goListDriver(cfg *Config, patterns ...string) (_ *DriverResponse, err error) { +// +// overlay is the JSON file that encodes the cfg.Overlay +// mapping, used by 'go list -overlay=...' +func goListDriver(cfg *Config, runner *gocommand.Runner, overlay string, patterns []string) (_ *DriverResponse, err error) { // Make sure that any asynchronous go commands are killed when we return. parentCtx := cfg.Context if parentCtx == nil { @@ -142,13 +151,15 @@ func goListDriver(cfg *Config, patterns ...string) (_ *DriverResponse, err error cfg: cfg, ctx: ctx, vendorDirs: map[string]bool{}, + overlay: overlay, + runner: runner, } // Fill in response.Sizes asynchronously if necessary. - if cfg.Mode&NeedTypesSizes != 0 || cfg.Mode&NeedTypes != 0 { + if cfg.Mode&NeedTypesSizes != 0 || cfg.Mode&(NeedTypes|NeedTypesInfo) != 0 { errCh := make(chan error) go func() { - compiler, arch, err := getSizesForArgs(ctx, state.cfgInvocation(), cfg.gocmdRunner) + compiler, arch, err := getSizesForArgs(ctx, state.cfgInvocation(), runner) response.dr.Compiler = compiler response.dr.Arch = arch errCh <- err @@ -681,7 +692,7 @@ func (state *golistState) shouldAddFilenameFromError(p *jsonPackage) bool { // getGoVersion returns the effective minor version of the go command. 
func (state *golistState) getGoVersion() (int, error) { state.goVersionOnce.Do(func() { - state.goVersion, state.goVersionError = gocommand.GoVersion(state.ctx, state.cfgInvocation(), state.cfg.gocmdRunner) + state.goVersion, state.goVersionError = gocommand.GoVersion(state.ctx, state.cfgInvocation(), state.runner) }) return state.goVersion, state.goVersionError } @@ -751,7 +762,7 @@ func jsonFlag(cfg *Config, goVersion int) string { } } addFields("Name", "ImportPath", "Error") // These fields are always needed - if cfg.Mode&NeedFiles != 0 || cfg.Mode&NeedTypes != 0 { + if cfg.Mode&NeedFiles != 0 || cfg.Mode&(NeedTypes|NeedTypesInfo) != 0 { addFields("Dir", "GoFiles", "IgnoredGoFiles", "IgnoredOtherFiles", "CFiles", "CgoFiles", "CXXFiles", "MFiles", "HFiles", "FFiles", "SFiles", "SwigFiles", "SwigCXXFiles", "SysoFiles") @@ -759,7 +770,7 @@ func jsonFlag(cfg *Config, goVersion int) string { addFields("TestGoFiles", "XTestGoFiles") } } - if cfg.Mode&NeedTypes != 0 { + if cfg.Mode&(NeedTypes|NeedTypesInfo) != 0 { // CompiledGoFiles seems to be required for the test case TestCgoNoSyntax, // even when -compiled isn't passed in. // TODO(#52435): Should we make the test ask for -compiled, or automatically @@ -840,7 +851,7 @@ func (state *golistState) cfgInvocation() gocommand.Invocation { Env: cfg.Env, Logf: cfg.Logf, WorkingDir: cfg.Dir, - Overlay: cfg.goListOverlayFile, + Overlay: state.overlay, } } @@ -851,11 +862,8 @@ func (state *golistState) invokeGo(verb string, args ...string) (*bytes.Buffer, inv := state.cfgInvocation() inv.Verb = verb inv.Args = args - gocmdRunner := cfg.gocmdRunner - if gocmdRunner == nil { - gocmdRunner = &gocommand.Runner{} - } - stdout, stderr, friendlyErr, err := gocmdRunner.RunRaw(cfg.Context, inv) + + stdout, stderr, friendlyErr, err := state.runner.RunRaw(cfg.Context, inv) if err != nil { // Check for 'go' executable not being found. if ee, ok := err.(*exec.Error); ok && ee.Err == exec.ErrNotFound { @@ -879,6 +887,12 @@ func (state *golistState) invokeGo(verb string, args ...string) (*bytes.Buffer, return nil, friendlyErr } + // Return an error if 'go list' failed due to missing tools in + // $GOROOT/pkg/tool/$GOOS_$GOARCH (#69606). + if len(stderr.String()) > 0 && strings.Contains(stderr.String(), `go: no such tool`) { + return nil, friendlyErr + } + // Is there an error running the C compiler in cgo? This will be reported in the "Error" field // and should be suppressed by go list -e. 
// diff --git a/vendor/golang.org/x/tools/go/packages/loadmode_string.go b/vendor/golang.org/x/tools/go/packages/loadmode_string.go index 5c080d21b5..5fcad6ea6d 100644 --- a/vendor/golang.org/x/tools/go/packages/loadmode_string.go +++ b/vendor/golang.org/x/tools/go/packages/loadmode_string.go @@ -9,49 +9,46 @@ import ( "strings" ) -var allModes = []LoadMode{ - NeedName, - NeedFiles, - NeedCompiledGoFiles, - NeedImports, - NeedDeps, - NeedExportFile, - NeedTypes, - NeedSyntax, - NeedTypesInfo, - NeedTypesSizes, +var modes = [...]struct { + mode LoadMode + name string +}{ + {NeedName, "NeedName"}, + {NeedFiles, "NeedFiles"}, + {NeedCompiledGoFiles, "NeedCompiledGoFiles"}, + {NeedImports, "NeedImports"}, + {NeedDeps, "NeedDeps"}, + {NeedExportFile, "NeedExportFile"}, + {NeedTypes, "NeedTypes"}, + {NeedSyntax, "NeedSyntax"}, + {NeedTypesInfo, "NeedTypesInfo"}, + {NeedTypesSizes, "NeedTypesSizes"}, + {NeedModule, "NeedModule"}, + {NeedEmbedFiles, "NeedEmbedFiles"}, + {NeedEmbedPatterns, "NeedEmbedPatterns"}, } -var modeStrings = []string{ - "NeedName", - "NeedFiles", - "NeedCompiledGoFiles", - "NeedImports", - "NeedDeps", - "NeedExportFile", - "NeedTypes", - "NeedSyntax", - "NeedTypesInfo", - "NeedTypesSizes", -} - -func (mod LoadMode) String() string { - m := mod - if m == 0 { +func (mode LoadMode) String() string { + if mode == 0 { return "LoadMode(0)" } var out []string - for i, x := range allModes { - if x > m { - break + // named bits + for _, item := range modes { + if (mode & item.mode) != 0 { + mode ^= item.mode + out = append(out, item.name) } - if (m & x) != 0 { - out = append(out, modeStrings[i]) - m = m ^ x + } + // unnamed residue + if mode != 0 { + if out == nil { + return fmt.Sprintf("LoadMode(%#x)", int(mode)) } + out = append(out, fmt.Sprintf("%#x", int(mode))) } - if m != 0 { - out = append(out, "Unknown") + if len(out) == 1 { + return out[0] } - return fmt.Sprintf("LoadMode(%s)", strings.Join(out, "|")) + return "(" + strings.Join(out, "|") + ")" } diff --git a/vendor/golang.org/x/tools/go/packages/packages.go b/vendor/golang.org/x/tools/go/packages/packages.go index 0b6bfaff80..2ecc64238e 100644 --- a/vendor/golang.org/x/tools/go/packages/packages.go +++ b/vendor/golang.org/x/tools/go/packages/packages.go @@ -16,13 +16,13 @@ import ( "go/scanner" "go/token" "go/types" - "io" "log" "os" "path/filepath" "runtime" "strings" "sync" + "sync/atomic" "time" "golang.org/x/sync/errgroup" @@ -31,7 +31,6 @@ import ( "golang.org/x/tools/internal/gocommand" "golang.org/x/tools/internal/packagesinternal" "golang.org/x/tools/internal/typesinternal" - "golang.org/x/tools/internal/versions" ) // A LoadMode controls the amount of detail to return when loading. @@ -46,17 +45,17 @@ import ( // // Unfortunately there are a number of open bugs related to // interactions among the LoadMode bits: -// - https://github.com/golang/go/issues/56633 -// - https://github.com/golang/go/issues/56677 -// - https://github.com/golang/go/issues/58726 -// - https://github.com/golang/go/issues/63517 +// - https://github.com/golang/go/issues/56633 +// - https://github.com/golang/go/issues/56677 +// - https://github.com/golang/go/issues/58726 +// - https://github.com/golang/go/issues/63517 type LoadMode int const ( // NeedName adds Name and PkgPath. NeedName LoadMode = 1 << iota - // NeedFiles adds GoFiles and OtherFiles. + // NeedFiles adds GoFiles, OtherFiles, and IgnoredFiles NeedFiles // NeedCompiledGoFiles adds CompiledGoFiles. @@ -78,7 +77,7 @@ const ( // NeedSyntax adds Syntax and Fset. 
NeedSyntax - // NeedTypesInfo adds TypesInfo. + // NeedTypesInfo adds TypesInfo and Fset. NeedTypesInfo // NeedTypesSizes adds TypesSizes. @@ -103,25 +102,37 @@ const ( // NeedEmbedPatterns adds EmbedPatterns. NeedEmbedPatterns + + // Be sure to update loadmode_string.go when adding new items! ) const ( + // LoadFiles loads the name and file names for the initial packages. + // // Deprecated: LoadFiles exists for historical compatibility // and should not be used. Please directly specify the needed fields using the Need values. LoadFiles = NeedName | NeedFiles | NeedCompiledGoFiles + // LoadImports loads the name, file names, and import mapping for the initial packages. + // // Deprecated: LoadImports exists for historical compatibility // and should not be used. Please directly specify the needed fields using the Need values. LoadImports = LoadFiles | NeedImports + // LoadTypes loads exported type information for the initial packages. + // // Deprecated: LoadTypes exists for historical compatibility // and should not be used. Please directly specify the needed fields using the Need values. LoadTypes = LoadImports | NeedTypes | NeedTypesSizes + // LoadSyntax loads typed syntax for the initial packages. + // // Deprecated: LoadSyntax exists for historical compatibility // and should not be used. Please directly specify the needed fields using the Need values. LoadSyntax = LoadTypes | NeedSyntax | NeedTypesInfo + // LoadAllSyntax loads typed syntax for the initial packages and all dependencies. + // // Deprecated: LoadAllSyntax exists for historical compatibility // and should not be used. Please directly specify the needed fields using the Need values. LoadAllSyntax = LoadSyntax | NeedDeps @@ -133,13 +144,7 @@ const ( // A Config specifies details about how packages should be loaded. // The zero value is a valid configuration. // -// Calls to Load do not modify this struct. -// -// TODO(adonovan): #67702: this is currently false: in fact, -// calls to [Load] do not modify the public fields of this struct, but -// may modify hidden fields, so concurrent calls to [Load] must not -// use the same Config. But perhaps we should reestablish the -// documented invariant. +// Calls to [Load] do not modify this struct. type Config struct { // Mode controls the level of information returned for each package. Mode LoadMode @@ -170,19 +175,10 @@ type Config struct { // Env []string - // gocmdRunner guards go command calls from concurrency errors. - gocmdRunner *gocommand.Runner - // BuildFlags is a list of command-line flags to be passed through to // the build system's query tool. BuildFlags []string - // modFile will be used for -modfile in go command invocations. - modFile string - - // modFlag will be used for -modfile in go command invocations. - modFlag string - // Fset provides source position information for syntax trees and types. // If Fset is nil, Load will use a new fileset, but preserve Fset's value. Fset *token.FileSet @@ -229,21 +225,24 @@ type Config struct { // drivers may vary in their level of support for overlays. Overlay map[string][]byte - // goListOverlayFile is the JSON file that encodes the Overlay - // mapping, used by 'go list -overlay=...' - goListOverlayFile string + // -- Hidden configuration fields only for use in x/tools -- + + // modFile will be used for -modfile in go command invocations. + modFile string + + // modFlag will be used for -modfile in go command invocations. + modFlag string } // Load loads and returns the Go packages named by the given patterns. 
// -// Config specifies loading options; -// nil behaves the same as an empty Config. +// The cfg parameter specifies loading options; nil behaves the same as an empty [Config]. // // The [Config.Mode] field is a set of bits that determine what kinds // of information should be computed and returned. Modes that require // more information tend to be slower. See [LoadMode] for details // and important caveats. Its zero value is equivalent to -// NeedName | NeedFiles | NeedCompiledGoFiles. +// [NeedName] | [NeedFiles] | [NeedCompiledGoFiles]. // // Each call to Load returns a new set of [Package] instances. // The Packages and their Imports form a directed acyclic graph. @@ -260,7 +259,7 @@ type Config struct { // Errors associated with a particular package are recorded in the // corresponding Package's Errors list, and do not cause Load to // return an error. Clients may need to handle such errors before -// proceeding with further analysis. The PrintErrors function is +// proceeding with further analysis. The [PrintErrors] function is // provided for convenient display of all errors. func Load(cfg *Config, patterns ...string) ([]*Package, error) { ld := newLoader(cfg) @@ -323,21 +322,24 @@ func defaultDriver(cfg *Config, patterns ...string) (*DriverResponse, bool, erro } else if !response.NotHandled { return response, true, nil } - // (fall through) + // not handled: fall through } // go list fallback - // + // Write overlays once, as there are many calls // to 'go list' (one per chunk plus others too). - overlay, cleanupOverlay, err := gocommand.WriteOverlays(cfg.Overlay) + overlayFile, cleanupOverlay, err := gocommand.WriteOverlays(cfg.Overlay) if err != nil { return nil, false, err } defer cleanupOverlay() - cfg.goListOverlayFile = overlay - response, err := callDriverOnChunks(goListDriver, cfg, chunks) + var runner gocommand.Runner // (shared across many 'go list' calls) + driver := func(cfg *Config, patterns []string) (*DriverResponse, error) { + return goListDriver(cfg, &runner, overlayFile, patterns) + } + response, err := callDriverOnChunks(driver, cfg, chunks) if err != nil { return nil, false, err } @@ -375,16 +377,14 @@ func splitIntoChunks(patterns []string, argMax int) ([][]string, error) { func callDriverOnChunks(driver driver, cfg *Config, chunks [][]string) (*DriverResponse, error) { if len(chunks) == 0 { - return driver(cfg) + return driver(cfg, nil) } responses := make([]*DriverResponse, len(chunks)) errNotHandled := errors.New("driver returned NotHandled") var g errgroup.Group for i, chunk := range chunks { - i := i - chunk := chunk g.Go(func() (err error) { - responses[i], err = driver(cfg, chunk...) 
+ responses[i], err = driver(cfg, chunk) if responses[i] != nil && responses[i].NotHandled { err = errNotHandled } @@ -681,18 +681,19 @@ func (p *Package) String() string { return p.ID } // loaderPackage augments Package with state used during the loading phase type loaderPackage struct { *Package - importErrors map[string]error // maps each bad import to its error - loadOnce sync.Once - color uint8 // for cycle detection - needsrc bool // load from source (Mode >= LoadTypes) - needtypes bool // type information is either requested or depended on - initial bool // package was matched by a pattern - goVersion int // minor version number of go command on PATH + importErrors map[string]error // maps each bad import to its error + preds []*loaderPackage // packages that import this one + unfinishedSuccs atomic.Int32 // number of direct imports not yet loaded + color uint8 // for cycle detection + needsrc bool // load from source (Mode >= LoadTypes) + needtypes bool // type information is either requested or depended on + initial bool // package was matched by a pattern + goVersion int // minor version number of go command on PATH } // loader holds the working state of a single call to load. type loader struct { - pkgs map[string]*loaderPackage + pkgs map[string]*loaderPackage // keyed by Package.ID Config sizes types.Sizes // non-nil if needed by mode parseCache map[string]*parseValue @@ -738,9 +739,6 @@ func newLoader(cfg *Config) *loader { if ld.Config.Env == nil { ld.Config.Env = os.Environ() } - if ld.Config.gocmdRunner == nil { - ld.Config.gocmdRunner = &gocommand.Runner{} - } if ld.Context == nil { ld.Context = context.Background() } @@ -754,7 +752,7 @@ func newLoader(cfg *Config) *loader { ld.requestedMode = ld.Mode ld.Mode = impliedLoadMode(ld.Mode) - if ld.Mode&NeedTypes != 0 || ld.Mode&NeedSyntax != 0 { + if ld.Mode&(NeedSyntax|NeedTypes|NeedTypesInfo) != 0 { if ld.Fset == nil { ld.Fset = token.NewFileSet() } @@ -763,6 +761,7 @@ func newLoader(cfg *Config) *loader { // because we load source if export data is missing. if ld.ParseFile == nil { ld.ParseFile = func(fset *token.FileSet, filename string, src []byte) (*ast.File, error) { + // We implicitly promise to keep doing ast.Object resolution. :( const mode = parser.AllErrors | parser.ParseComments return parser.ParseFile(fset, filename, src, mode) } @@ -794,7 +793,7 @@ func (ld *loader) refine(response *DriverResponse) ([]*Package, error) { exportDataInvalid := len(ld.Overlay) > 0 || pkg.ExportFile == "" && pkg.PkgPath != "unsafe" // This package needs type information if the caller requested types and the package is // either a root, or it's a non-root and the user requested dependencies ... - needtypes := (ld.Mode&NeedTypes|NeedTypesInfo != 0 && (rootIndex >= 0 || ld.Mode&NeedDeps != 0)) + needtypes := (ld.Mode&(NeedTypes|NeedTypesInfo) != 0 && (rootIndex >= 0 || ld.Mode&NeedDeps != 0)) // This package needs source if the call requested source (or types info, which implies source) // and the package is either a root, or itas a non- root and the user requested dependencies... needsrc := ((ld.Mode&(NeedSyntax|NeedTypesInfo) != 0 && (rootIndex >= 0 || ld.Mode&NeedDeps != 0)) || @@ -819,9 +818,10 @@ func (ld *loader) refine(response *DriverResponse) ([]*Package, error) { } } - if ld.Mode&NeedImports != 0 { - // Materialize the import graph. - + // Materialize the import graph if it is needed (NeedImports), + // or if we'll be using loadPackages (Need{Syntax|Types|TypesInfo}). 
+ var leaves []*loaderPackage // packages with no unfinished successors + if ld.Mode&(NeedImports|NeedSyntax|NeedTypes|NeedTypesInfo) != 0 { const ( white = 0 // new grey = 1 // in progress @@ -840,63 +840,76 @@ func (ld *loader) refine(response *DriverResponse) ([]*Package, error) { // dependency on a package that does. These are the only packages // for which we load source code. var stack []*loaderPackage - var visit func(lpkg *loaderPackage) bool - visit = func(lpkg *loaderPackage) bool { - switch lpkg.color { - case black: - return lpkg.needsrc - case grey: + var visit func(from, lpkg *loaderPackage) bool + visit = func(from, lpkg *loaderPackage) bool { + if lpkg.color == grey { panic("internal error: grey node") } - lpkg.color = grey - stack = append(stack, lpkg) // push - stubs := lpkg.Imports // the structure form has only stubs with the ID in the Imports - lpkg.Imports = make(map[string]*Package, len(stubs)) - for importPath, ipkg := range stubs { - var importErr error - imp := ld.pkgs[ipkg.ID] - if imp == nil { - // (includes package "C" when DisableCgo) - importErr = fmt.Errorf("missing package: %q", ipkg.ID) - } else if imp.color == grey { - importErr = fmt.Errorf("import cycle: %s", stack) + if lpkg.color == white { + lpkg.color = grey + stack = append(stack, lpkg) // push + stubs := lpkg.Imports // the structure form has only stubs with the ID in the Imports + lpkg.Imports = make(map[string]*Package, len(stubs)) + for importPath, ipkg := range stubs { + var importErr error + imp := ld.pkgs[ipkg.ID] + if imp == nil { + // (includes package "C" when DisableCgo) + importErr = fmt.Errorf("missing package: %q", ipkg.ID) + } else if imp.color == grey { + importErr = fmt.Errorf("import cycle: %s", stack) + } + if importErr != nil { + if lpkg.importErrors == nil { + lpkg.importErrors = make(map[string]error) + } + lpkg.importErrors[importPath] = importErr + continue + } + + if visit(lpkg, imp) { + lpkg.needsrc = true + } + lpkg.Imports[importPath] = imp.Package } - if importErr != nil { - if lpkg.importErrors == nil { - lpkg.importErrors = make(map[string]error) + + // -- postorder -- + + // Complete type information is required for the + // immediate dependencies of each source package. + if lpkg.needsrc && ld.Mode&NeedTypes != 0 { + for _, ipkg := range lpkg.Imports { + ld.pkgs[ipkg.ID].needtypes = true } - lpkg.importErrors[importPath] = importErr - continue } - if visit(imp) { - lpkg.needsrc = true + // NeedTypeSizes causes TypeSizes to be set even + // on packages for which types aren't needed. + if ld.Mode&NeedTypesSizes != 0 { + lpkg.TypesSizes = ld.sizes } - lpkg.Imports[importPath] = imp.Package - } - // Complete type information is required for the - // immediate dependencies of each source package. - if lpkg.needsrc && ld.Mode&NeedTypes != 0 { - for _, ipkg := range lpkg.Imports { - ld.pkgs[ipkg.ID].needtypes = true + // Add packages with no imports directly to the queue of leaves. + if len(lpkg.Imports) == 0 { + leaves = append(leaves, lpkg) } + + stack = stack[:len(stack)-1] // pop + lpkg.color = black } - // NeedTypeSizes causes TypeSizes to be set even - // on packages for which types aren't needed. - if ld.Mode&NeedTypesSizes != 0 { - lpkg.TypesSizes = ld.sizes + // Add edge from predecessor. + if from != nil { + from.unfinishedSuccs.Add(+1) // incref + lpkg.preds = append(lpkg.preds, from) } - stack = stack[:len(stack)-1] // pop - lpkg.color = black return lpkg.needsrc } // For each initial package, create its import DAG. 
for _, lpkg := range initial { - visit(lpkg) + visit(nil, lpkg) } } else { @@ -909,16 +922,45 @@ func (ld *loader) refine(response *DriverResponse) ([]*Package, error) { // Load type data and syntax if needed, starting at // the initial packages (roots of the import DAG). - if ld.Mode&NeedTypes != 0 || ld.Mode&NeedSyntax != 0 { - var wg sync.WaitGroup - for _, lpkg := range initial { - wg.Add(1) - go func(lpkg *loaderPackage) { - ld.loadRecursive(lpkg) - wg.Done() - }(lpkg) + if ld.Mode&(NeedSyntax|NeedTypes|NeedTypesInfo) != 0 { + + // We avoid using g.SetLimit to limit concurrency as + // it makes g.Go stop accepting work, which prevents + // workers from enqeuing, and thus finishing, and thus + // allowing the group to make progress: deadlock. + // + // Instead we use the ioLimit and cpuLimit semaphores. + g, _ := errgroup.WithContext(ld.Context) + + // enqueues adds a package to the type-checking queue. + // It must have no unfinished successors. + var enqueue func(*loaderPackage) + enqueue = func(lpkg *loaderPackage) { + g.Go(func() error { + // Parse and type-check. + ld.loadPackage(lpkg) + + // Notify each waiting predecessor, + // and enqueue it when it becomes a leaf. + for _, pred := range lpkg.preds { + if pred.unfinishedSuccs.Add(-1) == 0 { // decref + enqueue(pred) + } + } + + return nil + }) + } + + // Load leaves first, adding new packages + // to the queue as they become leaves. + for _, leaf := range leaves { + enqueue(leaf) + } + + if err := g.Wait(); err != nil { + return nil, err // cancelled } - wg.Wait() } // If the context is done, return its error and @@ -965,7 +1007,7 @@ func (ld *loader) refine(response *DriverResponse) ([]*Package, error) { if ld.requestedMode&NeedSyntax == 0 { ld.pkgs[i].Syntax = nil } - if ld.requestedMode&NeedTypes == 0 && ld.requestedMode&NeedSyntax == 0 { + if ld.requestedMode&(NeedSyntax|NeedTypes|NeedTypesInfo) == 0 { ld.pkgs[i].Fset = nil } if ld.requestedMode&NeedTypesInfo == 0 { @@ -982,31 +1024,10 @@ func (ld *loader) refine(response *DriverResponse) ([]*Package, error) { return result, nil } -// loadRecursive loads the specified package and its dependencies, -// recursively, in parallel, in topological order. -// It is atomic and idempotent. -// Precondition: ld.Mode&NeedTypes. -func (ld *loader) loadRecursive(lpkg *loaderPackage) { - lpkg.loadOnce.Do(func() { - // Load the direct dependencies, in parallel. - var wg sync.WaitGroup - for _, ipkg := range lpkg.Imports { - imp := ld.pkgs[ipkg.ID] - wg.Add(1) - go func(imp *loaderPackage) { - ld.loadRecursive(imp) - wg.Done() - }(imp) - } - wg.Wait() - ld.loadPackage(lpkg) - }) -} - -// loadPackage loads the specified package. +// loadPackage loads/parses/typechecks the specified package. // It must be called only once per Package, // after immediate dependencies are loaded. -// Precondition: ld.Mode & NeedTypes. +// Precondition: ld.Mode&(NeedSyntax|NeedTypes|NeedTypesInfo) != 0. func (ld *loader) loadPackage(lpkg *loaderPackage) { if lpkg.PkgPath == "unsafe" { // Fill in the blanks to avoid surprises. @@ -1042,6 +1063,10 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) { if !lpkg.needtypes && !lpkg.needsrc { return } + + // TODO(adonovan): this condition looks wrong: + // I think it should be lpkg.needtypes && !lpg.needsrc, + // so that NeedSyntax without NeedTypes can be satisfied by export data. 
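The hunks above replace the recursive loadRecursive walk with leaf-first scheduling: each loaderPackage records its predecessors and an atomic count of unfinished imports, and a package is enqueued for type-checking once that count reaches zero. A toy, self-contained sketch of the same scheduling idea on a three-node dependency graph (the node type and names are invented for illustration, not taken from the vendored code):

package main

import (
	"fmt"
	"sync"
	"sync/atomic"

	"golang.org/x/sync/errgroup"
)

// node stands in for loaderPackage: preds are the nodes that import it,
// unfinishedSuccs counts its own imports that have not been processed yet.
type node struct {
	name            string
	preds           []*node
	unfinishedSuccs atomic.Int32
}

func main() {
	a, b, c := &node{name: "a"}, &node{name: "b"}, &node{name: "c"}
	addEdge := func(from, to *node) { // "from" imports "to"
		to.preds = append(to.preds, from)
		from.unfinishedSuccs.Add(1)
	}
	addEdge(a, b) // a imports b and c; b imports c
	addEdge(a, c)
	addEdge(b, c)

	var (
		mu    sync.Mutex
		order []string
		g     errgroup.Group
	)
	var enqueue func(*node)
	enqueue = func(n *node) {
		g.Go(func() error {
			mu.Lock()
			order = append(order, n.name) // "type-check" n here
			mu.Unlock()
			// Notify each waiting predecessor; enqueue it once it becomes a leaf.
			for _, p := range n.preds {
				if p.unfinishedSuccs.Add(-1) == 0 {
					enqueue(p)
				}
			}
			return nil
		})
	}
	enqueue(c)         // c is the only initial leaf
	_ = g.Wait()       // the sketch's workers never fail
	fmt.Println(order) // [c b a]
}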
if !lpkg.needsrc { if err := ld.loadFromExportData(lpkg); err != nil { lpkg.Errors = append(lpkg.Errors, Error{ @@ -1147,7 +1172,7 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) { } lpkg.Syntax = files - if ld.Config.Mode&NeedTypes == 0 { + if ld.Config.Mode&(NeedTypes|NeedTypesInfo) == 0 { return } @@ -1158,16 +1183,20 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) { return } - lpkg.TypesInfo = &types.Info{ - Types: make(map[ast.Expr]types.TypeAndValue), - Defs: make(map[*ast.Ident]types.Object), - Uses: make(map[*ast.Ident]types.Object), - Implicits: make(map[ast.Node]types.Object), - Instances: make(map[*ast.Ident]types.Instance), - Scopes: make(map[ast.Node]*types.Scope), - Selections: make(map[*ast.SelectorExpr]*types.Selection), + // Populate TypesInfo only if needed, as it + // causes the type checker to work much harder. + if ld.Config.Mode&NeedTypesInfo != 0 { + lpkg.TypesInfo = &types.Info{ + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + Instances: make(map[*ast.Ident]types.Instance), + Scopes: make(map[ast.Node]*types.Scope), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + FileVersions: make(map[*ast.File]string), + } } - versions.InitFileVersions(lpkg.TypesInfo) lpkg.TypesSizes = ld.sizes importer := importerFunc(func(path string) (*types.Package, error) { @@ -1220,6 +1249,10 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) { } } + // Type-checking is CPU intensive. + cpuLimit <- unit{} // acquire a token + defer func() { <-cpuLimit }() // release a token + typErr := types.NewChecker(tc, ld.Fset, lpkg.Types, lpkg.TypesInfo).Files(lpkg.Syntax) lpkg.importErrors = nil // no longer needed @@ -1284,8 +1317,11 @@ type importerFunc func(path string) (*types.Package, error) func (f importerFunc) Import(path string) (*types.Package, error) { return f(path) } // We use a counting semaphore to limit -// the number of parallel I/O calls per process. -var ioLimit = make(chan bool, 20) +// the number of parallel I/O calls or CPU threads per process. +var ( + ioLimit = make(chan unit, 20) + cpuLimit = make(chan unit, runtime.GOMAXPROCS(0)) +) func (ld *loader) parseFile(filename string) (*ast.File, error) { ld.parseCacheMu.Lock() @@ -1302,20 +1338,28 @@ func (ld *loader) parseFile(filename string) (*ast.File, error) { var src []byte for f, contents := range ld.Config.Overlay { + // TODO(adonovan): Inefficient for large overlays. + // Do an exact name-based map lookup + // (for nonexistent files) followed by a + // FileID-based map lookup (for existing ones). if sameFile(f, filename) { src = contents + break } } var err error if src == nil { - ioLimit <- true // wait + ioLimit <- unit{} // acquire a token src, err = os.ReadFile(filename) - <-ioLimit // signal + <-ioLimit // release a token } if err != nil { v.err = err } else { + // Parsing is CPU intensive. + cpuLimit <- unit{} // acquire a token v.f, v.err = ld.ParseFile(ld.Fset, filename, src) + <-cpuLimit // release a token } close(v.ready) @@ -1330,18 +1374,21 @@ func (ld *loader) parseFile(filename string) (*ast.File, error) { // Because files are scanned in parallel, the token.Pos // positions of the resulting ast.Files are not ordered. 
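The ioLimit/cpuLimit change above bounds parallel I/O and CPU-heavy work with counting semaphores built from buffered channels. A minimal stand-alone sketch of that pattern, assuming nothing beyond the standard library (the cpuHeavy wrapper is hypothetical; the real code inlines the acquire/release):

package main

import (
	"runtime"
	"sync"
)

type unit struct{}

// cpuLimit bounds concurrent CPU-bound work to GOMAXPROCS goroutines,
// the same buffered-channel semaphore shape used by the loader above.
var cpuLimit = make(chan unit, runtime.GOMAXPROCS(0))

func cpuHeavy(work func()) {
	cpuLimit <- unit{}            // acquire a token
	defer func() { <-cpuLimit }() // release a token
	work()
}

func main() {
	var wg sync.WaitGroup
	for i := 0; i < 100; i++ {
		wg.Add(1)
		go func(i int) {
			defer wg.Done()
			cpuHeavy(func() { _ = i * i }) // stand-in for parsing or type-checking
		}(i)
	}
	wg.Wait()
}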
func (ld *loader) parseFiles(filenames []string) ([]*ast.File, []error) { - var wg sync.WaitGroup - n := len(filenames) - parsed := make([]*ast.File, n) - errors := make([]error, n) - for i, file := range filenames { - wg.Add(1) - go func(i int, filename string) { + var ( + n = len(filenames) + parsed = make([]*ast.File, n) + errors = make([]error, n) + ) + var g errgroup.Group + for i, filename := range filenames { + // This creates goroutines unnecessarily in the + // cache-hit case, but that case is uncommon. + g.Go(func() error { parsed[i], errors[i] = ld.parseFile(filename) - wg.Done() - }(i, file) + return nil + }) } - wg.Wait() + g.Wait() // Eliminate nils, preserving order. var o int @@ -1512,4 +1559,4 @@ func usesExportData(cfg *Config) bool { return cfg.Mode&NeedExportFile != 0 || cfg.Mode&NeedTypes != 0 && cfg.Mode&NeedDeps == 0 } -var _ interface{} = io.Discard // assert build toolchain is go1.16 or later +type unit struct{} diff --git a/vendor/golang.org/x/tools/go/ssa/builder.go b/vendor/golang.org/x/tools/go/ssa/builder.go index 55943e45d2..b109fbf3cd 100644 --- a/vendor/golang.org/x/tools/go/ssa/builder.go +++ b/vendor/golang.org/x/tools/go/ssa/builder.go @@ -82,7 +82,6 @@ import ( "runtime" "sync" - "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/typeparams" "golang.org/x/tools/internal/versions" ) @@ -854,7 +853,7 @@ func (b *builder) expr0(fn *Function, e ast.Expr, tv types.TypeAndValue) Value { if types.IsInterface(rt) { // If v may be an interface type I (after instantiating), // we must emit a check that v is non-nil. - if recv, ok := aliases.Unalias(sel.recv).(*types.TypeParam); ok { + if recv, ok := types.Unalias(sel.recv).(*types.TypeParam); ok { // Emit a nil check if any possible instantiation of the // type parameter is an interface type. if typeSetOf(recv).Len() > 0 { @@ -2508,7 +2507,7 @@ func (b *builder) rangeFunc(fn *Function, x Value, tk, tv types.Type, rng *ast.R name: fmt.Sprintf("%s$%d", fn.Name(), anonIdx+1), Signature: ysig, Synthetic: "range-over-func yield", - pos: rangePosition(rng), + pos: rng.Range, parent: fn, anonIdx: int32(len(fn.AnonFuncs)), Pkg: fn.Pkg, @@ -2566,6 +2565,8 @@ func (b *builder) rangeFunc(fn *Function, x Value, tk, tv types.Type, rng *ast.R emitJump(fn, done) fn.currentBlock = done + // pop the stack for the range-over-func + fn.targets = fn.targets.tail } // buildYieldResume emits to fn code for how to resume execution once a call to @@ -2967,7 +2968,7 @@ func (b *builder) buildFromSyntax(fn *Function) { func (b *builder) buildYieldFunc(fn *Function) { // See builder.rangeFunc for detailed documentation on how fn is set up. // - // In psuedo-Go this roughly builds: + // In pseudo-Go this roughly builds: // func yield(_k tk, _v tv) bool { // if jump != READY { panic("yield function called after range loop exit") } // jump = BUSY @@ -2998,6 +2999,7 @@ func (b *builder) buildYieldFunc(fn *Function) { } } fn.targets = &targets{ + tail: fn.targets, _continue: ycont, // `break` statement targets fn.parent.targets._break. } @@ -3075,6 +3077,8 @@ func (b *builder) buildYieldFunc(fn *Function) { // unreachable. emitJump(fn, ycont) } + // pop the stack for the yield function + fn.targets = fn.targets.tail // Clean up exits and promote any unresolved exits to fn.parent. for _, e := range fn.exits { @@ -3104,17 +3108,17 @@ func (b *builder) buildYieldFunc(fn *Function) { fn.finishBody() } -// addRuntimeType records t as a runtime type, -// along with all types derivable from it using reflection. 
+// addMakeInterfaceType records non-interface type t as the type of +// the operand a MakeInterface operation, for [Program.RuntimeTypes]. // -// Acquires prog.runtimeTypesMu. -func addRuntimeType(prog *Program, t types.Type) { - prog.runtimeTypesMu.Lock() - defer prog.runtimeTypesMu.Unlock() - forEachReachable(&prog.MethodSets, t, func(t types.Type) bool { - prev, _ := prog.runtimeTypes.Set(t, true).(bool) - return !prev // already seen? - }) +// Acquires prog.makeInterfaceTypesMu. +func addMakeInterfaceType(prog *Program, t types.Type) { + prog.makeInterfaceTypesMu.Lock() + defer prog.makeInterfaceTypesMu.Unlock() + if prog.makeInterfaceTypes == nil { + prog.makeInterfaceTypes = make(map[types.Type]unit) + } + prog.makeInterfaceTypes[t] = unit{} } // Build calls Package.Build for each package in prog. diff --git a/vendor/golang.org/x/tools/go/ssa/const.go b/vendor/golang.org/x/tools/go/ssa/const.go index 2a4e0dde28..865329bfd3 100644 --- a/vendor/golang.org/x/tools/go/ssa/const.go +++ b/vendor/golang.org/x/tools/go/ssa/const.go @@ -14,7 +14,6 @@ import ( "strconv" "strings" - "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/typeparams" ) @@ -114,7 +113,7 @@ func zeroString(t types.Type, from *types.Package) string { } case *types.Pointer, *types.Slice, *types.Interface, *types.Chan, *types.Map, *types.Signature: return "nil" - case *types.Named, *aliases.Alias: + case *types.Named, *types.Alias: return zeroString(t.Underlying(), from) case *types.Array, *types.Struct: return relType(t, from) + "{}" diff --git a/vendor/golang.org/x/tools/go/ssa/coretype.go b/vendor/golang.org/x/tools/go/ssa/coretype.go index 8c218f919f..d937134227 100644 --- a/vendor/golang.org/x/tools/go/ssa/coretype.go +++ b/vendor/golang.org/x/tools/go/ssa/coretype.go @@ -7,7 +7,6 @@ package ssa import ( "go/types" - "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/typeparams" ) @@ -51,7 +50,7 @@ func typeSetOf(typ types.Type) termList { var terms []*types.Term var err error // typeSetOf(t) == typeSetOf(Unalias(t)) - switch typ := aliases.Unalias(typ).(type) { + switch typ := types.Unalias(typ).(type) { case *types.TypeParam: terms, err = typeparams.StructuralTerms(typ) case *types.Union: diff --git a/vendor/golang.org/x/tools/go/ssa/create.go b/vendor/golang.org/x/tools/go/ssa/create.go index 423bce8718..2fa3d0757a 100644 --- a/vendor/golang.org/x/tools/go/ssa/create.go +++ b/vendor/golang.org/x/tools/go/ssa/create.go @@ -193,11 +193,7 @@ func membersFromDecl(pkg *Package, decl ast.Decl, goversion string) { // // The real work of building SSA form for each function is not done // until a subsequent call to Package.Build. -// -// CreatePackage should not be called after building any package in -// the program. func (prog *Program) CreatePackage(pkg *types.Package, files []*ast.File, info *types.Info, importable bool) *Package { - // TODO(adonovan): assert that no package has yet been built. if pkg == nil { panic("nil pkg") // otherwise pkg.Scope below returns types.Universe! } diff --git a/vendor/golang.org/x/tools/go/ssa/emit.go b/vendor/golang.org/x/tools/go/ssa/emit.go index c664ff85a0..176c1e1a74 100644 --- a/vendor/golang.org/x/tools/go/ssa/emit.go +++ b/vendor/golang.org/x/tools/go/ssa/emit.go @@ -249,7 +249,7 @@ func emitConv(f *Function, val Value, typ types.Type) Value { // non-parameterized, as they are the set of runtime types. 
t := val.Type() if f.typeparams.Len() == 0 || !f.Prog.isParameterized(t) { - addRuntimeType(f.Prog, t) + addMakeInterfaceType(f.Prog, t) } mi := &MakeInterface{X: val} diff --git a/vendor/golang.org/x/tools/go/ssa/func.go b/vendor/golang.org/x/tools/go/ssa/func.go index 2ed63bfd53..010c128a9e 100644 --- a/vendor/golang.org/x/tools/go/ssa/func.go +++ b/vendor/golang.org/x/tools/go/ssa/func.go @@ -186,6 +186,20 @@ func targetedBlock(f *Function, tok token.Token) *BasicBlock { return targetedBlock(f.parent, tok) } +// instrs returns an iterator that returns each reachable instruction of the SSA function. +// TODO: return an iter.Seq once x/tools is on 1.23 +func (f *Function) instrs() func(yield func(i Instruction) bool) { + return func(yield func(i Instruction) bool) { + for _, block := range f.Blocks { + for _, instr := range block.Instrs { + if !yield(instr) { + return + } + } + } + } +} + // addResultVar adds a result for a variable v to f.results and v to f.returnVars. func (f *Function) addResultVar(v *types.Var) { result := emitLocalVar(f, v) diff --git a/vendor/golang.org/x/tools/go/ssa/methods.go b/vendor/golang.org/x/tools/go/ssa/methods.go index b9560183a9..4b116f4307 100644 --- a/vendor/golang.org/x/tools/go/ssa/methods.go +++ b/vendor/golang.org/x/tools/go/ssa/methods.go @@ -11,7 +11,7 @@ import ( "go/types" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/aliases" + "golang.org/x/tools/internal/typesinternal" ) // MethodValue returns the Function implementing method sel, building @@ -158,124 +158,23 @@ type methodSet struct { // // Thread-safe. // -// Acquires prog.runtimeTypesMu. +// Acquires prog.makeInterfaceTypesMu. func (prog *Program) RuntimeTypes() []types.Type { - prog.runtimeTypesMu.Lock() - defer prog.runtimeTypesMu.Unlock() - return prog.runtimeTypes.Keys() -} - -// forEachReachable calls f for type T and each type reachable from -// its type through reflection. -// -// The function f must use memoization to break cycles and -// return false when the type has already been visited. -// -// TODO(adonovan): publish in typeutil and share with go/callgraph/rta. -func forEachReachable(msets *typeutil.MethodSetCache, T types.Type, f func(types.Type) bool) { - var visit func(T types.Type, skip bool) - visit = func(T types.Type, skip bool) { - if !skip { - if !f(T) { - return - } - } - - // Recursion over signatures of each method. - tmset := msets.MethodSet(T) - for i := 0; i < tmset.Len(); i++ { - sig := tmset.At(i).Type().(*types.Signature) - // It is tempting to call visit(sig, false) - // but, as noted in golang.org/cl/65450043, - // the Signature.Recv field is ignored by - // types.Identical and typeutil.Map, which - // is confusing at best. - // - // More importantly, the true signature rtype - // reachable from a method using reflection - // has no receiver but an extra ordinary parameter. - // For the Read method of io.Reader we want: - // func(Reader, []byte) (int, error) - // but here sig is: - // func([]byte) (int, error) - // with .Recv = Reader (though it is hard to - // notice because it doesn't affect Signature.String - // or types.Identical). - // - // TODO(adonovan): construct and visit the correct - // non-method signature with an extra parameter - // (though since unnamed func types have no methods - // there is essentially no actual demand for this). - // - // TODO(adonovan): document whether or not it is - // safe to skip non-exported methods (as RTA does). 
- visit(sig.Params(), true) // skip the Tuple - visit(sig.Results(), true) // skip the Tuple - } - - switch T := T.(type) { - case *aliases.Alias: - visit(aliases.Unalias(T), skip) // emulates the pre-Alias behavior - - case *types.Basic: - // nop - - case *types.Interface: - // nop---handled by recursion over method set. - - case *types.Pointer: - visit(T.Elem(), false) - - case *types.Slice: - visit(T.Elem(), false) - - case *types.Chan: - visit(T.Elem(), false) - - case *types.Map: - visit(T.Key(), false) - visit(T.Elem(), false) - - case *types.Signature: - if T.Recv() != nil { - panic(fmt.Sprintf("Signature %s has Recv %s", T, T.Recv())) - } - visit(T.Params(), true) // skip the Tuple - visit(T.Results(), true) // skip the Tuple - - case *types.Named: - // A pointer-to-named type can be derived from a named - // type via reflection. It may have methods too. - visit(types.NewPointer(T), false) - - // Consider 'type T struct{S}' where S has methods. - // Reflection provides no way to get from T to struct{S}, - // only to S, so the method set of struct{S} is unwanted, - // so set 'skip' flag during recursion. - visit(T.Underlying(), true) // skip the unnamed type - - case *types.Array: - visit(T.Elem(), false) - - case *types.Struct: - for i, n := 0, T.NumFields(); i < n; i++ { - // TODO(adonovan): document whether or not - // it is safe to skip non-exported fields. - visit(T.Field(i).Type(), false) - } - - case *types.Tuple: - for i, n := 0, T.Len(); i < n; i++ { - visit(T.At(i).Type(), false) - } - - case *types.TypeParam, *types.Union: - // forEachReachable must not be called on parameterized types. - panic(T) - - default: - panic(T) - } + prog.makeInterfaceTypesMu.Lock() + defer prog.makeInterfaceTypesMu.Unlock() + + // Compute the derived types on demand, since many SSA clients + // never call RuntimeTypes, and those that do typically call + // it once (often within ssautil.AllFunctions, which will + // eventually not use it; see Go issue #69291.) This + // eliminates the need to eagerly compute all the element + // types during SSA building. + var runtimeTypes []types.Type + add := func(t types.Type) { runtimeTypes = append(runtimeTypes, t) } + var set typeutil.Map // for de-duping identical types + for t := range prog.makeInterfaceTypes { + typesinternal.ForEachElement(&set, &prog.MethodSets, t, add) } - visit(T, false) + + return runtimeTypes } diff --git a/vendor/golang.org/x/tools/go/ssa/print.go b/vendor/golang.org/x/tools/go/ssa/print.go index c890d7ee53..ef32672a26 100644 --- a/vendor/golang.org/x/tools/go/ssa/print.go +++ b/vendor/golang.org/x/tools/go/ssa/print.go @@ -39,16 +39,8 @@ func relName(v Value, i Instruction) string { return v.Name() } -// normalizeAnyForTesting controls whether we replace occurrences of -// interface{} with any. It is only used for normalizing test output. 
-var normalizeAnyForTesting bool - func relType(t types.Type, from *types.Package) string { - s := types.TypeString(t, types.RelativeTo(from)) - if normalizeAnyForTesting { - s = strings.ReplaceAll(s, "interface{}", "any") - } - return s + return types.TypeString(t, types.RelativeTo(from)) } func relTerm(term *types.Term, from *types.Package) string { diff --git a/vendor/golang.org/x/tools/go/ssa/sanity.go b/vendor/golang.org/x/tools/go/ssa/sanity.go index 285cba04a9..ef2928e3b7 100644 --- a/vendor/golang.org/x/tools/go/ssa/sanity.go +++ b/vendor/golang.org/x/tools/go/ssa/sanity.go @@ -407,14 +407,87 @@ func (s *sanity) checkReferrerList(v Value) { } } +func (s *sanity) checkFunctionParams() { + signature := s.fn.Signature + params := s.fn.Params + + // startSigParams is the start of signature.Params() within params. + startSigParams := 0 + if signature.Recv() != nil { + startSigParams = 1 + } + + if startSigParams+signature.Params().Len() != len(params) { + s.errorf("function has %d parameters in signature but has %d after building", + startSigParams+signature.Params().Len(), len(params)) + return + } + + for i, param := range params { + var sigType types.Type + si := i - startSigParams + if si < 0 { + sigType = signature.Recv().Type() + } else { + sigType = signature.Params().At(si).Type() + } + + if !types.Identical(sigType, param.Type()) { + s.errorf("expect type %s in signature but got type %s in param %d", param.Type(), sigType, i) + } + } +} + +// checkTransientFields checks whether all transient fields of Function are cleared. +func (s *sanity) checkTransientFields() { + fn := s.fn + if fn.build != nil { + s.errorf("function transient field 'build' is not nil") + } + if fn.currentBlock != nil { + s.errorf("function transient field 'currentBlock' is not nil") + } + if fn.vars != nil { + s.errorf("function transient field 'vars' is not nil") + } + if fn.results != nil { + s.errorf("function transient field 'results' is not nil") + } + if fn.returnVars != nil { + s.errorf("function transient field 'returnVars' is not nil") + } + if fn.targets != nil { + s.errorf("function transient field 'targets' is not nil") + } + if fn.lblocks != nil { + s.errorf("function transient field 'lblocks' is not nil") + } + if fn.subst != nil { + s.errorf("function transient field 'subst' is not nil") + } + if fn.jump != nil { + s.errorf("function transient field 'jump' is not nil") + } + if fn.deferstack != nil { + s.errorf("function transient field 'deferstack' is not nil") + } + if fn.source != nil { + s.errorf("function transient field 'source' is not nil") + } + if fn.exits != nil { + s.errorf("function transient field 'exits' is not nil") + } + if fn.uniq != 0 { + s.errorf("function transient field 'uniq' is not zero") + } +} + func (s *sanity) checkFunction(fn *Function) bool { - // TODO(adonovan): check Function invariants: - // - check params match signature - // - check transient fields are nil - // - warn if any fn.Locals do not appear among block instructions. + s.fn = fn + s.checkFunctionParams() + s.checkTransientFields() // TODO(taking): Sanity check origin, typeparams, and typeargs. - s.fn = fn if fn.Prog == nil { s.errorf("nil Prog") } @@ -452,7 +525,23 @@ func (s *sanity) checkFunction(fn *Function) bool { s.errorf("got fromSource=%t, hasSyntax=%t; want same values", src, syn) } } + + // Build the set of valid referrers. + s.instrs = make(map[Instruction]unit) + + // TODO: switch to range-over-func when x/tools updates to 1.23. 
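The new (*Function).instrs helper above returns a func(yield func(Instruction) bool), which is the pre-Go 1.23 spelling of an iter.Seq. A tiny sketch of how such an iterator is produced and consumed, on a plain int slice rather than SSA instructions (names are invented for the example):

package main

import "fmt"

// values returns an iterator in the pre-1.23 form: a function that pushes
// elements to a yield callback until the callback asks to stop.
func values(xs []int) func(yield func(int) bool) {
	return func(yield func(int) bool) {
		for _, x := range xs {
			if !yield(x) {
				return // consumer requested early exit
			}
		}
	}
}

func main() {
	seq := values([]int{1, 2, 3, 4})
	// Pre-1.23 consumption: call the sequence with an explicit yield closure.
	seq(func(x int) bool {
		fmt.Println(x)
		return x < 3 // stop after 3; 4 is never yielded
	})
	// On Go 1.23+ the same value can be ranged over: for x := range seq { ... }
}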
+ // instrs are the instructions that are present in the function. + fn.instrs()(func(instr Instruction) bool { + s.instrs[instr] = unit{} + return true + }) + + // Check all Locals allocations appear in the function instruction. for i, l := range fn.Locals { + if _, present := s.instrs[l]; !present { + s.warnf("function doesn't contain Local alloc %s", l.Name()) + } + if l.Parent() != fn { s.errorf("Local %s at index %d has wrong parent", l.Name(), i) } @@ -460,13 +549,6 @@ func (s *sanity) checkFunction(fn *Function) bool { s.errorf("Local %s at index %d has Heap flag set", l.Name(), i) } } - // Build the set of valid referrers. - s.instrs = make(map[Instruction]unit) - for _, b := range fn.Blocks { - for _, instr := range b.Instrs { - s.instrs[instr] = unit{} - } - } for i, p := range fn.Params { if p.Parent() != fn { s.errorf("Param %s at index %d has wrong parent", p.Name(), i) @@ -527,6 +609,19 @@ func sanityCheckPackage(pkg *Package) { if pkg.Pkg == nil { panic(fmt.Sprintf("Package %s has no Object", pkg)) } + if pkg.info != nil { + panic(fmt.Sprintf("package %s field 'info' is not cleared", pkg)) + } + if pkg.files != nil { + panic(fmt.Sprintf("package %s field 'files' is not cleared", pkg)) + } + if pkg.created != nil { + panic(fmt.Sprintf("package %s field 'created' is not cleared", pkg)) + } + if pkg.initVersion != nil { + panic(fmt.Sprintf("package %s field 'initVersion' is not cleared", pkg)) + } + _ = pkg.String() // must not crash for name, mem := range pkg.Members { diff --git a/vendor/golang.org/x/tools/go/ssa/ssa.go b/vendor/golang.org/x/tools/go/ssa/ssa.go index 1231afd9e0..4fa9831079 100644 --- a/vendor/golang.org/x/tools/go/ssa/ssa.go +++ b/vendor/golang.org/x/tools/go/ssa/ssa.go @@ -37,8 +37,9 @@ type Program struct { hasParamsMu sync.Mutex hasParams typeparams.Free - runtimeTypesMu sync.Mutex - runtimeTypes typeutil.Map // set of runtime types (from MakeInterface) + // set of concrete types used as MakeInterface operands + makeInterfaceTypesMu sync.Mutex + makeInterfaceTypes map[types.Type]unit // (may contain redundant identical types) // objectMethods is a memoization of objectMethod // to avoid creation of duplicate methods from type information. @@ -341,7 +342,7 @@ type Function struct { // source information Synthetic string // provenance of synthetic function; "" for true source functions syntax ast.Node // *ast.Func{Decl,Lit}, if from syntax (incl. generic instances) or (*ast.RangeStmt if a yield function) - info *types.Info // type annotations (iff syntax != nil) + info *types.Info // type annotations (if syntax != nil) goversion string // Go version of syntax (NB: init is special) parent *Function // enclosing function if anon; nil if global diff --git a/vendor/golang.org/x/tools/go/ssa/ssautil/deprecated.go b/vendor/golang.org/x/tools/go/ssa/ssautil/deprecated.go new file mode 100644 index 0000000000..4feff7131a --- /dev/null +++ b/vendor/golang.org/x/tools/go/ssa/ssautil/deprecated.go @@ -0,0 +1,36 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package ssautil + +// This file contains deprecated public APIs. +// We discourage their use. + +import ( + "golang.org/x/tools/go/loader" + "golang.org/x/tools/go/ssa" +) + +// CreateProgram returns a new program in SSA form, given a program +// loaded from source. An SSA package is created for each transitively +// error-free package of lprog. 
+// +// Code for bodies of functions is not built until Build is called +// on the result. +// +// The mode parameter controls diagnostics and checking during SSA construction. +// +// Deprecated: Use [golang.org/x/tools/go/packages] and the [Packages] +// function instead; see ssa.Example_loadPackages. +func CreateProgram(lprog *loader.Program, mode ssa.BuilderMode) *ssa.Program { + prog := ssa.NewProgram(lprog.Fset, mode) + + for _, info := range lprog.AllPackages { + if info.TransitivelyErrorFree { + prog.CreatePackage(info.Pkg, info.Files, &info.Info, info.Importable) + } + } + + return prog +} diff --git a/vendor/golang.org/x/tools/go/ssa/ssautil/load.go b/vendor/golang.org/x/tools/go/ssa/ssautil/load.go index 3daa67a07e..c64b03f177 100644 --- a/vendor/golang.org/x/tools/go/ssa/ssautil/load.go +++ b/vendor/golang.org/x/tools/go/ssa/ssautil/load.go @@ -11,10 +11,8 @@ import ( "go/token" "go/types" - "golang.org/x/tools/go/loader" "golang.org/x/tools/go/packages" "golang.org/x/tools/go/ssa" - "golang.org/x/tools/internal/versions" ) // Packages creates an SSA program for a set of packages. @@ -111,29 +109,6 @@ func doPackages(initial []*packages.Package, mode ssa.BuilderMode, deps bool) (* return prog, ssapkgs } -// CreateProgram returns a new program in SSA form, given a program -// loaded from source. An SSA package is created for each transitively -// error-free package of lprog. -// -// Code for bodies of functions is not built until Build is called -// on the result. -// -// The mode parameter controls diagnostics and checking during SSA construction. -// -// Deprecated: Use [golang.org/x/tools/go/packages] and the [Packages] -// function instead; see ssa.Example_loadPackages. -func CreateProgram(lprog *loader.Program, mode ssa.BuilderMode) *ssa.Program { - prog := ssa.NewProgram(lprog.Fset, mode) - - for _, info := range lprog.AllPackages { - if info.TransitivelyErrorFree { - prog.CreatePackage(info.Pkg, info.Files, &info.Info, info.Importable) - } - } - - return prog -} - // BuildPackage builds an SSA program with SSA intermediate // representation (IR) for all functions of a single package. 
// @@ -158,15 +133,15 @@ func BuildPackage(tc *types.Config, fset *token.FileSet, pkg *types.Package, fil } info := &types.Info{ - Types: make(map[ast.Expr]types.TypeAndValue), - Defs: make(map[*ast.Ident]types.Object), - Uses: make(map[*ast.Ident]types.Object), - Implicits: make(map[ast.Node]types.Object), - Instances: make(map[*ast.Ident]types.Instance), - Scopes: make(map[ast.Node]*types.Scope), - Selections: make(map[*ast.SelectorExpr]*types.Selection), + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + Instances: make(map[*ast.Ident]types.Instance), + Scopes: make(map[ast.Node]*types.Scope), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + FileVersions: make(map[*ast.File]string), } - versions.InitFileVersions(info) if err := types.NewChecker(tc, fset, pkg, info).Files(files); err != nil { return nil, nil, err } diff --git a/vendor/golang.org/x/tools/go/ssa/subst.go b/vendor/golang.org/x/tools/go/ssa/subst.go index 4dcb871572..fc870235c4 100644 --- a/vendor/golang.org/x/tools/go/ssa/subst.go +++ b/vendor/golang.org/x/tools/go/ssa/subst.go @@ -144,7 +144,7 @@ func (subst *subster) typ(t types.Type) (res types.Type) { case *types.Interface: return subst.interface_(t) - case *aliases.Alias: + case *types.Alias: return subst.alias(t) case *types.Named: @@ -317,7 +317,7 @@ func (subst *subster) interface_(iface *types.Interface) *types.Interface { return types.NewInterfaceType(methods, embeds).Complete() } -func (subst *subster) alias(t *aliases.Alias) types.Type { +func (subst *subster) alias(t *types.Alias) types.Type { // See subster.named. This follows the same strategy. tparams := aliases.TypeParams(t) targs := aliases.TypeArgs(t) @@ -365,19 +365,19 @@ func (subst *subster) alias(t *aliases.Alias) types.Type { rhs := subst.typ(aliases.Rhs(t)) // Create the fresh alias. - obj := aliases.NewAlias(true, tname.Pos(), tname.Pkg(), tname.Name(), rhs) - fresh := obj.Type() - if fresh, ok := fresh.(*aliases.Alias); ok { - // TODO: assume ok when aliases are always materialized (go1.27). - aliases.SetTypeParams(fresh, newTParams) - } + // + // Until 1.27, the result of aliases.NewAlias(...).Type() cannot guarantee it is a *types.Alias. + // However, as t is an *alias.Alias and t is well-typed, then aliases must have been enabled. + // Follow this decision, and always enable aliases here. + const enabled = true + obj := aliases.NewAlias(enabled, tname.Pos(), tname.Pkg(), tname.Name(), rhs, newTParams) // Substitute into all of the constraints after they are created. for i, ntp := range newTParams { bound := tparams.At(i).Constraint() ntp.SetConstraint(subst.typ(bound)) } - return fresh + return obj.Type() } // t is declared within the function origin and has type arguments. 
@@ -633,7 +633,7 @@ func reaches(t types.Type, c map[types.Type]bool) (res bool) { return true } } - case *types.Named, *aliases.Alias: + case *types.Named, *types.Alias: return reaches(t.Underlying(), c) default: panic("unreachable") diff --git a/vendor/golang.org/x/tools/go/ssa/util.go b/vendor/golang.org/x/tools/go/ssa/util.go index 549c9c819e..cdc46209e7 100644 --- a/vendor/golang.org/x/tools/go/ssa/util.go +++ b/vendor/golang.org/x/tools/go/ssa/util.go @@ -15,9 +15,7 @@ import ( "os" "sync" - "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/go/types/typeutil" - "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/typeparams" "golang.org/x/tools/internal/typesinternal" ) @@ -36,7 +34,7 @@ func assert(p bool, msg string) { //// AST utilities -func unparen(e ast.Expr) ast.Expr { return astutil.Unparen(e) } +func unparen(e ast.Expr) ast.Expr { return ast.Unparen(e) } // isBlankIdent returns true iff e is an Ident with name "_". // They have no associated types.Object, and thus no type. @@ -45,13 +43,6 @@ func isBlankIdent(e ast.Expr) bool { return ok && id.Name == "_" } -// rangePosition is the position to give for the `range` token in a RangeStmt. -var rangePosition = func(rng *ast.RangeStmt) token.Pos { - // Before 1.20, this is unreachable. - // rng.For is a close, but incorrect position. - return rng.For -} - //// Type utilities. Some of these belong in go/types. // isNonTypeParamInterface reports whether t is an interface type but not a type parameter. @@ -268,7 +259,7 @@ func instanceArgs(info *types.Info, id *ast.Ident) []types.Type { return targs } -// Mapping of a type T to a canonical instance C s.t. types.Indentical(T, C). +// Mapping of a type T to a canonical instance C s.t. types.Identical(T, C). // Thread-safe. type canonizer struct { mu sync.Mutex @@ -295,7 +286,7 @@ func (c *canonizer) List(ts []types.Type) *typeList { // Is there some top level alias? var found bool for _, t := range ts { - if _, ok := t.(*aliases.Alias); ok { + if _, ok := t.(*types.Alias); ok { found = true break } @@ -306,7 +297,7 @@ func (c *canonizer) List(ts []types.Type) *typeList { cp := make([]types.Type, len(ts)) // copy with top level aliases removed. for i, t := range ts { - cp[i] = aliases.Unalias(t) + cp[i] = types.Unalias(t) } return cp } @@ -323,7 +314,7 @@ func (c *canonizer) List(ts []types.Type) *typeList { // For performance, reasons the canonical instance is order-dependent, // and may contain deeply nested aliases. func (c *canonizer) Type(T types.Type) types.Type { - T = aliases.Unalias(T) // remove the top level alias. + T = types.Unalias(T) // remove the top level alias. c.mu.Lock() defer c.mu.Unlock() @@ -403,10 +394,10 @@ func (m *typeListMap) hash(ts []types.Type) uint32 { // instantiateMethod instantiates m with targs and returns a canonical representative for this method. 
func (canon *canonizer) instantiateMethod(m *types.Func, targs []types.Type, ctxt *types.Context) *types.Func { recv := recvType(m) - if p, ok := aliases.Unalias(recv).(*types.Pointer); ok { + if p, ok := types.Unalias(recv).(*types.Pointer); ok { recv = p.Elem() } - named := aliases.Unalias(recv).(*types.Named) + named := types.Unalias(recv).(*types.Named) inst, err := types.Instantiate(ctxt, named.Origin(), targs, false) if err != nil { panic(err) diff --git a/vendor/golang.org/x/tools/go/ssa/util_go120.go b/vendor/golang.org/x/tools/go/ssa/util_go120.go deleted file mode 100644 index 9e8ea874e1..0000000000 --- a/vendor/golang.org/x/tools/go/ssa/util_go120.go +++ /dev/null @@ -1,17 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.20 -// +build go1.20 - -package ssa - -import ( - "go/ast" - "go/token" -) - -func init() { - rangePosition = func(rng *ast.RangeStmt) token.Pos { return rng.Range } -} diff --git a/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go b/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go index 9ada177758..16ed3c1780 100644 --- a/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go +++ b/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go @@ -228,7 +228,7 @@ func (enc *Encoder) For(obj types.Object) (Path, error) { // Reject obviously non-viable cases. switch obj := obj.(type) { case *types.TypeName: - if _, ok := aliases.Unalias(obj.Type()).(*types.TypeParam); !ok { + if _, ok := types.Unalias(obj.Type()).(*types.TypeParam); !ok { // With the exception of type parameters, only package-level type names // have a path. return "", fmt.Errorf("no path for %v", obj) @@ -280,26 +280,26 @@ func (enc *Encoder) For(obj types.Object) (Path, error) { path = append(path, opType) T := o.Type() - if alias, ok := T.(*aliases.Alias); ok { - if r := findTypeParam(obj, aliases.TypeParams(alias), path, opTypeParam, nil); r != nil { + if alias, ok := T.(*types.Alias); ok { + if r := findTypeParam(obj, aliases.TypeParams(alias), path, opTypeParam); r != nil { return Path(r), nil } - if r := find(obj, aliases.Rhs(alias), append(path, opRhs), nil); r != nil { + if r := find(obj, aliases.Rhs(alias), append(path, opRhs)); r != nil { return Path(r), nil } } else if tname.IsAlias() { // legacy alias - if r := find(obj, T, path, nil); r != nil { + if r := find(obj, T, path); r != nil { return Path(r), nil } } else if named, ok := T.(*types.Named); ok { // defined (named) type - if r := findTypeParam(obj, named.TypeParams(), path, opTypeParam, nil); r != nil { + if r := findTypeParam(obj, named.TypeParams(), path, opTypeParam); r != nil { return Path(r), nil } - if r := find(obj, named.Underlying(), append(path, opUnderlying), nil); r != nil { + if r := find(obj, named.Underlying(), append(path, opUnderlying)); r != nil { return Path(r), nil } } @@ -312,7 +312,7 @@ func (enc *Encoder) For(obj types.Object) (Path, error) { if _, ok := o.(*types.TypeName); !ok { if o.Exported() { // exported non-type (const, var, func) - if r := find(obj, o.Type(), append(path, opType), nil); r != nil { + if r := find(obj, o.Type(), append(path, opType)); r != nil { return Path(r), nil } } @@ -320,7 +320,7 @@ func (enc *Encoder) For(obj types.Object) (Path, error) { } // Inspect declared methods of defined types. 
- if T, ok := aliases.Unalias(o.Type()).(*types.Named); ok { + if T, ok := types.Unalias(o.Type()).(*types.Named); ok { path = append(path, opType) // The method index here is always with respect // to the underlying go/types data structures, @@ -332,7 +332,7 @@ func (enc *Encoder) For(obj types.Object) (Path, error) { if m == obj { return Path(path2), nil // found declared method } - if r := find(obj, m.Type(), append(path2, opType), nil); r != nil { + if r := find(obj, m.Type(), append(path2, opType)); r != nil { return Path(r), nil } } @@ -447,46 +447,64 @@ func (enc *Encoder) concreteMethod(meth *types.Func) (Path, bool) { // // The seen map is used to short circuit cycles through type parameters. If // nil, it will be allocated as necessary. -func find(obj types.Object, T types.Type, path []byte, seen map[*types.TypeName]bool) []byte { +// +// The seenMethods map is used internally to short circuit cycles through +// interface methods, such as occur in the following example: +// +// type I interface { f() interface{I} } +// +// See golang/go#68046 for details. +func find(obj types.Object, T types.Type, path []byte) []byte { + return (&finder{obj: obj}).find(T, path) +} + +// finder closes over search state for a call to find. +type finder struct { + obj types.Object // the sought object + seenTParamNames map[*types.TypeName]bool // for cycle breaking through type parameters + seenMethods map[*types.Func]bool // for cycle breaking through recursive interfaces +} + +func (f *finder) find(T types.Type, path []byte) []byte { switch T := T.(type) { - case *aliases.Alias: - return find(obj, aliases.Unalias(T), path, seen) + case *types.Alias: + return f.find(types.Unalias(T), path) case *types.Basic, *types.Named: // Named types belonging to pkg were handled already, // so T must belong to another package. No path. 
return nil case *types.Pointer: - return find(obj, T.Elem(), append(path, opElem), seen) + return f.find(T.Elem(), append(path, opElem)) case *types.Slice: - return find(obj, T.Elem(), append(path, opElem), seen) + return f.find(T.Elem(), append(path, opElem)) case *types.Array: - return find(obj, T.Elem(), append(path, opElem), seen) + return f.find(T.Elem(), append(path, opElem)) case *types.Chan: - return find(obj, T.Elem(), append(path, opElem), seen) + return f.find(T.Elem(), append(path, opElem)) case *types.Map: - if r := find(obj, T.Key(), append(path, opKey), seen); r != nil { + if r := f.find(T.Key(), append(path, opKey)); r != nil { return r } - return find(obj, T.Elem(), append(path, opElem), seen) + return f.find(T.Elem(), append(path, opElem)) case *types.Signature: - if r := findTypeParam(obj, T.RecvTypeParams(), path, opRecvTypeParam, nil); r != nil { + if r := f.findTypeParam(T.RecvTypeParams(), path, opRecvTypeParam); r != nil { return r } - if r := findTypeParam(obj, T.TypeParams(), path, opTypeParam, seen); r != nil { + if r := f.findTypeParam(T.TypeParams(), path, opTypeParam); r != nil { return r } - if r := find(obj, T.Params(), append(path, opParams), seen); r != nil { + if r := f.find(T.Params(), append(path, opParams)); r != nil { return r } - return find(obj, T.Results(), append(path, opResults), seen) + return f.find(T.Results(), append(path, opResults)) case *types.Struct: for i := 0; i < T.NumFields(); i++ { fld := T.Field(i) path2 := appendOpArg(path, opField, i) - if fld == obj { + if fld == f.obj { return path2 // found field var } - if r := find(obj, fld.Type(), append(path2, opType), seen); r != nil { + if r := f.find(fld.Type(), append(path2, opType)); r != nil { return r } } @@ -495,10 +513,10 @@ func find(obj types.Object, T types.Type, path []byte, seen map[*types.TypeName] for i := 0; i < T.Len(); i++ { v := T.At(i) path2 := appendOpArg(path, opAt, i) - if v == obj { + if v == f.obj { return path2 // found param/result var } - if r := find(obj, v.Type(), append(path2, opType), seen); r != nil { + if r := f.find(v.Type(), append(path2, opType)); r != nil { return r } } @@ -506,28 +524,35 @@ func find(obj types.Object, T types.Type, path []byte, seen map[*types.TypeName] case *types.Interface: for i := 0; i < T.NumMethods(); i++ { m := T.Method(i) + if f.seenMethods[m] { + return nil + } path2 := appendOpArg(path, opMethod, i) - if m == obj { + if m == f.obj { return path2 // found interface method } - if r := find(obj, m.Type(), append(path2, opType), seen); r != nil { + if f.seenMethods == nil { + f.seenMethods = make(map[*types.Func]bool) + } + f.seenMethods[m] = true + if r := f.find(m.Type(), append(path2, opType)); r != nil { return r } } return nil case *types.TypeParam: name := T.Obj() - if name == obj { - return append(path, opObj) - } - if seen[name] { + if f.seenTParamNames[name] { return nil } - if seen == nil { - seen = make(map[*types.TypeName]bool) + if name == f.obj { + return append(path, opObj) } - seen[name] = true - if r := find(obj, T.Constraint(), append(path, opConstraint), seen); r != nil { + if f.seenTParamNames == nil { + f.seenTParamNames = make(map[*types.TypeName]bool) + } + f.seenTParamNames[name] = true + if r := f.find(T.Constraint(), append(path, opConstraint)); r != nil { return r } return nil @@ -535,11 +560,15 @@ func find(obj types.Object, T types.Type, path []byte, seen map[*types.TypeName] panic(T) } -func findTypeParam(obj types.Object, list *types.TypeParamList, path []byte, op byte, seen 
map[*types.TypeName]bool) []byte { +func findTypeParam(obj types.Object, list *types.TypeParamList, path []byte, op byte) []byte { + return (&finder{obj: obj}).findTypeParam(list, path, op) +} + +func (f *finder) findTypeParam(list *types.TypeParamList, path []byte, op byte) []byte { for i := 0; i < list.Len(); i++ { tparam := list.At(i) path2 := appendOpArg(path, op, i) - if r := find(obj, tparam, path2, seen); r != nil { + if r := f.find(tparam, path2); r != nil { return r } } @@ -626,7 +655,7 @@ func Object(pkg *types.Package, p Path) (types.Object, error) { // Inv: t != nil, obj == nil - t = aliases.Unalias(t) + t = types.Unalias(t) switch code { case opElem: hasElem, ok := t.(hasElem) // Pointer, Slice, Array, Chan, Map @@ -664,7 +693,7 @@ func Object(pkg *types.Package, p Path) (types.Object, error) { t = named.Underlying() case opRhs: - if alias, ok := t.(*aliases.Alias); ok { + if alias, ok := t.(*types.Alias); ok { t = aliases.Rhs(alias) } else if false && aliases.Enabled() { // The Enabled check is too expensive, so for now we diff --git a/vendor/golang.org/x/tools/go/types/typeutil/callee.go b/vendor/golang.org/x/tools/go/types/typeutil/callee.go index 90dc541adf..754380351e 100644 --- a/vendor/golang.org/x/tools/go/types/typeutil/callee.go +++ b/vendor/golang.org/x/tools/go/types/typeutil/callee.go @@ -8,7 +8,6 @@ import ( "go/ast" "go/types" - "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/internal/typeparams" ) @@ -17,7 +16,7 @@ import ( // // Functions and methods may potentially have type parameters. func Callee(info *types.Info, call *ast.CallExpr) types.Object { - fun := astutil.Unparen(call.Fun) + fun := ast.Unparen(call.Fun) // Look through type instantiation if necessary. isInstance := false diff --git a/vendor/golang.org/x/tools/go/types/typeutil/map.go b/vendor/golang.org/x/tools/go/types/typeutil/map.go index a92f80dd2d..8d824f7140 100644 --- a/vendor/golang.org/x/tools/go/types/typeutil/map.go +++ b/vendor/golang.org/x/tools/go/types/typeutil/map.go @@ -12,7 +12,6 @@ import ( "go/types" "reflect" - "golang.org/x/tools/internal/aliases" "golang.org/x/tools/internal/typeparams" ) @@ -260,8 +259,8 @@ func (h Hasher) hashFor(t types.Type) uint32 { case *types.Basic: return uint32(t.Kind()) - case *aliases.Alias: - return h.Hash(aliases.Unalias(t)) + case *types.Alias: + return h.Hash(types.Unalias(t)) case *types.Array: return 9043 + 2*uint32(t.Len()) + 3*h.Hash(t.Elem()) @@ -461,8 +460,8 @@ func (h Hasher) shallowHash(t types.Type) uint32 { // elements (mostly Slice, Pointer, Basic, Named), // so there's no need to optimize anything else. 
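The objectpath rewrite above threads search state through a finder struct so that cycles via interface methods (the `type I interface{ F() interface{ I } }` shape cited in the new doc comment, golang/go#68046) are broken by the seenMethods map. A small program exercising that shape through the public Encoder API (the package source and names are invented for the example; the exact path output is not asserted):

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"go/types"

	"golang.org/x/tools/go/types/objectpath"
)

// src declares the recursive-interface shape from the finder doc comment,
// plus an unrelated variable whose path forces a walk through type I.
const src = `package p

type I interface{ F() interface{ I } } // F's result embeds I recursively

var V int
`

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, 0)
	if err != nil {
		panic(err)
	}
	pkg, err := new(types.Config).Check("p", fset, []*ast.File{f}, nil)
	if err != nil {
		panic(err)
	}
	var enc objectpath.Encoder
	// Encoding V makes the encoder consider I first; the walk through I's
	// recursive method set terminates because visited methods are remembered.
	path, err := enc.For(pkg.Scope().Lookup("V"))
	fmt.Println(path, err)
}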
switch t := t.(type) { - case *aliases.Alias: - return h.shallowHash(aliases.Unalias(t)) + case *types.Alias: + return h.shallowHash(types.Unalias(t)) case *types.Signature: var hash uint32 = 604171 diff --git a/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go b/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go index bd71aafaaa..f7666028fe 100644 --- a/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go +++ b/vendor/golang.org/x/tools/go/types/typeutil/methodsetcache.go @@ -9,8 +9,6 @@ package typeutil import ( "go/types" "sync" - - "golang.org/x/tools/internal/aliases" ) // A MethodSetCache records the method set of each type T for which @@ -34,12 +32,12 @@ func (cache *MethodSetCache) MethodSet(T types.Type) *types.MethodSet { cache.mu.Lock() defer cache.mu.Unlock() - switch T := aliases.Unalias(T).(type) { + switch T := types.Unalias(T).(type) { case *types.Named: return cache.lookupNamed(T).value case *types.Pointer: - if N, ok := aliases.Unalias(T.Elem()).(*types.Named); ok { + if N, ok := types.Unalias(T.Elem()).(*types.Named); ok { return cache.lookupNamed(N).pointer } } diff --git a/vendor/golang.org/x/tools/go/types/typeutil/ui.go b/vendor/golang.org/x/tools/go/types/typeutil/ui.go index a0c1a60ac0..9dda6a25df 100644 --- a/vendor/golang.org/x/tools/go/types/typeutil/ui.go +++ b/vendor/golang.org/x/tools/go/types/typeutil/ui.go @@ -8,8 +8,6 @@ package typeutil import ( "go/types" - - "golang.org/x/tools/internal/aliases" ) // IntuitiveMethodSet returns the intuitive method set of a type T, @@ -28,7 +26,7 @@ import ( // The order of the result is as for types.MethodSet(T). func IntuitiveMethodSet(T types.Type, msets *MethodSetCache) []*types.Selection { isPointerToConcrete := func(T types.Type) bool { - ptr, ok := aliases.Unalias(T).(*types.Pointer) + ptr, ok := types.Unalias(T).(*types.Pointer) return ok && !types.IsInterface(ptr.Elem()) } diff --git a/vendor/golang.org/x/tools/internal/aliases/aliases.go b/vendor/golang.org/x/tools/internal/aliases/aliases.go index c24c2eee45..b9425f5a20 100644 --- a/vendor/golang.org/x/tools/internal/aliases/aliases.go +++ b/vendor/golang.org/x/tools/internal/aliases/aliases.go @@ -22,11 +22,17 @@ import ( // GODEBUG=gotypesalias=... by invoking the type checker. The Enabled // function is expensive and should be called once per task (e.g. // package import), not once per call to NewAlias. -func NewAlias(enabled bool, pos token.Pos, pkg *types.Package, name string, rhs types.Type) *types.TypeName { +// +// Precondition: enabled || len(tparams)==0. +// If materialized aliases are disabled, there must not be any type parameters. +func NewAlias(enabled bool, pos token.Pos, pkg *types.Package, name string, rhs types.Type, tparams []*types.TypeParam) *types.TypeName { if enabled { tname := types.NewTypeName(pos, pkg, name, nil) - newAlias(tname, rhs) + SetTypeParams(types.NewAlias(tname, rhs), tparams) return tname } + if len(tparams) > 0 { + panic("cannot create an alias with type parameters when gotypesalias is not enabled") + } return types.NewTypeName(pos, pkg, name, rhs) } diff --git a/vendor/golang.org/x/tools/internal/aliases/aliases_go121.go b/vendor/golang.org/x/tools/internal/aliases/aliases_go121.go deleted file mode 100644 index 6652f7db0f..0000000000 --- a/vendor/golang.org/x/tools/internal/aliases/aliases_go121.go +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -//go:build !go1.22 -// +build !go1.22 - -package aliases - -import ( - "go/types" -) - -// Alias is a placeholder for a go/types.Alias for <=1.21. -// It will never be created by go/types. -type Alias struct{} - -func (*Alias) String() string { panic("unreachable") } -func (*Alias) Underlying() types.Type { panic("unreachable") } -func (*Alias) Obj() *types.TypeName { panic("unreachable") } -func Rhs(alias *Alias) types.Type { panic("unreachable") } -func TypeParams(alias *Alias) *types.TypeParamList { panic("unreachable") } -func SetTypeParams(alias *Alias, tparams []*types.TypeParam) { panic("unreachable") } -func TypeArgs(alias *Alias) *types.TypeList { panic("unreachable") } -func Origin(alias *Alias) *Alias { panic("unreachable") } - -// Unalias returns the type t for go <=1.21. -func Unalias(t types.Type) types.Type { return t } - -func newAlias(name *types.TypeName, rhs types.Type) *Alias { panic("unreachable") } - -// Enabled reports whether [NewAlias] should create [types.Alias] types. -// -// Before go1.22, this function always returns false. -func Enabled() bool { return false } diff --git a/vendor/golang.org/x/tools/internal/aliases/aliases_go122.go b/vendor/golang.org/x/tools/internal/aliases/aliases_go122.go index 3ef1afeb40..7716a3331d 100644 --- a/vendor/golang.org/x/tools/internal/aliases/aliases_go122.go +++ b/vendor/golang.org/x/tools/internal/aliases/aliases_go122.go @@ -2,9 +2,6 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -//go:build go1.22 -// +build go1.22 - package aliases import ( @@ -14,22 +11,19 @@ import ( "go/types" ) -// Alias is an alias of types.Alias. -type Alias = types.Alias - // Rhs returns the type on the right-hand side of the alias declaration. -func Rhs(alias *Alias) types.Type { +func Rhs(alias *types.Alias) types.Type { if alias, ok := any(alias).(interface{ Rhs() types.Type }); ok { return alias.Rhs() // go1.23+ } // go1.22's Alias didn't have the Rhs method, // so Unalias is the best we can do. - return Unalias(alias) + return types.Unalias(alias) } // TypeParams returns the type parameter list of the alias. -func TypeParams(alias *Alias) *types.TypeParamList { +func TypeParams(alias *types.Alias) *types.TypeParamList { if alias, ok := any(alias).(interface{ TypeParams() *types.TypeParamList }); ok { return alias.TypeParams() // go1.23+ } @@ -37,7 +31,7 @@ func TypeParams(alias *Alias) *types.TypeParamList { } // SetTypeParams sets the type parameters of the alias type. -func SetTypeParams(alias *Alias, tparams []*types.TypeParam) { +func SetTypeParams(alias *types.Alias, tparams []*types.TypeParam) { if alias, ok := any(alias).(interface { SetTypeParams(tparams []*types.TypeParam) }); ok { @@ -48,7 +42,7 @@ func SetTypeParams(alias *Alias, tparams []*types.TypeParam) { } // TypeArgs returns the type arguments used to instantiate the Alias type. -func TypeArgs(alias *Alias) *types.TypeList { +func TypeArgs(alias *types.Alias) *types.TypeList { if alias, ok := any(alias).(interface{ TypeArgs() *types.TypeList }); ok { return alias.TypeArgs() // go1.23+ } @@ -57,26 +51,13 @@ func TypeArgs(alias *Alias) *types.TypeList { // Origin returns the generic Alias type of which alias is an instance. // If alias is not an instance of a generic alias, Origin returns alias. 
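To illustrate the forward-compatibility pattern used in the aliases_go122.go hunk above (asserting against an anonymous interface so go1.23-only methods such as Rhs are called only when they exist), here is a small standalone sketch. It is an example under assumptions, not code from this change, and it requires go1.22+ for *types.Alias:

package main

import (
	"fmt"
	"go/token"
	"go/types"
)

// rhsOf mirrors the probe above: call Rhs when the method exists (go1.23+),
// otherwise fall back to types.Unalias.
func rhsOf(alias *types.Alias) types.Type {
	if a, ok := any(alias).(interface{ Rhs() types.Type }); ok {
		return a.Rhs()
	}
	return types.Unalias(alias)
}

func main() {
	pkg := types.NewPackage("example.com/p", "p")
	tname := types.NewTypeName(token.NoPos, pkg, "A", nil)
	alias := types.NewAlias(tname, types.Typ[types.Int]) // type A = int
	fmt.Println(rhsOf(alias))                            // int
}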
-func Origin(alias *Alias) *Alias { +func Origin(alias *types.Alias) *types.Alias { if alias, ok := any(alias).(interface{ Origin() *types.Alias }); ok { return alias.Origin() // go1.23+ } return alias // not an instance of a generic alias (go1.22) } -// Unalias is a wrapper of types.Unalias. -func Unalias(t types.Type) types.Type { return types.Unalias(t) } - -// newAlias is an internal alias around types.NewAlias. -// Direct usage is discouraged as the moment. -// Try to use NewAlias instead. -func newAlias(tname *types.TypeName, rhs types.Type) *Alias { - a := types.NewAlias(tname, rhs) - // TODO(go.dev/issue/65455): Remove kludgy workaround to set a.actual as a side-effect. - Unalias(a) - return a -} - // Enabled reports whether [NewAlias] should create [types.Alias] types. // // This function is expensive! Call it sparingly. @@ -92,7 +73,7 @@ func Enabled() bool { // many tests. Therefore any attempt to cache the result // is just incorrect. fset := token.NewFileSet() - f, _ := parser.ParseFile(fset, "a.go", "package p; type A = int", 0) + f, _ := parser.ParseFile(fset, "a.go", "package p; type A = int", parser.SkipObjectResolution) pkg, _ := new(types.Config).Check("p", fset, []*ast.File{f}, nil) _, enabled := pkg.Scope().Lookup("A").Type().(*types.Alias) return enabled diff --git a/vendor/golang.org/x/tools/internal/analysisinternal/analysis.go b/vendor/golang.org/x/tools/internal/analysisinternal/analysis.go index e0b13e70a0..4ccaa210af 100644 --- a/vendor/golang.org/x/tools/internal/analysisinternal/analysis.go +++ b/vendor/golang.org/x/tools/internal/analysisinternal/analysis.go @@ -10,6 +10,7 @@ import ( "bytes" "fmt" "go/ast" + "go/scanner" "go/token" "go/types" "os" @@ -17,24 +18,57 @@ import ( "strconv" "golang.org/x/tools/go/analysis" - "golang.org/x/tools/internal/aliases" ) func TypeErrorEndPos(fset *token.FileSet, src []byte, start token.Pos) token.Pos { // Get the end position for the type error. - offset, end := fset.PositionFor(start, false).Offset, start - if offset >= len(src) { - return end + file := fset.File(start) + if file == nil { + return start } - if width := bytes.IndexAny(src[offset:], " \n,():;[]+-*"); width > 0 { - end = start + token.Pos(width) + if offset := file.PositionFor(start, false).Offset; offset > len(src) { + return start + } else { + src = src[offset:] + } + + // Attempt to find a reasonable end position for the type error. + // + // TODO(rfindley): the heuristic implemented here is unclear. It looks like + // it seeks the end of the primary operand starting at start, but that is not + // quite implemented (for example, given a func literal this heuristic will + // return the range of the func keyword). + // + // We should formalize this heuristic, or deprecate it by finally proposing + // to add end position to all type checker errors. + // + // Nevertheless, ensure that the end position at least spans the current + // token at the cursor (this was golang/go#69505). + end := start + { + var s scanner.Scanner + fset := token.NewFileSet() + f := fset.AddFile("", fset.Base(), len(src)) + s.Init(f, src, nil /* no error handler */, scanner.ScanComments) + pos, tok, lit := s.Scan() + if tok != token.SEMICOLON && token.Pos(f.Base()) <= pos && pos <= token.Pos(f.Base()+f.Size()) { + off := file.Offset(pos) + len(lit) + src = src[off:] + end += token.Pos(off) + } + } + + // Look for bytes that might terminate the current operand. See note above: + // this is imprecise. 
+ if width := bytes.IndexAny(src, " \n,():;[]+-*/"); width > 0 { + end += token.Pos(width) } return end } func ZeroValue(f *ast.File, pkg *types.Package, typ types.Type) ast.Expr { // TODO(adonovan): think about generics, and also generic aliases. - under := aliases.Unalias(typ) + under := types.Unalias(typ) // Don't call Underlying unconditionally: although it removes // Named and Alias, it also removes TypeParam. if n, ok := under.(*types.Named); ok { @@ -416,8 +450,7 @@ func CheckReadable(pass *analysis.Pass, filename string) error { return nil } for _, f := range pass.Files { - // TODO(adonovan): use go1.20 f.FileStart - if pass.Fset.File(f.Pos()).Name() == filename { + if pass.Fset.File(f.FileStart).Name() == filename { return nil } } diff --git a/vendor/golang.org/x/tools/internal/gcimporter/bimport.go b/vendor/golang.org/x/tools/internal/gcimporter/bimport.go index d98b0db2a9..d79a605ed1 100644 --- a/vendor/golang.org/x/tools/internal/gcimporter/bimport.go +++ b/vendor/golang.org/x/tools/internal/gcimporter/bimport.go @@ -87,64 +87,3 @@ func chanDir(d int) types.ChanDir { return 0 } } - -var predeclOnce sync.Once -var predecl []types.Type // initialized lazily - -func predeclared() []types.Type { - predeclOnce.Do(func() { - // initialize lazily to be sure that all - // elements have been initialized before - predecl = []types.Type{ // basic types - types.Typ[types.Bool], - types.Typ[types.Int], - types.Typ[types.Int8], - types.Typ[types.Int16], - types.Typ[types.Int32], - types.Typ[types.Int64], - types.Typ[types.Uint], - types.Typ[types.Uint8], - types.Typ[types.Uint16], - types.Typ[types.Uint32], - types.Typ[types.Uint64], - types.Typ[types.Uintptr], - types.Typ[types.Float32], - types.Typ[types.Float64], - types.Typ[types.Complex64], - types.Typ[types.Complex128], - types.Typ[types.String], - - // basic type aliases - types.Universe.Lookup("byte").Type(), - types.Universe.Lookup("rune").Type(), - - // error - types.Universe.Lookup("error").Type(), - - // untyped types - types.Typ[types.UntypedBool], - types.Typ[types.UntypedInt], - types.Typ[types.UntypedRune], - types.Typ[types.UntypedFloat], - types.Typ[types.UntypedComplex], - types.Typ[types.UntypedString], - types.Typ[types.UntypedNil], - - // package unsafe - types.Typ[types.UnsafePointer], - - // invalid type - types.Typ[types.Invalid], // only appears in packages with errors - - // used internally by gc; never used by this package or in .a files - anyType{}, - } - predecl = append(predecl, additionalPredeclared()...) - }) - return predecl -} - -type anyType struct{} - -func (t anyType) Underlying() types.Type { return t } -func (t anyType) String() string { return "any" } diff --git a/vendor/golang.org/x/tools/internal/gcimporter/gcimporter.go b/vendor/golang.org/x/tools/internal/gcimporter/gcimporter.go index 39df91124a..e6c5d51f8e 100644 --- a/vendor/golang.org/x/tools/internal/gcimporter/gcimporter.go +++ b/vendor/golang.org/x/tools/internal/gcimporter/gcimporter.go @@ -232,14 +232,19 @@ func Import(packages map[string]*types.Package, path, srcDir string, lookup func // Select appropriate importer. if len(data) > 0 { switch data[0] { - case 'v', 'c', 'd': // binary, till go1.10 + case 'v', 'c', 'd': + // binary: emitted by cmd/compile till go1.10; obsolete. return nil, fmt.Errorf("binary (%c) import format is no longer supported", data[0]) - case 'i': // indexed, till go1.19 + case 'i': + // indexed: emitted by cmd/compile till go1.19; + // now used only for serializing go/types. 
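The TypeErrorEndPos change above scans the token at the error offset so the reported range at least spans it. A minimal standalone sketch of that go/scanner usage, with made-up input (not the vendored function itself):

package main

import (
	"fmt"
	"go/scanner"
	"go/token"
)

func main() {
	src := []byte("undefinedName + 1")

	// A scanner needs a token.File covering the source.
	fset := token.NewFileSet()
	f := fset.AddFile("x.go", fset.Base(), len(src))

	var s scanner.Scanner
	s.Init(f, src, nil /* no error handler */, scanner.ScanComments)

	// The first token spans [pos, pos+len(lit)), so a reported end position
	// can be extended to at least cover it.
	pos, tok, lit := s.Scan()
	fmt.Println(f.Offset(pos), tok, lit) // 0 IDENT undefinedName
}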
+ // See https://github.com/golang/go/issues/69491. _, pkg, err := IImportData(fset, packages, data[1:], id) return pkg, err - case 'u': // unified, from go1.20 + case 'u': + // unified: emitted by cmd/compile since go1.20. _, pkg, err := UImportData(fset, packages, data[1:size], id) return pkg, err diff --git a/vendor/golang.org/x/tools/internal/gcimporter/iexport.go b/vendor/golang.org/x/tools/internal/gcimporter/iexport.go index deeb67f315..7dfc31a37d 100644 --- a/vendor/golang.org/x/tools/internal/gcimporter/iexport.go +++ b/vendor/golang.org/x/tools/internal/gcimporter/iexport.go @@ -2,9 +2,227 @@ // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. -// Indexed binary package export. -// This file was derived from $GOROOT/src/cmd/compile/internal/gc/iexport.go; -// see that file for specification of the format. +// Indexed package export. +// +// The indexed export data format is an evolution of the previous +// binary export data format. Its chief contribution is introducing an +// index table, which allows efficient random access of individual +// declarations and inline function bodies. In turn, this allows +// avoiding unnecessary work for compilation units that import large +// packages. +// +// +// The top-level data format is structured as: +// +// Header struct { +// Tag byte // 'i' +// Version uvarint +// StringSize uvarint +// DataSize uvarint +// } +// +// Strings [StringSize]byte +// Data [DataSize]byte +// +// MainIndex []struct{ +// PkgPath stringOff +// PkgName stringOff +// PkgHeight uvarint +// +// Decls []struct{ +// Name stringOff +// Offset declOff +// } +// } +// +// Fingerprint [8]byte +// +// uvarint means a uint64 written out using uvarint encoding. +// +// []T means a uvarint followed by that many T objects. In other +// words: +// +// Len uvarint +// Elems [Len]T +// +// stringOff means a uvarint that indicates an offset within the +// Strings section. At that offset is another uvarint, followed by +// that many bytes, which form the string value. +// +// declOff means a uvarint that indicates an offset within the Data +// section where the associated declaration can be found. +// +// +// There are five kinds of declarations, distinguished by their first +// byte: +// +// type Var struct { +// Tag byte // 'V' +// Pos Pos +// Type typeOff +// } +// +// type Func struct { +// Tag byte // 'F' or 'G' +// Pos Pos +// TypeParams []typeOff // only present if Tag == 'G' +// Signature Signature +// } +// +// type Const struct { +// Tag byte // 'C' +// Pos Pos +// Value Value +// } +// +// type Type struct { +// Tag byte // 'T' or 'U' +// Pos Pos +// TypeParams []typeOff // only present if Tag == 'U' +// Underlying typeOff +// +// Methods []struct{ // omitted if Underlying is an interface type +// Pos Pos +// Name stringOff +// Recv Param +// Signature Signature +// } +// } +// +// type Alias struct { +// Tag byte // 'A' or 'B' +// Pos Pos +// TypeParams []typeOff // only present if Tag == 'B' +// Type typeOff +// } +// +// // "Automatic" declaration of each typeparam +// type TypeParam struct { +// Tag byte // 'P' +// Pos Pos +// Implicit bool +// Constraint typeOff +// } +// +// typeOff means a uvarint that either indicates a predeclared type, +// or an offset into the Data section. If the uvarint is less than +// predeclReserved, then it indicates the index into the predeclared +// types list (see predeclared in bexport.go for order). Otherwise, +// subtracting predeclReserved yields the offset of a type descriptor. 
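The format documentation above leans on uvarint encoding for lengths and offsets. As a standalone refresher using only encoding/binary (not the exporter itself), uvarints round-trip like this:

package main

import (
	"encoding/binary"
	"fmt"
)

func main() {
	// Encode 300 as an unsigned varint: 7 value bits per byte,
	// high bit set on every byte except the last.
	buf := make([]byte, binary.MaxVarintLen64)
	n := binary.PutUvarint(buf, 300)
	fmt.Println(buf[:n]) // [172 2]

	// Decode it back, along with the number of bytes consumed.
	v, read := binary.Uvarint(buf[:n])
	fmt.Println(v, read) // 300 2
}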
+// +// Value means a type, kind, and type-specific value. See +// (*exportWriter).value for details. +// +// +// There are twelve kinds of type descriptors, distinguished by an itag: +// +// type DefinedType struct { +// Tag itag // definedType +// Name stringOff +// PkgPath stringOff +// } +// +// type PointerType struct { +// Tag itag // pointerType +// Elem typeOff +// } +// +// type SliceType struct { +// Tag itag // sliceType +// Elem typeOff +// } +// +// type ArrayType struct { +// Tag itag // arrayType +// Len uint64 +// Elem typeOff +// } +// +// type ChanType struct { +// Tag itag // chanType +// Dir uint64 // 1 RecvOnly; 2 SendOnly; 3 SendRecv +// Elem typeOff +// } +// +// type MapType struct { +// Tag itag // mapType +// Key typeOff +// Elem typeOff +// } +// +// type FuncType struct { +// Tag itag // signatureType +// PkgPath stringOff +// Signature Signature +// } +// +// type StructType struct { +// Tag itag // structType +// PkgPath stringOff +// Fields []struct { +// Pos Pos +// Name stringOff +// Type typeOff +// Embedded bool +// Note stringOff +// } +// } +// +// type InterfaceType struct { +// Tag itag // interfaceType +// PkgPath stringOff +// Embeddeds []struct { +// Pos Pos +// Type typeOff +// } +// Methods []struct { +// Pos Pos +// Name stringOff +// Signature Signature +// } +// } +// +// // Reference to a type param declaration +// type TypeParamType struct { +// Tag itag // typeParamType +// Name stringOff +// PkgPath stringOff +// } +// +// // Instantiation of a generic type (like List[T2] or List[int]) +// type InstanceType struct { +// Tag itag // instanceType +// Pos pos +// TypeArgs []typeOff +// BaseType typeOff +// } +// +// type UnionType struct { +// Tag itag // interfaceType +// Terms []struct { +// tilde bool +// Type typeOff +// } +// } +// +// +// +// type Signature struct { +// Params []Param +// Results []Param +// Variadic bool // omitted if Results is empty +// } +// +// type Param struct { +// Pos Pos +// Name stringOff +// Type typOff +// } +// +// +// Pos encodes a file:line:column triple, incorporating a simple delta +// encoding scheme within a data object. See exportWriter.pos for +// details. package gcimporter @@ -24,11 +242,30 @@ import ( "golang.org/x/tools/go/types/objectpath" "golang.org/x/tools/internal/aliases" - "golang.org/x/tools/internal/tokeninternal" ) // IExportShallow encodes "shallow" export data for the specified package. // +// For types, we use "shallow" export data. Historically, the Go +// compiler always produced a summary of the types for a given package +// that included types from other packages that it indirectly +// referenced: "deep" export data. This had the advantage that the +// compiler (and analogous tools such as gopls) need only load one +// file per direct import. However, it meant that the files tended to +// get larger based on the level of the package in the import +// graph. For example, higher-level packages in the kubernetes module +// have over 1MB of "deep" export data, even when they have almost no +// content of their own, merely because they mention a major type that +// references many others. In pathological cases the export data was +// 300x larger than the source for a package due to this quadratic +// growth. +// +// "Shallow" export data means that the serialized types describe only +// a single package. If those types mention types from other packages, +// the type checker may need to request additional packages beyond +// just the direct imports. 
Type information for the entire transitive +// closure of imports is provided (lazily) by the DAG. +// // No promises are made about the encoding other than that it can be decoded by // the same version of IIExportShallow. If you plan to save export data in the // file system, be sure to include a cryptographic digest of the executable in @@ -51,8 +288,8 @@ func IExportShallow(fset *token.FileSet, pkg *types.Package, reportf ReportFunc) } // IImportShallow decodes "shallow" types.Package data encoded by -// IExportShallow in the same executable. This function cannot import data from -// cmd/compile or gcexportdata.Write. +// [IExportShallow] in the same executable. This function cannot import data +// from cmd/compile or gcexportdata.Write. // // The importer calls getPackages to obtain package symbols for all // packages mentioned in the export data, including the one being @@ -223,7 +460,7 @@ func (p *iexporter) encodeFile(w *intWriter, file *token.File, needed []uint64) // Sort the set of needed offsets. Duplicates are harmless. sort.Slice(needed, func(i, j int) bool { return needed[i] < needed[j] }) - lines := tokeninternal.GetLines(file) // byte offset of each line start + lines := file.Lines() // byte offset of each line start w.uint64(uint64(len(lines))) // Rather than record the entire array of line start offsets, @@ -507,13 +744,13 @@ func (p *iexporter) doDecl(obj types.Object) { case *types.TypeName: t := obj.Type() - if tparam, ok := aliases.Unalias(t).(*types.TypeParam); ok { + if tparam, ok := types.Unalias(t).(*types.TypeParam); ok { w.tag(typeParamTag) w.pos(obj.Pos()) constraint := tparam.Constraint() if p.version >= iexportVersionGo1_18 { implicit := false - if iface, _ := aliases.Unalias(constraint).(*types.Interface); iface != nil { + if iface, _ := types.Unalias(constraint).(*types.Interface); iface != nil { implicit = iface.IsImplicit() } w.bool(implicit) @@ -523,9 +760,22 @@ func (p *iexporter) doDecl(obj types.Object) { } if obj.IsAlias() { - w.tag(aliasTag) + alias, materialized := t.(*types.Alias) // may fail when aliases are not enabled + + var tparams *types.TypeParamList + if materialized { + tparams = aliases.TypeParams(alias) + } + if tparams.Len() == 0 { + w.tag(aliasTag) + } else { + w.tag(genericAliasTag) + } w.pos(obj.Pos()) - if alias, ok := t.(*aliases.Alias); ok { + if tparams.Len() > 0 { + w.tparamList(obj.Name(), tparams, obj.Pkg()) + } + if materialized { // Preserve materialized aliases, // even of non-exported types. t = aliases.Rhs(alias) @@ -744,8 +994,14 @@ func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) { }() } switch t := t.(type) { - case *aliases.Alias: - // TODO(adonovan): support parameterized aliases, following *types.Named. 
+ case *types.Alias: + if targs := aliases.TypeArgs(t); targs.Len() > 0 { + w.startType(instanceType) + w.pos(t.Obj().Pos()) + w.typeList(targs, pkg) + w.typ(aliases.Origin(t), pkg) + return + } w.startType(aliasType) w.qualifiedType(t.Obj()) @@ -854,7 +1110,7 @@ func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) { for i := 0; i < n; i++ { ft := t.EmbeddedType(i) tPkg := pkg - if named, _ := aliases.Unalias(ft).(*types.Named); named != nil { + if named, _ := types.Unalias(ft).(*types.Named); named != nil { w.pos(named.Obj().Pos()) } else { w.pos(token.NoPos) diff --git a/vendor/golang.org/x/tools/internal/gcimporter/iimport.go b/vendor/golang.org/x/tools/internal/gcimporter/iimport.go index 136aa03653..e260c0e8db 100644 --- a/vendor/golang.org/x/tools/internal/gcimporter/iimport.go +++ b/vendor/golang.org/x/tools/internal/gcimporter/iimport.go @@ -3,7 +3,7 @@ // license that can be found in the LICENSE file. // Indexed package import. -// See cmd/compile/internal/gc/iexport.go for the export data format. +// See iexport.go for the export data format. // This file is a copy of $GOROOT/src/go/internal/gcimporter/iimport.go. @@ -53,6 +53,7 @@ const ( iexportVersionPosCol = 1 iexportVersionGo1_18 = 2 iexportVersionGenerics = 2 + iexportVersion = iexportVersionGenerics iexportVersionCurrent = 2 ) @@ -540,7 +541,7 @@ func canReuse(def *types.Named, rhs types.Type) bool { if def == nil { return true } - iface, _ := aliases.Unalias(rhs).(*types.Interface) + iface, _ := types.Unalias(rhs).(*types.Interface) if iface == nil { return true } @@ -557,19 +558,28 @@ type importReader struct { prevColumn int64 } +// markBlack is redefined in iimport_go123.go, to work around golang/go#69912. +// +// If TypeNames are not marked black (in the sense of go/types cycle +// detection), they may be mutated when dot-imported. Fix this by punching a +// hole through the type, when compiling with Go 1.23. (The bug has been fixed +// for 1.24, but the fix was not worth back-porting). +var markBlack = func(name *types.TypeName) {} + func (r *importReader) obj(name string) { tag := r.byte() pos := r.pos() switch tag { - case aliasTag: + case aliasTag, genericAliasTag: + var tparams []*types.TypeParam + if tag == genericAliasTag { + tparams = r.tparamList() + } typ := r.typ() - // TODO(adonovan): support generic aliases: - // if tag == genericAliasTag { - // tparams := r.tparamList() - // alias.SetTypeParams(tparams) - // } - r.declare(aliases.NewAlias(r.p.aliases, pos, r.currPkg, name, typ)) + obj := aliases.NewAlias(r.p.aliases, pos, r.currPkg, name, typ, tparams) + markBlack(obj) // workaround for golang/go#69912 + r.declare(obj) case constTag: typ, val := r.value() @@ -589,6 +599,9 @@ func (r *importReader) obj(name string) { // declaration before recursing. obj := types.NewTypeName(pos, r.currPkg, name, nil) named := types.NewNamed(obj, nil, nil) + + markBlack(obj) // workaround for golang/go#69912 + // Declare obj before calling r.tparamList, so the new type name is recognized // if used in the constraint of one of its own typeparams (see #48280). 
r.declare(obj) @@ -615,7 +628,7 @@ func (r *importReader) obj(name string) { if targs.Len() > 0 { rparams = make([]*types.TypeParam, targs.Len()) for i := range rparams { - rparams[i] = aliases.Unalias(targs.At(i)).(*types.TypeParam) + rparams[i] = types.Unalias(targs.At(i)).(*types.TypeParam) } } msig := r.signature(recv, rparams, nil) @@ -645,7 +658,7 @@ func (r *importReader) obj(name string) { } constraint := r.typ() if implicit { - iface, _ := aliases.Unalias(constraint).(*types.Interface) + iface, _ := types.Unalias(constraint).(*types.Interface) if iface == nil { errorf("non-interface constraint marked implicit") } @@ -852,7 +865,7 @@ func (r *importReader) typ() types.Type { } func isInterface(t types.Type) bool { - _, ok := aliases.Unalias(t).(*types.Interface) + _, ok := types.Unalias(t).(*types.Interface) return ok } @@ -862,7 +875,7 @@ func (r *importReader) string() string { return r.p.stringAt(r.uint64()) } func (r *importReader) doType(base *types.Named) (res types.Type) { k := r.kind() if debug { - r.p.trace("importing type %d (base: %s)", k, base) + r.p.trace("importing type %d (base: %v)", k, base) r.p.indent++ defer func() { r.p.indent-- @@ -959,7 +972,7 @@ func (r *importReader) doType(base *types.Named) (res types.Type) { methods[i] = method } - typ := newInterface(methods, embeddeds) + typ := types.NewInterfaceType(methods, embeddeds) r.p.interfaceList = append(r.p.interfaceList, typ) return typ @@ -1051,7 +1064,7 @@ func (r *importReader) tparamList() []*types.TypeParam { for i := range xs { // Note: the standard library importer is tolerant of nil types here, // though would panic in SetTypeParams. - xs[i] = aliases.Unalias(r.typ()).(*types.TypeParam) + xs[i] = types.Unalias(r.typ()).(*types.TypeParam) } return xs } diff --git a/vendor/golang.org/x/tools/internal/gcimporter/iimport_go122.go b/vendor/golang.org/x/tools/internal/gcimporter/iimport_go122.go new file mode 100644 index 0000000000..7586bfaca6 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/gcimporter/iimport_go122.go @@ -0,0 +1,53 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:build go1.22 && !go1.24 + +package gcimporter + +import ( + "go/token" + "go/types" + "unsafe" +) + +// TODO(rfindley): delete this workaround once go1.24 is assured. + +func init() { + // Update markBlack so that it correctly sets the color + // of imported TypeNames. + // + // See the doc comment for markBlack for details. + + type color uint32 + const ( + white color = iota + black + grey + ) + type object struct { + _ *types.Scope + _ token.Pos + _ *types.Package + _ string + _ types.Type + _ uint32 + color_ color + _ token.Pos + } + type typeName struct { + object + } + + // If the size of types.TypeName changes, this will fail to compile. + const delta = int64(unsafe.Sizeof(typeName{})) - int64(unsafe.Sizeof(types.TypeName{})) + var _ [-delta * delta]int + + markBlack = func(obj *types.TypeName) { + type uP = unsafe.Pointer + var ptr *typeName + *(*uP)(uP(&ptr)) = uP(obj) + ptr.color_ = black + } +} diff --git a/vendor/golang.org/x/tools/internal/gcimporter/newInterface10.go b/vendor/golang.org/x/tools/internal/gcimporter/newInterface10.go deleted file mode 100644 index 8b163e3d05..0000000000 --- a/vendor/golang.org/x/tools/internal/gcimporter/newInterface10.go +++ /dev/null @@ -1,22 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. 
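The new iimport_go122.go above pairs a mirror struct with a constant array length so compilation fails if the mirrored layout ever drifts. A reduced standalone sketch of just that compile-time size assertion, using toy types rather than go/types internals:

package main

import (
	"fmt"
	"unsafe"
)

type real struct{ a, b int64 }   // the type whose layout we depend on
type mirror struct{ x, y int64 } // our local copy of that layout

// If the sizes ever differ, delta is non-zero and the array length below
// becomes negative, which is a compile-time error.
const delta = int64(unsafe.Sizeof(mirror{})) - int64(unsafe.Sizeof(real{}))

var _ [-delta * delta]int

func main() {
	fmt.Println("layouts agree:", unsafe.Sizeof(real{}) == unsafe.Sizeof(mirror{}))
}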
-// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.11 -// +build !go1.11 - -package gcimporter - -import "go/types" - -func newInterface(methods []*types.Func, embeddeds []types.Type) *types.Interface { - named := make([]*types.Named, len(embeddeds)) - for i, e := range embeddeds { - var ok bool - named[i], ok = e.(*types.Named) - if !ok { - panic("embedding of non-defined interfaces in interfaces is not supported before Go 1.11") - } - } - return types.NewInterface(methods, named) -} diff --git a/vendor/golang.org/x/tools/internal/gcimporter/newInterface11.go b/vendor/golang.org/x/tools/internal/gcimporter/newInterface11.go deleted file mode 100644 index 49984f40fd..0000000000 --- a/vendor/golang.org/x/tools/internal/gcimporter/newInterface11.go +++ /dev/null @@ -1,14 +0,0 @@ -// Copyright 2018 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.11 -// +build go1.11 - -package gcimporter - -import "go/types" - -func newInterface(methods []*types.Func, embeddeds []types.Type) *types.Interface { - return types.NewInterfaceType(methods, embeddeds) -} diff --git a/vendor/golang.org/x/tools/internal/gcimporter/predeclared.go b/vendor/golang.org/x/tools/internal/gcimporter/predeclared.go new file mode 100644 index 0000000000..907c8557a5 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/gcimporter/predeclared.go @@ -0,0 +1,91 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gcimporter + +import ( + "go/types" + "sync" +) + +// predecl is a cache for the predeclared types in types.Universe. +// +// Cache a distinct result based on the runtime value of any. +// The pointer value of the any type varies based on GODEBUG settings. 
+var predeclMu sync.Mutex +var predecl map[types.Type][]types.Type + +func predeclared() []types.Type { + anyt := types.Universe.Lookup("any").Type() + + predeclMu.Lock() + defer predeclMu.Unlock() + + if pre, ok := predecl[anyt]; ok { + return pre + } + + if predecl == nil { + predecl = make(map[types.Type][]types.Type) + } + + decls := []types.Type{ // basic types + types.Typ[types.Bool], + types.Typ[types.Int], + types.Typ[types.Int8], + types.Typ[types.Int16], + types.Typ[types.Int32], + types.Typ[types.Int64], + types.Typ[types.Uint], + types.Typ[types.Uint8], + types.Typ[types.Uint16], + types.Typ[types.Uint32], + types.Typ[types.Uint64], + types.Typ[types.Uintptr], + types.Typ[types.Float32], + types.Typ[types.Float64], + types.Typ[types.Complex64], + types.Typ[types.Complex128], + types.Typ[types.String], + + // basic type aliases + types.Universe.Lookup("byte").Type(), + types.Universe.Lookup("rune").Type(), + + // error + types.Universe.Lookup("error").Type(), + + // untyped types + types.Typ[types.UntypedBool], + types.Typ[types.UntypedInt], + types.Typ[types.UntypedRune], + types.Typ[types.UntypedFloat], + types.Typ[types.UntypedComplex], + types.Typ[types.UntypedString], + types.Typ[types.UntypedNil], + + // package unsafe + types.Typ[types.UnsafePointer], + + // invalid type + types.Typ[types.Invalid], // only appears in packages with errors + + // used internally by gc; never used by this package or in .a files + anyType{}, + + // comparable + types.Universe.Lookup("comparable").Type(), + + // any + anyt, + } + + predecl[anyt] = decls + return decls +} + +type anyType struct{} + +func (t anyType) Underlying() types.Type { return t } +func (t anyType) String() string { return "any" } diff --git a/vendor/golang.org/x/tools/internal/gcimporter/support_go118.go b/vendor/golang.org/x/tools/internal/gcimporter/support_go118.go deleted file mode 100644 index 0cd3b91b65..0000000000 --- a/vendor/golang.org/x/tools/internal/gcimporter/support_go118.go +++ /dev/null @@ -1,34 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package gcimporter - -import "go/types" - -const iexportVersion = iexportVersionGenerics - -// additionalPredeclared returns additional predeclared types in go.1.18. -func additionalPredeclared() []types.Type { - return []types.Type{ - // comparable - types.Universe.Lookup("comparable").Type(), - - // any - types.Universe.Lookup("any").Type(), - } -} - -// See cmd/compile/internal/types.SplitVargenSuffix. -func splitVargenSuffix(name string) (base, suffix string) { - i := len(name) - for i > 0 && name[i-1] >= '0' && name[i-1] <= '9' { - i-- - } - const dot = "·" - if i >= len(dot) && name[i-len(dot):i] == dot { - i -= len(dot) - return name[:i], name[i:] - } - return name, "" -} diff --git a/vendor/golang.org/x/tools/internal/gcimporter/unified_no.go b/vendor/golang.org/x/tools/internal/gcimporter/unified_no.go deleted file mode 100644 index 38b624cada..0000000000 --- a/vendor/golang.org/x/tools/internal/gcimporter/unified_no.go +++ /dev/null @@ -1,10 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -//go:build !goexperiment.unified -// +build !goexperiment.unified - -package gcimporter - -const unifiedIR = false diff --git a/vendor/golang.org/x/tools/internal/gcimporter/unified_yes.go b/vendor/golang.org/x/tools/internal/gcimporter/unified_yes.go deleted file mode 100644 index b5118d0b3a..0000000000 --- a/vendor/golang.org/x/tools/internal/gcimporter/unified_yes.go +++ /dev/null @@ -1,10 +0,0 @@ -// Copyright 2022 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build goexperiment.unified -// +build goexperiment.unified - -package gcimporter - -const unifiedIR = true diff --git a/vendor/golang.org/x/tools/internal/gcimporter/ureader_yes.go b/vendor/golang.org/x/tools/internal/gcimporter/ureader_yes.go index 2c07706887..1db408613c 100644 --- a/vendor/golang.org/x/tools/internal/gcimporter/ureader_yes.go +++ b/vendor/golang.org/x/tools/internal/gcimporter/ureader_yes.go @@ -52,8 +52,7 @@ func (pr *pkgReader) later(fn func()) { // See cmd/compile/internal/noder.derivedInfo. type derivedInfo struct { - idx pkgbits.Index - needed bool + idx pkgbits.Index } // See cmd/compile/internal/noder.typeInfo. @@ -110,13 +109,17 @@ func readUnifiedPackage(fset *token.FileSet, ctxt *types.Context, imports map[st r := pr.newReader(pkgbits.RelocMeta, pkgbits.PublicRootIdx, pkgbits.SyncPublic) pkg := r.pkg() - r.Bool() // has init + if r.Version().Has(pkgbits.HasInit) { + r.Bool() + } for i, n := 0, r.Len(); i < n; i++ { // As if r.obj(), but avoiding the Scope.Lookup call, // to avoid eager loading of imports. r.Sync(pkgbits.SyncObject) - assert(!r.Bool()) + if r.Version().Has(pkgbits.DerivedFuncInstance) { + assert(!r.Bool()) + } r.p.objIdx(r.Reloc(pkgbits.RelocObj)) assert(r.Len() == 0) } @@ -165,7 +168,7 @@ type readerDict struct { // tparams is a slice of the constructed TypeParams for the element. tparams []*types.TypeParam - // devived is a slice of types derived from tparams, which may be + // derived is a slice of types derived from tparams, which may be // instantiated while reading the current element. derived []derivedInfo derivedTypes []types.Type // lazily instantiated from derived @@ -471,7 +474,9 @@ func (r *reader) param() *types.Var { func (r *reader) obj() (types.Object, []types.Type) { r.Sync(pkgbits.SyncObject) - assert(!r.Bool()) + if r.Version().Has(pkgbits.DerivedFuncInstance) { + assert(!r.Bool()) + } pkg, name := r.p.objIdx(r.Reloc(pkgbits.RelocObj)) obj := pkgScope(pkg).Lookup(name) @@ -525,8 +530,12 @@ func (pr *pkgReader) objIdx(idx pkgbits.Index) (*types.Package, string) { case pkgbits.ObjAlias: pos := r.pos() + var tparams []*types.TypeParam + if r.Version().Has(pkgbits.AliasTypeParamNames) { + tparams = r.typeParamNames() + } typ := r.typ() - declare(aliases.NewAlias(r.p.aliases, pos, objPkg, objName, typ)) + declare(aliases.NewAlias(r.p.aliases, pos, objPkg, objName, typ, tparams)) case pkgbits.ObjConst: pos := r.pos() @@ -553,7 +562,7 @@ func (pr *pkgReader) objIdx(idx pkgbits.Index) (*types.Package, string) { // If the underlying type is an interface, we need to // duplicate its methods so we can replace the receiver // parameter's type (#49906). 
- if iface, ok := aliases.Unalias(underlying).(*types.Interface); ok && iface.NumExplicitMethods() != 0 { + if iface, ok := types.Unalias(underlying).(*types.Interface); ok && iface.NumExplicitMethods() != 0 { methods := make([]*types.Func, iface.NumExplicitMethods()) for i := range methods { fn := iface.ExplicitMethod(i) @@ -632,7 +641,10 @@ func (pr *pkgReader) objDictIdx(idx pkgbits.Index) *readerDict { dict.derived = make([]derivedInfo, r.Len()) dict.derivedTypes = make([]types.Type, len(dict.derived)) for i := range dict.derived { - dict.derived[i] = derivedInfo{r.Reloc(pkgbits.RelocType), r.Bool()} + dict.derived[i] = derivedInfo{idx: r.Reloc(pkgbits.RelocType)} + if r.Version().Has(pkgbits.DerivedInfoNeeded) { + assert(!r.Bool()) + } } pr.retireReader(r) @@ -726,3 +738,17 @@ func pkgScope(pkg *types.Package) *types.Scope { } return types.Universe } + +// See cmd/compile/internal/types.SplitVargenSuffix. +func splitVargenSuffix(name string) (base, suffix string) { + i := len(name) + for i > 0 && name[i-1] >= '0' && name[i-1] <= '9' { + i-- + } + const dot = "·" + if i >= len(dot) && name[i-len(dot):i] == dot { + i -= len(dot) + return name[:i], name[i:] + } + return name, "" +} diff --git a/vendor/golang.org/x/tools/internal/gocommand/invoke.go b/vendor/golang.org/x/tools/internal/gocommand/invoke.go index 2e59ff8558..e333efc87f 100644 --- a/vendor/golang.org/x/tools/internal/gocommand/invoke.go +++ b/vendor/golang.org/x/tools/internal/gocommand/invoke.go @@ -16,7 +16,6 @@ import ( "os" "os/exec" "path/filepath" - "reflect" "regexp" "runtime" "strconv" @@ -250,16 +249,13 @@ func (i *Invocation) run(ctx context.Context, stdout, stderr io.Writer) error { cmd.Stdout = stdout cmd.Stderr = stderr - // cmd.WaitDelay was added only in go1.20 (see #50436). - if waitDelay := reflect.ValueOf(cmd).Elem().FieldByName("WaitDelay"); waitDelay.IsValid() { - // https://go.dev/issue/59541: don't wait forever copying stderr - // after the command has exited. - // After CL 484741 we copy stdout manually, so we we'll stop reading that as - // soon as ctx is done. However, we also don't want to wait around forever - // for stderr. Give a much-longer-than-reasonable delay and then assume that - // something has wedged in the kernel or runtime. - waitDelay.Set(reflect.ValueOf(30 * time.Second)) - } + // https://go.dev/issue/59541: don't wait forever copying stderr + // after the command has exited. + // After CL 484741 we copy stdout manually, so we we'll stop reading that as + // soon as ctx is done. However, we also don't want to wait around forever + // for stderr. Give a much-longer-than-reasonable delay and then assume that + // something has wedged in the kernel or runtime. + cmd.WaitDelay = 30 * time.Second // The cwd gets resolved to the real path. On Darwin, where // /tmp is a symlink, this breaks anything that expects the diff --git a/vendor/golang.org/x/tools/internal/imports/fix.go b/vendor/golang.org/x/tools/internal/imports/fix.go index dc7d50a7a4..5ae576977a 100644 --- a/vendor/golang.org/x/tools/internal/imports/fix.go +++ b/vendor/golang.org/x/tools/internal/imports/fix.go @@ -27,7 +27,6 @@ import ( "unicode" "unicode/utf8" - "golang.org/x/sync/errgroup" "golang.org/x/tools/go/ast/astutil" "golang.org/x/tools/internal/event" "golang.org/x/tools/internal/gocommand" @@ -91,18 +90,6 @@ type ImportFix struct { Relevance float64 // see pkg } -// An ImportInfo represents a single import statement. -type ImportInfo struct { - ImportPath string // import path, e.g. "crypto/rand". 
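The gocommand/invoke.go hunk above drops the reflection probe because exec.Cmd has had a WaitDelay field since Go 1.20. A small self-contained usage sketch; the command and timeout values are arbitrary for the example:

package main

import (
	"context"
	"fmt"
	"os/exec"
	"time"
)

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	defer cancel()

	cmd := exec.CommandContext(ctx, "go", "env", "GOROOT")
	// Don't block forever copying output after the process has exited:
	// give up after a generous delay.
	cmd.WaitDelay = 30 * time.Second

	out, err := cmd.Output()
	fmt.Printf("%s err=%v\n", out, err)
}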
- Name string // import name, e.g. "crand", or "" if none. -} - -// A packageInfo represents what's known about a package. -type packageInfo struct { - name string // real package name, if known. - exports map[string]bool // known exports. -} - // parseOtherFiles parses all the Go files in srcDir except filename, including // test files if filename looks like a test. // @@ -131,7 +118,7 @@ func parseOtherFiles(ctx context.Context, fset *token.FileSet, srcDir, filename continue } - f, err := parser.ParseFile(fset, filepath.Join(srcDir, fi.Name()), nil, 0) + f, err := parser.ParseFile(fset, filepath.Join(srcDir, fi.Name()), nil, parser.SkipObjectResolution) if err != nil { continue } @@ -162,8 +149,8 @@ func addGlobals(f *ast.File, globals map[string]bool) { // collectReferences builds a map of selector expressions, from // left hand side (X) to a set of right hand sides (Sel). -func collectReferences(f *ast.File) references { - refs := references{} +func collectReferences(f *ast.File) References { + refs := References{} var visitor visitFn visitor = func(node ast.Node) ast.Visitor { @@ -233,7 +220,7 @@ func (p *pass) findMissingImport(pkg string, syms map[string]bool) *ImportInfo { allFound := true for right := range syms { - if !pkgInfo.exports[right] { + if !pkgInfo.Exports[right] { allFound = false break } @@ -246,11 +233,6 @@ func (p *pass) findMissingImport(pkg string, syms map[string]bool) *ImportInfo { return nil } -// references is set of references found in a Go file. The first map key is the -// left hand side of a selector expression, the second key is the right hand -// side, and the value should always be true. -type references map[string]map[string]bool - // A pass contains all the inputs and state necessary to fix a file's imports. // It can be modified in some ways during use; see comments below. type pass struct { @@ -258,27 +240,29 @@ type pass struct { fset *token.FileSet // fset used to parse f and its siblings. f *ast.File // the file being fixed. srcDir string // the directory containing f. - env *ProcessEnv // the environment to use for go commands, etc. - loadRealPackageNames bool // if true, load package names from disk rather than guessing them. - otherFiles []*ast.File // sibling files. + logf func(string, ...any) + source Source // the environment to use for go commands, etc. + loadRealPackageNames bool // if true, load package names from disk rather than guessing them. + otherFiles []*ast.File // sibling files. + goroot string // Intermediate state, generated by load. existingImports map[string][]*ImportInfo - allRefs references - missingRefs references + allRefs References + missingRefs References // Inputs to fix. These can be augmented between successive fix calls. lastTry bool // indicates that this is the last call and fix should clean up as best it can. candidates []*ImportInfo // candidate imports in priority order. - knownPackages map[string]*packageInfo // information about all known packages. + knownPackages map[string]*PackageInfo // information about all known packages. } // loadPackageNames saves the package names for everything referenced by imports. 
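The fix.go changes above move from the unexported references map to the exported References shape (map[PackageName]map[Symbol]bool). A standalone sketch of collecting such a map from selector expressions; this walker is deliberately simpler than the vendored collectReferences:

package main

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
)

func main() {
	const src = `package p
func f() { fmt.Println(rand.Int()) }`

	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "p.go", src, parser.SkipObjectResolution)
	if err != nil {
		panic(err)
	}

	// package name -> set of selected symbols
	refs := map[string]map[string]bool{}
	ast.Inspect(f, func(n ast.Node) bool {
		if sel, ok := n.(*ast.SelectorExpr); ok {
			if id, ok := sel.X.(*ast.Ident); ok {
				if refs[id.Name] == nil {
					refs[id.Name] = map[string]bool{}
				}
				refs[id.Name][sel.Sel.Name] = true
			}
		}
		return true
	})
	fmt.Println(refs) // map[fmt:map[Println:true] rand:map[Int:true]]
}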
-func (p *pass) loadPackageNames(imports []*ImportInfo) error { - if p.env.Logf != nil { - p.env.Logf("loading package names for %v packages", len(imports)) +func (p *pass) loadPackageNames(ctx context.Context, imports []*ImportInfo) error { + if p.logf != nil { + p.logf("loading package names for %v packages", len(imports)) defer func() { - p.env.Logf("done loading package names for %v packages", len(imports)) + p.logf("done loading package names for %v packages", len(imports)) }() } var unknown []string @@ -289,20 +273,17 @@ func (p *pass) loadPackageNames(imports []*ImportInfo) error { unknown = append(unknown, imp.ImportPath) } - resolver, err := p.env.GetResolver() - if err != nil { - return err - } - - names, err := resolver.loadPackageNames(unknown, p.srcDir) + names, err := p.source.LoadPackageNames(ctx, p.srcDir, unknown) if err != nil { return err } + // TODO(rfindley): revisit this. Why do we need to store known packages with + // no exports? The inconsistent data is confusing. for path, name := range names { - p.knownPackages[path] = &packageInfo{ - name: name, - exports: map[string]bool{}, + p.knownPackages[path] = &PackageInfo{ + Name: name, + Exports: map[string]bool{}, } } return nil @@ -330,8 +311,8 @@ func (p *pass) importIdentifier(imp *ImportInfo) string { return imp.Name } known := p.knownPackages[imp.ImportPath] - if known != nil && known.name != "" { - return withoutVersion(known.name) + if known != nil && known.Name != "" { + return withoutVersion(known.Name) } return ImportPathToAssumedName(imp.ImportPath) } @@ -339,9 +320,9 @@ func (p *pass) importIdentifier(imp *ImportInfo) string { // load reads in everything necessary to run a pass, and reports whether the // file already has all the imports it needs. It fills in p.missingRefs with the // file's missing symbols, if any, or removes unused imports if not. -func (p *pass) load() ([]*ImportFix, bool) { - p.knownPackages = map[string]*packageInfo{} - p.missingRefs = references{} +func (p *pass) load(ctx context.Context) ([]*ImportFix, bool) { + p.knownPackages = map[string]*PackageInfo{} + p.missingRefs = References{} p.existingImports = map[string][]*ImportInfo{} // Load basic information about the file in question. @@ -364,9 +345,11 @@ func (p *pass) load() ([]*ImportFix, bool) { // f's imports by the identifier they introduce. imports := collectImports(p.f) if p.loadRealPackageNames { - err := p.loadPackageNames(append(imports, p.candidates...)) + err := p.loadPackageNames(ctx, append(imports, p.candidates...)) if err != nil { - p.env.logf("loading package names: %v", err) + if p.logf != nil { + p.logf("loading package names: %v", err) + } return nil, false } } @@ -535,9 +518,10 @@ func (p *pass) assumeSiblingImportsValid() { // We have the stdlib in memory; no need to guess. rights = symbolNameSet(m) } - p.addCandidate(imp, &packageInfo{ + // TODO(rfindley): we should set package name here, for consistency. + p.addCandidate(imp, &PackageInfo{ // no name; we already know it. - exports: rights, + Exports: rights, }) } } @@ -546,14 +530,14 @@ func (p *pass) assumeSiblingImportsValid() { // addCandidate adds a candidate import to p, and merges in the information // in pkg. 
-func (p *pass) addCandidate(imp *ImportInfo, pkg *packageInfo) { +func (p *pass) addCandidate(imp *ImportInfo, pkg *PackageInfo) { p.candidates = append(p.candidates, imp) if existing, ok := p.knownPackages[imp.ImportPath]; ok { - if existing.name == "" { - existing.name = pkg.name + if existing.Name == "" { + existing.Name = pkg.Name } - for export := range pkg.exports { - existing.exports[export] = true + for export := range pkg.Exports { + existing.Exports[export] = true } } else { p.knownPackages[imp.ImportPath] = pkg @@ -581,19 +565,42 @@ func fixImportsDefault(fset *token.FileSet, f *ast.File, filename string, env *P // getFixes gets the import fixes that need to be made to f in order to fix the imports. // It does not modify the ast. func getFixes(ctx context.Context, fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv) ([]*ImportFix, error) { + source, err := NewProcessEnvSource(env, filename, f.Name.Name) + if err != nil { + return nil, err + } + goEnv, err := env.goEnv() + if err != nil { + return nil, err + } + return getFixesWithSource(ctx, fset, f, filename, goEnv["GOROOT"], env.logf, source) +} + +func getFixesWithSource(ctx context.Context, fset *token.FileSet, f *ast.File, filename string, goroot string, logf func(string, ...any), source Source) ([]*ImportFix, error) { + // This logic is defensively duplicated from getFixes. abs, err := filepath.Abs(filename) if err != nil { return nil, err } srcDir := filepath.Dir(abs) - env.logf("fixImports(filename=%q), abs=%q, srcDir=%q ...", filename, abs, srcDir) + + if logf != nil { + logf("fixImports(filename=%q), srcDir=%q ...", filename, abs, srcDir) + } // First pass: looking only at f, and using the naive algorithm to // derive package names from import paths, see if the file is already // complete. We can't add any imports yet, because we don't know // if missing references are actually package vars. - p := &pass{fset: fset, f: f, srcDir: srcDir, env: env} - if fixes, done := p.load(); done { + p := &pass{ + fset: fset, + f: f, + srcDir: srcDir, + logf: logf, + goroot: goroot, + source: source, + } + if fixes, done := p.load(ctx); done { return fixes, nil } @@ -605,7 +612,7 @@ func getFixes(ctx context.Context, fset *token.FileSet, f *ast.File, filename st // Second pass: add information from other files in the same package, // like their package vars and imports. p.otherFiles = otherFiles - if fixes, done := p.load(); done { + if fixes, done := p.load(ctx); done { return fixes, nil } @@ -618,10 +625,17 @@ func getFixes(ctx context.Context, fset *token.FileSet, f *ast.File, filename st // Third pass: get real package names where we had previously used // the naive algorithm. 
- p = &pass{fset: fset, f: f, srcDir: srcDir, env: env} + p = &pass{ + fset: fset, + f: f, + srcDir: srcDir, + logf: logf, + goroot: goroot, + source: p.source, // safe to reuse, as it's just a wrapper around env + } p.loadRealPackageNames = true p.otherFiles = otherFiles - if fixes, done := p.load(); done { + if fixes, done := p.load(ctx); done { return fixes, nil } @@ -835,7 +849,7 @@ func GetPackageExports(ctx context.Context, wrapped func(PackageExport), searchP return true }, dirFound: func(pkg *pkg) bool { - return pkgIsCandidate(filename, references{searchPkg: nil}, pkg) + return pkgIsCandidate(filename, References{searchPkg: nil}, pkg) }, packageNameLoaded: func(pkg *pkg) bool { return pkg.packageName == searchPkg @@ -1086,11 +1100,7 @@ func (e *ProcessEnv) invokeGo(ctx context.Context, verb string, args ...string) return e.GocmdRunner.Run(ctx, inv) } -func addStdlibCandidates(pass *pass, refs references) error { - goenv, err := pass.env.goEnv() - if err != nil { - return err - } +func addStdlibCandidates(pass *pass, refs References) error { localbase := func(nm string) string { ans := path.Base(nm) if ans[0] == 'v' { @@ -1105,13 +1115,13 @@ func addStdlibCandidates(pass *pass, refs references) error { } add := func(pkg string) { // Prevent self-imports. - if path.Base(pkg) == pass.f.Name.Name && filepath.Join(goenv["GOROOT"], "src", pkg) == pass.srcDir { + if path.Base(pkg) == pass.f.Name.Name && filepath.Join(pass.goroot, "src", pkg) == pass.srcDir { return } exports := symbolNameSet(stdlib.PackageSymbols[pkg]) pass.addCandidate( &ImportInfo{ImportPath: pkg}, - &packageInfo{name: localbase(pkg), exports: exports}) + &PackageInfo{Name: localbase(pkg), Exports: exports}) } for left := range refs { if left == "rand" { @@ -1175,91 +1185,14 @@ type scanCallback struct { exportsLoaded func(pkg *pkg, exports []stdlib.Symbol) } -func addExternalCandidates(ctx context.Context, pass *pass, refs references, filename string) error { +func addExternalCandidates(ctx context.Context, pass *pass, refs References, filename string) error { ctx, done := event.Start(ctx, "imports.addExternalCandidates") defer done() - var mu sync.Mutex - found := make(map[string][]pkgDistance) - callback := &scanCallback{ - rootFound: func(gopathwalk.Root) bool { - return true // We want everything. - }, - dirFound: func(pkg *pkg) bool { - return pkgIsCandidate(filename, refs, pkg) - }, - packageNameLoaded: func(pkg *pkg) bool { - if _, want := refs[pkg.packageName]; !want { - return false - } - if pkg.dir == pass.srcDir && pass.f.Name.Name == pkg.packageName { - // The candidate is in the same directory and has the - // same package name. Don't try to import ourselves. - return false - } - if !canUse(filename, pkg.dir) { - return false - } - mu.Lock() - defer mu.Unlock() - found[pkg.packageName] = append(found[pkg.packageName], pkgDistance{pkg, distance(pass.srcDir, pkg.dir)}) - return false // We'll do our own loading after we sort. - }, - } - resolver, err := pass.env.GetResolver() + results, err := pass.source.ResolveReferences(ctx, filename, refs) if err != nil { return err } - if err = resolver.scan(ctx, callback); err != nil { - return err - } - - // Search for imports matching potential package references. 
- type result struct { - imp *ImportInfo - pkg *packageInfo - } - results := make([]*result, len(refs)) - - g, ctx := errgroup.WithContext(ctx) - - searcher := symbolSearcher{ - logf: pass.env.logf, - srcDir: pass.srcDir, - xtest: strings.HasSuffix(pass.f.Name.Name, "_test"), - loadExports: resolver.loadExports, - } - - i := 0 - for pkgName, symbols := range refs { - index := i // claim an index in results - i++ - pkgName := pkgName - symbols := symbols - - g.Go(func() error { - found, err := searcher.search(ctx, found[pkgName], pkgName, symbols) - if err != nil { - return err - } - if found == nil { - return nil // No matching package. - } - - imp := &ImportInfo{ - ImportPath: found.importPathShort, - } - pkg := &packageInfo{ - name: pkgName, - exports: symbols, - } - results[index] = &result{imp, pkg} - return nil - }) - } - if err := g.Wait(); err != nil { - return err - } for _, result := range results { if result == nil { @@ -1267,7 +1200,7 @@ func addExternalCandidates(ctx context.Context, pass *pass, refs references, fil } // Don't offer completions that would shadow predeclared // names, such as github.com/coreos/etcd/error. - if types.Universe.Lookup(result.pkg.name) != nil { // predeclared + if types.Universe.Lookup(result.Package.Name) != nil { // predeclared // Ideally we would skip this candidate only // if the predeclared name is actually // referenced by the file, but that's a lot @@ -1276,7 +1209,7 @@ func addExternalCandidates(ctx context.Context, pass *pass, refs references, fil // user before long. continue } - pass.addCandidate(result.imp, result.pkg) + pass.addCandidate(result.Import, result.Package) } return nil } @@ -1620,6 +1553,7 @@ func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string, incl } fullFile := filepath.Join(dir, fi.Name()) + // Legacy ast.Object resolution is needed here. f, err := parser.ParseFile(fset, fullFile, nil, 0) if err != nil { env.logf("error parsing %v: %v", fullFile, err) @@ -1800,7 +1734,7 @@ func (s *symbolSearcher) searchOne(ctx context.Context, c pkgDistance, symbols m // filename is the file being formatted. // pkgIdent is the package being searched for, like "client" (if // searching for "client.New") -func pkgIsCandidate(filename string, refs references, pkg *pkg) bool { +func pkgIsCandidate(filename string, refs References, pkg *pkg) bool { // Check "internal" and "vendor" visibility: if !canUse(filename, pkg.dir) { return false diff --git a/vendor/golang.org/x/tools/internal/imports/imports.go b/vendor/golang.org/x/tools/internal/imports/imports.go index f83465520a..2215a12880 100644 --- a/vendor/golang.org/x/tools/internal/imports/imports.go +++ b/vendor/golang.org/x/tools/internal/imports/imports.go @@ -47,7 +47,14 @@ type Options struct { // Process implements golang.org/x/tools/imports.Process with explicit context in opt.Env. func Process(filename string, src []byte, opt *Options) (formatted []byte, err error) { fileSet := token.NewFileSet() - file, adjust, err := parse(fileSet, filename, src, opt) + var parserMode parser.Mode + if opt.Comments { + parserMode |= parser.ParseComments + } + if opt.AllErrors { + parserMode |= parser.AllErrors + } + file, adjust, err := parse(fileSet, filename, src, parserMode, opt.Fragment) if err != nil { return nil, err } @@ -66,17 +73,19 @@ func Process(filename string, src []byte, opt *Options) (formatted []byte, err e // // Note that filename's directory influences which imports can be chosen, // so it is important that filename be accurate. 
-func FixImports(ctx context.Context, filename string, src []byte, opt *Options) (fixes []*ImportFix, err error) { +func FixImports(ctx context.Context, filename string, src []byte, goroot string, logf func(string, ...any), source Source) (fixes []*ImportFix, err error) { ctx, done := event.Start(ctx, "imports.FixImports") defer done() fileSet := token.NewFileSet() - file, _, err := parse(fileSet, filename, src, opt) + // TODO(rfindley): these default values for ParseComments and AllErrors were + // extracted from gopls, but are they even needed? + file, _, err := parse(fileSet, filename, src, parser.ParseComments|parser.AllErrors, true) if err != nil { return nil, err } - return getFixes(ctx, fileSet, file, filename, opt.Env) + return getFixesWithSource(ctx, fileSet, file, filename, goroot, logf, source) } // ApplyFixes applies all of the fixes to the file and formats it. extraMode @@ -86,7 +95,7 @@ func ApplyFixes(fixes []*ImportFix, filename string, src []byte, opt *Options, e // Don't use parse() -- we don't care about fragments or statement lists // here, and we need to work with unparseable files. fileSet := token.NewFileSet() - parserMode := parser.Mode(0) + parserMode := parser.SkipObjectResolution if opt.Comments { parserMode |= parser.ParseComments } @@ -114,7 +123,7 @@ func ApplyFixes(fixes []*ImportFix, filename string, src []byte, opt *Options, e // formatted file, and returns the postpocessed result. func formatFile(fset *token.FileSet, file *ast.File, src []byte, adjust func(orig []byte, src []byte) []byte, opt *Options) ([]byte, error) { mergeImports(file) - sortImports(opt.LocalPrefix, fset.File(file.Pos()), file) + sortImports(opt.LocalPrefix, fset.File(file.FileStart), file) var spacesBefore []string // import paths we need spaces before for _, impSection := range astutil.Imports(fset, file) { // Within each block of contiguous imports, see if any @@ -164,13 +173,9 @@ func formatFile(fset *token.FileSet, file *ast.File, src []byte, adjust func(ori // parse parses src, which was read from filename, // as a Go source file or statement list. -func parse(fset *token.FileSet, filename string, src []byte, opt *Options) (*ast.File, func(orig, src []byte) []byte, error) { - parserMode := parser.Mode(0) - if opt.Comments { - parserMode |= parser.ParseComments - } - if opt.AllErrors { - parserMode |= parser.AllErrors +func parse(fset *token.FileSet, filename string, src []byte, parserMode parser.Mode, fragment bool) (*ast.File, func(orig, src []byte) []byte, error) { + if parserMode&parser.SkipObjectResolution != 0 { + panic("legacy ast.Object resolution is required") } // Try as whole source file. @@ -181,7 +186,7 @@ func parse(fset *token.FileSet, filename string, src []byte, opt *Options) (*ast // If the error is that the source file didn't begin with a // package line and we accept fragmented input, fall through to // try as a source fragment. Stop and return on any other error. - if !opt.Fragment || !strings.Contains(err.Error(), "expected 'package'") { + if !fragment || !strings.Contains(err.Error(), "expected 'package'") { return nil, nil, err } diff --git a/vendor/golang.org/x/tools/internal/imports/mod.go b/vendor/golang.org/x/tools/internal/imports/mod.go index 91221fda32..8555e3f83d 100644 --- a/vendor/golang.org/x/tools/internal/imports/mod.go +++ b/vendor/golang.org/x/tools/internal/imports/mod.go @@ -245,7 +245,10 @@ func newModuleResolver(e *ProcessEnv, moduleCacheCache *DirInfoCache) (*ModuleRe // 2. Use this to separate module cache scanning from other scanning. 
func gomodcacheForEnv(goenv map[string]string) string { if gmc := goenv["GOMODCACHE"]; gmc != "" { - return gmc + // golang/go#67156: ensure that the module cache is clean, since it is + // assumed as a prefix to directories scanned by gopathwalk, which are + // themselves clean. + return filepath.Clean(gmc) } gopaths := filepath.SplitList(goenv["GOPATH"]) if len(gopaths) == 0 { @@ -740,8 +743,8 @@ func (r *ModuleResolver) loadExports(ctx context.Context, pkg *pkg, includeTest func (r *ModuleResolver) scanDirForPackage(root gopathwalk.Root, dir string) directoryPackageInfo { subdir := "" - if dir != root.Path { - subdir = dir[len(root.Path)+len("/"):] + if prefix := root.Path + string(filepath.Separator); strings.HasPrefix(dir, prefix) { + subdir = dir[len(prefix):] } importPath := filepath.ToSlash(subdir) if strings.HasPrefix(importPath, "vendor/") { diff --git a/vendor/golang.org/x/tools/internal/imports/source.go b/vendor/golang.org/x/tools/internal/imports/source.go new file mode 100644 index 0000000000..5d2aeeebc9 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/imports/source.go @@ -0,0 +1,63 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package imports + +import "context" + +// These types document the APIs below. +// +// TODO(rfindley): consider making these defined types rather than aliases. +type ( + ImportPath = string + PackageName = string + Symbol = string + + // References is set of References found in a Go file. The first map key is the + // left hand side of a selector expression, the second key is the right hand + // side, and the value should always be true. + References = map[PackageName]map[Symbol]bool +) + +// A Result satisfies a missing import. +// +// The Import field describes the missing import spec, and the Package field +// summarizes the package exports. +type Result struct { + Import *ImportInfo + Package *PackageInfo +} + +// An ImportInfo represents a single import statement. +type ImportInfo struct { + ImportPath string // import path, e.g. "crypto/rand". + Name string // import name, e.g. "crand", or "" if none. +} + +// A PackageInfo represents what's known about a package. +type PackageInfo struct { + Name string // package name in the package declaration, if known + Exports map[string]bool // set of names of known package level sortSymbols +} + +// A Source provides imports to satisfy unresolved references in the file being +// fixed. +type Source interface { + // LoadPackageNames queries PackageName information for the requested import + // paths, when operating from the provided srcDir. + // + // TODO(rfindley): try to refactor to remove this operation. + LoadPackageNames(ctx context.Context, srcDir string, paths []ImportPath) (map[ImportPath]PackageName, error) + + // ResolveReferences asks the Source for the best package name to satisfy + // each of the missing references, in the context of fixing the given + // filename. + // + // Returns a map from package name to a [Result] for that package name that + // provides the required symbols. Keys may be omitted in the map if no + // candidates satisfy all missing references for that package name. It is up + // to each data source to select the best result for each entry in the + // missing map. 
+ ResolveReferences(ctx context.Context, filename string, missing References) (map[PackageName]*Result, error) +} diff --git a/vendor/golang.org/x/tools/internal/imports/source_env.go b/vendor/golang.org/x/tools/internal/imports/source_env.go new file mode 100644 index 0000000000..ff9555d287 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/imports/source_env.go @@ -0,0 +1,125 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package imports + +import ( + "context" + "path/filepath" + "strings" + "sync" + + "golang.org/x/sync/errgroup" + "golang.org/x/tools/internal/gopathwalk" +) + +// ProcessEnvSource implements the [Source] interface using the legacy +// [ProcessEnv] abstraction. +type ProcessEnvSource struct { + env *ProcessEnv + srcDir string + filename string + pkgName string +} + +// NewProcessEnvSource returns a [ProcessEnvSource] wrapping the given +// env, to be used for fixing imports in the file with name filename in package +// named pkgName. +func NewProcessEnvSource(env *ProcessEnv, filename, pkgName string) (*ProcessEnvSource, error) { + abs, err := filepath.Abs(filename) + if err != nil { + return nil, err + } + srcDir := filepath.Dir(abs) + return &ProcessEnvSource{ + env: env, + srcDir: srcDir, + filename: filename, + pkgName: pkgName, + }, nil +} + +func (s *ProcessEnvSource) LoadPackageNames(ctx context.Context, srcDir string, unknown []string) (map[string]string, error) { + r, err := s.env.GetResolver() + if err != nil { + return nil, err + } + return r.loadPackageNames(unknown, srcDir) +} + +func (s *ProcessEnvSource) ResolveReferences(ctx context.Context, filename string, refs map[string]map[string]bool) (map[string]*Result, error) { + var mu sync.Mutex + found := make(map[string][]pkgDistance) + callback := &scanCallback{ + rootFound: func(gopathwalk.Root) bool { + return true // We want everything. + }, + dirFound: func(pkg *pkg) bool { + return pkgIsCandidate(filename, refs, pkg) + }, + packageNameLoaded: func(pkg *pkg) bool { + if _, want := refs[pkg.packageName]; !want { + return false + } + if pkg.dir == s.srcDir && s.pkgName == pkg.packageName { + // The candidate is in the same directory and has the + // same package name. Don't try to import ourselves. + return false + } + if !canUse(filename, pkg.dir) { + return false + } + mu.Lock() + defer mu.Unlock() + found[pkg.packageName] = append(found[pkg.packageName], pkgDistance{pkg, distance(s.srcDir, pkg.dir)}) + return false // We'll do our own loading after we sort. + }, + } + resolver, err := s.env.GetResolver() + if err != nil { + return nil, err + } + if err := resolver.scan(ctx, callback); err != nil { + return nil, err + } + + g, ctx := errgroup.WithContext(ctx) + + searcher := symbolSearcher{ + logf: s.env.logf, + srcDir: s.srcDir, + xtest: strings.HasSuffix(s.pkgName, "_test"), + loadExports: resolver.loadExports, + } + + var resultMu sync.Mutex + results := make(map[string]*Result, len(refs)) + for pkgName, symbols := range refs { + g.Go(func() error { + found, err := searcher.search(ctx, found[pkgName], pkgName, symbols) + if err != nil { + return err + } + if found == nil { + return nil // No matching package. 
+ } + + imp := &ImportInfo{ + ImportPath: found.importPathShort, + } + pkg := &PackageInfo{ + Name: pkgName, + Exports: symbols, + } + resultMu.Lock() + results[pkgName] = &Result{Import: imp, Package: pkg} + resultMu.Unlock() + return nil + }) + } + if err := g.Wait(); err != nil { + return nil, err + } + return results, nil +} diff --git a/vendor/golang.org/x/tools/internal/pkgbits/decoder.go b/vendor/golang.org/x/tools/internal/pkgbits/decoder.go index b92e8e6eb3..f6cb37c5c3 100644 --- a/vendor/golang.org/x/tools/internal/pkgbits/decoder.go +++ b/vendor/golang.org/x/tools/internal/pkgbits/decoder.go @@ -21,7 +21,7 @@ import ( // export data. type PkgDecoder struct { // version is the file format version. - version uint32 + version Version // sync indicates whether the file uses sync markers. sync bool @@ -68,8 +68,6 @@ func (pr *PkgDecoder) SyncMarkers() bool { return pr.sync } // NewPkgDecoder returns a PkgDecoder initialized to read the Unified // IR export data from input. pkgPath is the package path for the // compilation unit that produced the export data. -// -// TODO(mdempsky): Remove pkgPath parameter; unneeded since CL 391014. func NewPkgDecoder(pkgPath, input string) PkgDecoder { pr := PkgDecoder{ pkgPath: pkgPath, @@ -80,14 +78,15 @@ func NewPkgDecoder(pkgPath, input string) PkgDecoder { r := strings.NewReader(input) - assert(binary.Read(r, binary.LittleEndian, &pr.version) == nil) + var ver uint32 + assert(binary.Read(r, binary.LittleEndian, &ver) == nil) + pr.version = Version(ver) - switch pr.version { - default: - panic(fmt.Errorf("unsupported version: %v", pr.version)) - case 0: - // no flags - case 1: + if pr.version >= numVersions { + panic(fmt.Errorf("cannot decode %q, export data version %d is greater than maximum supported version %d", pkgPath, pr.version, numVersions-1)) + } + + if pr.version.Has(Flags) { var flags uint32 assert(binary.Read(r, binary.LittleEndian, &flags) == nil) pr.sync = flags&flagSyncMarkers != 0 @@ -102,7 +101,9 @@ func NewPkgDecoder(pkgPath, input string) PkgDecoder { assert(err == nil) pr.elemData = input[pos:] - assert(len(pr.elemData)-8 == int(pr.elemEnds[len(pr.elemEnds)-1])) + + const fingerprintSize = 8 + assert(len(pr.elemData)-fingerprintSize == int(pr.elemEnds[len(pr.elemEnds)-1])) return pr } @@ -136,7 +137,7 @@ func (pr *PkgDecoder) AbsIdx(k RelocKind, idx Index) int { absIdx += int(pr.elemEndsEnds[k-1]) } if absIdx >= int(pr.elemEndsEnds[k]) { - errorf("%v:%v is out of bounds; %v", k, idx, pr.elemEndsEnds) + panicf("%v:%v is out of bounds; %v", k, idx, pr.elemEndsEnds) } return absIdx } @@ -193,9 +194,7 @@ func (pr *PkgDecoder) NewDecoderRaw(k RelocKind, idx Index) Decoder { Idx: idx, } - // TODO(mdempsky) r.data.Reset(...) after #44505 is resolved. - r.Data = *strings.NewReader(pr.DataIdx(k, idx)) - + r.Data.Reset(pr.DataIdx(k, idx)) r.Sync(SyncRelocs) r.Relocs = make([]RelocEnt, r.Len()) for i := range r.Relocs { @@ -244,7 +243,7 @@ type Decoder struct { func (r *Decoder) checkErr(err error) { if err != nil { - errorf("unexpected decoding error: %w", err) + panicf("unexpected decoding error: %w", err) } } @@ -515,3 +514,6 @@ func (pr *PkgDecoder) PeekObj(idx Index) (string, string, CodeObj) { return path, name, tag } + +// Version reports the version of the bitstream. 
+func (w *Decoder) Version() Version { return w.common.version } diff --git a/vendor/golang.org/x/tools/internal/pkgbits/encoder.go b/vendor/golang.org/x/tools/internal/pkgbits/encoder.go index 6482617a4f..c17a12399d 100644 --- a/vendor/golang.org/x/tools/internal/pkgbits/encoder.go +++ b/vendor/golang.org/x/tools/internal/pkgbits/encoder.go @@ -12,18 +12,15 @@ import ( "io" "math/big" "runtime" + "strings" ) -// currentVersion is the current version number. -// -// - v0: initial prototype -// -// - v1: adds the flags uint32 word -const currentVersion uint32 = 1 - // A PkgEncoder provides methods for encoding a package's Unified IR // export data. type PkgEncoder struct { + // version of the bitstream. + version Version + // elems holds the bitstream for previously encoded elements. elems [numRelocs][]string @@ -47,8 +44,9 @@ func (pw *PkgEncoder) SyncMarkers() bool { return pw.syncFrames >= 0 } // export data files, but can help diagnosing desync errors in // higher-level Unified IR reader/writer code. If syncFrames is // negative, then sync markers are omitted entirely. -func NewPkgEncoder(syncFrames int) PkgEncoder { +func NewPkgEncoder(version Version, syncFrames int) PkgEncoder { return PkgEncoder{ + version: version, stringsIdx: make(map[string]Index), syncFrames: syncFrames, } @@ -64,13 +62,15 @@ func (pw *PkgEncoder) DumpTo(out0 io.Writer) (fingerprint [8]byte) { assert(binary.Write(out, binary.LittleEndian, x) == nil) } - writeUint32(currentVersion) + writeUint32(uint32(pw.version)) - var flags uint32 - if pw.SyncMarkers() { - flags |= flagSyncMarkers + if pw.version.Has(Flags) { + var flags uint32 + if pw.SyncMarkers() { + flags |= flagSyncMarkers + } + writeUint32(flags) } - writeUint32(flags) // Write elemEndsEnds. var sum uint32 @@ -159,7 +159,7 @@ type Encoder struct { // Flush finalizes the element's bitstream and returns its Index. func (w *Encoder) Flush() Index { - var sb bytes.Buffer // TODO(mdempsky): strings.Builder after #44505 is resolved + var sb strings.Builder // Backup the data so we write the relocations at the front. var tmp bytes.Buffer @@ -189,7 +189,7 @@ func (w *Encoder) Flush() Index { func (w *Encoder) checkErr(err error) { if err != nil { - errorf("unexpected encoding error: %v", err) + panicf("unexpected encoding error: %v", err) } } @@ -320,8 +320,14 @@ func (w *Encoder) Code(c Code) { // section (if not already present), and then writing a relocation // into the element bitstream. func (w *Encoder) String(s string) { + w.StringRef(w.p.StringIdx(s)) +} + +// StringRef writes a reference to the given index, which must be a +// previously encoded string value. +func (w *Encoder) StringRef(idx Index) { w.Sync(SyncString) - w.Reloc(RelocString, w.p.StringIdx(s)) + w.Reloc(RelocString, idx) } // Strings encodes and writes a variable-length slice of strings into @@ -348,7 +354,7 @@ func (w *Encoder) Value(val constant.Value) { func (w *Encoder) scalar(val constant.Value) { switch v := constant.Val(val).(type) { default: - errorf("unhandled %v (%v)", val, val.Kind()) + panicf("unhandled %v (%v)", val, val.Kind()) case bool: w.Code(ValBool) w.Bool(v) @@ -381,3 +387,6 @@ func (w *Encoder) bigFloat(v *big.Float) { b := v.Append(nil, 'p', -1) w.String(string(b)) // TODO: More efficient encoding. } + +// Version reports the version of the bitstream. 
+func (w *Encoder) Version() Version { return w.p.version } diff --git a/vendor/golang.org/x/tools/internal/pkgbits/frames_go1.go b/vendor/golang.org/x/tools/internal/pkgbits/frames_go1.go deleted file mode 100644 index 5294f6a63e..0000000000 --- a/vendor/golang.org/x/tools/internal/pkgbits/frames_go1.go +++ /dev/null @@ -1,21 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.7 -// +build !go1.7 - -// TODO(mdempsky): Remove after #44505 is resolved - -package pkgbits - -import "runtime" - -func walkFrames(pcs []uintptr, visit frameVisitor) { - for _, pc := range pcs { - fn := runtime.FuncForPC(pc) - file, line := fn.FileLine(pc) - - visit(file, line, fn.Name(), pc-fn.Entry()) - } -} diff --git a/vendor/golang.org/x/tools/internal/pkgbits/frames_go17.go b/vendor/golang.org/x/tools/internal/pkgbits/frames_go17.go deleted file mode 100644 index 2324ae7adf..0000000000 --- a/vendor/golang.org/x/tools/internal/pkgbits/frames_go17.go +++ /dev/null @@ -1,28 +0,0 @@ -// Copyright 2021 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.7 -// +build go1.7 - -package pkgbits - -import "runtime" - -// walkFrames calls visit for each call frame represented by pcs. -// -// pcs should be a slice of PCs, as returned by runtime.Callers. -func walkFrames(pcs []uintptr, visit frameVisitor) { - if len(pcs) == 0 { - return - } - - frames := runtime.CallersFrames(pcs) - for { - frame, more := frames.Next() - visit(frame.File, frame.Line, frame.Function, frame.PC-frame.Entry) - if !more { - return - } - } -} diff --git a/vendor/golang.org/x/tools/internal/pkgbits/support.go b/vendor/golang.org/x/tools/internal/pkgbits/support.go index ad26d3b28c..50534a2955 100644 --- a/vendor/golang.org/x/tools/internal/pkgbits/support.go +++ b/vendor/golang.org/x/tools/internal/pkgbits/support.go @@ -12,6 +12,6 @@ func assert(b bool) { } } -func errorf(format string, args ...interface{}) { +func panicf(format string, args ...any) { panic(fmt.Errorf(format, args...)) } diff --git a/vendor/golang.org/x/tools/internal/pkgbits/sync.go b/vendor/golang.org/x/tools/internal/pkgbits/sync.go index 5bd51ef717..1520b73afb 100644 --- a/vendor/golang.org/x/tools/internal/pkgbits/sync.go +++ b/vendor/golang.org/x/tools/internal/pkgbits/sync.go @@ -6,6 +6,7 @@ package pkgbits import ( "fmt" + "runtime" "strings" ) @@ -23,6 +24,24 @@ func fmtFrames(pcs ...uintptr) []string { type frameVisitor func(file string, line int, name string, offset uintptr) +// walkFrames calls visit for each call frame represented by pcs. +// +// pcs should be a slice of PCs, as returned by runtime.Callers. +func walkFrames(pcs []uintptr, visit frameVisitor) { + if len(pcs) == 0 { + return + } + + frames := runtime.CallersFrames(pcs) + for { + frame, more := frames.Next() + visit(frame.File, frame.Line, frame.Function, frame.PC-frame.Entry) + if !more { + return + } + } +} + // SyncMarker is an enum type that represents markers that may be // written to export data to ensure the reader and writer stay // synchronized. 
@@ -110,4 +129,8 @@ const ( SyncStmtsEnd SyncLabel SyncOptLabel + + SyncMultiExpr + SyncRType + SyncConvRTTI ) diff --git a/vendor/golang.org/x/tools/internal/pkgbits/syncmarker_string.go b/vendor/golang.org/x/tools/internal/pkgbits/syncmarker_string.go index 4a5b0ca5f2..582ad56d3e 100644 --- a/vendor/golang.org/x/tools/internal/pkgbits/syncmarker_string.go +++ b/vendor/golang.org/x/tools/internal/pkgbits/syncmarker_string.go @@ -74,11 +74,14 @@ func _() { _ = x[SyncStmtsEnd-64] _ = x[SyncLabel-65] _ = x[SyncOptLabel-66] + _ = x[SyncMultiExpr-67] + _ = x[SyncRType-68] + _ = x[SyncConvRTTI-69] } -const _SyncMarker_name = "EOFBoolInt64Uint64StringValueValRelocsRelocUseRelocPublicPosPosBaseObjectObject1PkgPkgDefMethodTypeTypeIdxTypeParamNamesSignatureParamsParamCodeObjSymLocalIdentSelectorPrivateFuncExtVarExtTypeExtPragmaExprListExprsExprExprTypeAssignOpFuncLitCompLitDeclFuncBodyOpenScopeCloseScopeCloseAnotherScopeDeclNamesDeclNameStmtsBlockStmtIfStmtForStmtSwitchStmtRangeStmtCaseClauseCommClauseSelectStmtDeclsLabeledStmtUseObjLocalAddLocalLinknameStmt1StmtsEndLabelOptLabel" +const _SyncMarker_name = "EOFBoolInt64Uint64StringValueValRelocsRelocUseRelocPublicPosPosBaseObjectObject1PkgPkgDefMethodTypeTypeIdxTypeParamNamesSignatureParamsParamCodeObjSymLocalIdentSelectorPrivateFuncExtVarExtTypeExtPragmaExprListExprsExprExprTypeAssignOpFuncLitCompLitDeclFuncBodyOpenScopeCloseScopeCloseAnotherScopeDeclNamesDeclNameStmtsBlockStmtIfStmtForStmtSwitchStmtRangeStmtCaseClauseCommClauseSelectStmtDeclsLabeledStmtUseObjLocalAddLocalLinknameStmt1StmtsEndLabelOptLabelMultiExprRTypeConvRTTI" -var _SyncMarker_index = [...]uint16{0, 3, 7, 12, 18, 24, 29, 32, 38, 43, 51, 57, 60, 67, 73, 80, 83, 89, 95, 99, 106, 120, 129, 135, 140, 147, 150, 160, 168, 175, 182, 188, 195, 201, 209, 214, 218, 226, 232, 234, 241, 248, 252, 260, 269, 279, 296, 305, 313, 318, 327, 333, 340, 350, 359, 369, 379, 389, 394, 405, 416, 424, 432, 437, 445, 450, 458} +var _SyncMarker_index = [...]uint16{0, 3, 7, 12, 18, 24, 29, 32, 38, 43, 51, 57, 60, 67, 73, 80, 83, 89, 95, 99, 106, 120, 129, 135, 140, 147, 150, 160, 168, 175, 182, 188, 195, 201, 209, 214, 218, 226, 232, 234, 241, 248, 252, 260, 269, 279, 296, 305, 313, 318, 327, 333, 340, 350, 359, 369, 379, 389, 394, 405, 416, 424, 432, 437, 445, 450, 458, 467, 472, 480} func (i SyncMarker) String() string { i -= 1 diff --git a/vendor/golang.org/x/tools/internal/pkgbits/version.go b/vendor/golang.org/x/tools/internal/pkgbits/version.go new file mode 100644 index 0000000000..53af9df22b --- /dev/null +++ b/vendor/golang.org/x/tools/internal/pkgbits/version.go @@ -0,0 +1,85 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package pkgbits + +// Version indicates a version of a unified IR bitstream. +// Each Version indicates the addition, removal, or change of +// new data in the bitstream. +// +// These are serialized to disk and the interpretation remains fixed. +type Version uint32 + +const ( + // V0: initial prototype. + // + // All data that is not assigned a Field is in version V0 + // and has not been deprecated. + V0 Version = iota + + // V1: adds the Flags uint32 word + V1 + + // V2: removes unused legacy fields and supports type parameters for aliases. 
+ // - remove the legacy "has init" bool from the public root + // - remove obj's "derived func instance" bool + // - add a TypeParamNames field to ObjAlias + // - remove derived info "needed" bool + V2 + + numVersions = iota +) + +// Field denotes a unit of data in the serialized unified IR bitstream. +// It is conceptually a like field in a structure. +// +// We only really need Fields when the data may or may not be present +// in a stream based on the Version of the bitstream. +// +// Unlike much of pkgbits, Fields are not serialized and +// can change values as needed. +type Field int + +const ( + // Flags in a uint32 in the header of a bitstream + // that is used to indicate whether optional features are enabled. + Flags Field = iota + + // Deprecated: HasInit was a bool indicating whether a package + // has any init functions. + HasInit + + // Deprecated: DerivedFuncInstance was a bool indicating + // whether an object was a function instance. + DerivedFuncInstance + + // ObjAlias has a list of TypeParamNames. + AliasTypeParamNames + + // Deprecated: DerivedInfoNeeded was a bool indicating + // whether a type was a derived type. + DerivedInfoNeeded + + numFields = iota +) + +// introduced is the version a field was added. +var introduced = [numFields]Version{ + Flags: V1, + AliasTypeParamNames: V2, +} + +// removed is the version a field was removed in or 0 for fields +// that have not yet been deprecated. +// (So removed[f]-1 is the last version it is included in.) +var removed = [numFields]Version{ + HasInit: V2, + DerivedFuncInstance: V2, + DerivedInfoNeeded: V2, +} + +// Has reports whether field f is present in a bitstream at version v. +func (v Version) Has(f Field) bool { + return introduced[f] <= v && (v < removed[f] || removed[f] == V0) +} diff --git a/vendor/golang.org/x/tools/internal/stdlib/manifest.go b/vendor/golang.org/x/tools/internal/stdlib/manifest.go index a928acf29f..cdaac9ab34 100644 --- a/vendor/golang.org/x/tools/internal/stdlib/manifest.go +++ b/vendor/golang.org/x/tools/internal/stdlib/manifest.go @@ -951,7 +951,7 @@ var PackageSymbols = map[string][]Symbol{ {"ParseSessionState", Func, 21}, {"QUICClient", Func, 21}, {"QUICConfig", Type, 21}, - {"QUICConfig.EnableStoreSessionEvent", Field, 23}, + {"QUICConfig.EnableSessionEvents", Field, 23}, {"QUICConfig.TLSConfig", Field, 21}, {"QUICConn", Type, 21}, {"QUICEncryptionLevel", Type, 21}, diff --git a/vendor/golang.org/x/tools/internal/tokeninternal/tokeninternal.go b/vendor/golang.org/x/tools/internal/tokeninternal/tokeninternal.go deleted file mode 100644 index ff9437a36c..0000000000 --- a/vendor/golang.org/x/tools/internal/tokeninternal/tokeninternal.go +++ /dev/null @@ -1,137 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// package tokeninternal provides access to some internal features of the token -// package. -package tokeninternal - -import ( - "fmt" - "go/token" - "sort" - "sync" - "unsafe" -) - -// GetLines returns the table of line-start offsets from a token.File. -func GetLines(file *token.File) []int { - // token.File has a Lines method on Go 1.21 and later. - if file, ok := (interface{})(file).(interface{ Lines() []int }); ok { - return file.Lines() - } - - // This declaration must match that of token.File. - // This creates a risk of dependency skew. 
- // For now we check that the size of the two - // declarations is the same, on the (fragile) assumption - // that future changes would add fields. - type tokenFile119 struct { - _ string - _ int - _ int - mu sync.Mutex // we're not complete monsters - lines []int - _ []struct{} - } - - if unsafe.Sizeof(*file) != unsafe.Sizeof(tokenFile119{}) { - panic("unexpected token.File size") - } - var ptr *tokenFile119 - type uP = unsafe.Pointer - *(*uP)(uP(&ptr)) = uP(file) - ptr.mu.Lock() - defer ptr.mu.Unlock() - return ptr.lines -} - -// AddExistingFiles adds the specified files to the FileSet if they -// are not already present. It panics if any pair of files in the -// resulting FileSet would overlap. -func AddExistingFiles(fset *token.FileSet, files []*token.File) { - // Punch through the FileSet encapsulation. - type tokenFileSet struct { - // This type remained essentially consistent from go1.16 to go1.21. - mutex sync.RWMutex - base int - files []*token.File - _ *token.File // changed to atomic.Pointer[token.File] in go1.19 - } - - // If the size of token.FileSet changes, this will fail to compile. - const delta = int64(unsafe.Sizeof(tokenFileSet{})) - int64(unsafe.Sizeof(token.FileSet{})) - var _ [-delta * delta]int - - type uP = unsafe.Pointer - var ptr *tokenFileSet - *(*uP)(uP(&ptr)) = uP(fset) - ptr.mutex.Lock() - defer ptr.mutex.Unlock() - - // Merge and sort. - newFiles := append(ptr.files, files...) - sort.Slice(newFiles, func(i, j int) bool { - return newFiles[i].Base() < newFiles[j].Base() - }) - - // Reject overlapping files. - // Discard adjacent identical files. - out := newFiles[:0] - for i, file := range newFiles { - if i > 0 { - prev := newFiles[i-1] - if file == prev { - continue - } - if prev.Base()+prev.Size()+1 > file.Base() { - panic(fmt.Sprintf("file %s (%d-%d) overlaps with file %s (%d-%d)", - prev.Name(), prev.Base(), prev.Base()+prev.Size(), - file.Name(), file.Base(), file.Base()+file.Size())) - } - } - out = append(out, file) - } - newFiles = out - - ptr.files = newFiles - - // Advance FileSet.Base(). - if len(newFiles) > 0 { - last := newFiles[len(newFiles)-1] - newBase := last.Base() + last.Size() + 1 - if ptr.base < newBase { - ptr.base = newBase - } - } -} - -// FileSetFor returns a new FileSet containing a sequence of new Files with -// the same base, size, and line as the input files, for use in APIs that -// require a FileSet. -// -// Precondition: the input files must be non-overlapping, and sorted in order -// of their Base. -func FileSetFor(files ...*token.File) *token.FileSet { - fset := token.NewFileSet() - for _, f := range files { - f2 := fset.AddFile(f.Name(), f.Base(), f.Size()) - lines := GetLines(f) - f2.SetLines(lines) - } - return fset -} - -// CloneFileSet creates a new FileSet holding all files in fset. It does not -// create copies of the token.Files in fset: they are added to the resulting -// FileSet unmodified. 
-func CloneFileSet(fset *token.FileSet) *token.FileSet { - var files []*token.File - fset.Iterate(func(f *token.File) bool { - files = append(files, f) - return true - }) - newFileSet := token.NewFileSet() - AddExistingFiles(newFileSet, files) - return newFileSet -} diff --git a/vendor/golang.org/x/tools/internal/typeparams/common.go b/vendor/golang.org/x/tools/internal/typeparams/common.go index 89bd256dc6..0b84acc5c7 100644 --- a/vendor/golang.org/x/tools/internal/typeparams/common.go +++ b/vendor/golang.org/x/tools/internal/typeparams/common.go @@ -16,8 +16,6 @@ import ( "go/ast" "go/token" "go/types" - - "golang.org/x/tools/internal/aliases" ) // UnpackIndexExpr extracts data from AST nodes that represent index @@ -65,7 +63,7 @@ func PackIndexExpr(x ast.Expr, lbrack token.Pos, indices []ast.Expr, rbrack toke // IsTypeParam reports whether t is a type parameter (or an alias of one). func IsTypeParam(t types.Type) bool { - _, ok := aliases.Unalias(t).(*types.TypeParam) + _, ok := types.Unalias(t).(*types.TypeParam) return ok } @@ -93,8 +91,8 @@ func IsTypeParam(t types.Type) bool { // In this case, GenericAssignableTo reports that instantiations of Container // are assignable to the corresponding instantiation of Interface. func GenericAssignableTo(ctxt *types.Context, V, T types.Type) bool { - V = aliases.Unalias(V) - T = aliases.Unalias(T) + V = types.Unalias(V) + T = types.Unalias(T) // If V and T are not both named, or do not have matching non-empty type // parameter lists, fall back on types.AssignableTo. diff --git a/vendor/golang.org/x/tools/internal/typeparams/free.go b/vendor/golang.org/x/tools/internal/typeparams/free.go index a1d138226c..0ade5c2949 100644 --- a/vendor/golang.org/x/tools/internal/typeparams/free.go +++ b/vendor/golang.org/x/tools/internal/typeparams/free.go @@ -37,8 +37,20 @@ func (w *Free) Has(typ types.Type) (res bool) { case nil, *types.Basic: // TODO(gri) should nil be handled here? break - case *aliases.Alias: - return w.Has(aliases.Unalias(t)) + case *types.Alias: + if aliases.TypeParams(t).Len() > aliases.TypeArgs(t).Len() { + return true // This is an uninstantiated Alias. + } + // The expansion of an alias can have free type parameters, + // whether or not the alias itself has type parameters: + // + // func _[K comparable]() { + // type Set = map[K]bool // free(Set) = {K} + // type MapTo[V] = map[K]V // free(Map[foo]) = {V} + // } + // + // So, we must Unalias. + return w.Has(types.Unalias(t)) case *types.Array: return w.Has(t.Elem()) @@ -98,9 +110,8 @@ func (w *Free) Has(typ types.Type) (res bool) { case *types.Named: args := t.TypeArgs() - // TODO(taking): this does not match go/types/infer.go. Check with rfindley. if params := t.TypeParams(); params.Len() > args.Len() { - return true + return true // this is an uninstantiated named type. } for i, n := 0, args.Len(); i < n; i++ { if w.Has(args.At(i)) { diff --git a/vendor/golang.org/x/tools/internal/typesinternal/element.go b/vendor/golang.org/x/tools/internal/typesinternal/element.go new file mode 100644 index 0000000000..4957f02164 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/typesinternal/element.go @@ -0,0 +1,133 @@ +// Copyright 2024 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package typesinternal + +import ( + "fmt" + "go/types" + + "golang.org/x/tools/go/types/typeutil" +) + +// ForEachElement calls f for type T and each type reachable from its +// type through reflection. 
It does this by recursively stripping off +// type constructors; in addition, for each named type N, the type *N +// is added to the result as it may have additional methods. +// +// The caller must provide an initially empty set used to de-duplicate +// identical types, potentially across multiple calls to ForEachElement. +// (Its final value holds all the elements seen, matching the arguments +// passed to f.) +// +// TODO(adonovan): share/harmonize with go/callgraph/rta. +func ForEachElement(rtypes *typeutil.Map, msets *typeutil.MethodSetCache, T types.Type, f func(types.Type)) { + var visit func(T types.Type, skip bool) + visit = func(T types.Type, skip bool) { + if !skip { + if seen, _ := rtypes.Set(T, true).(bool); seen { + return // de-dup + } + + f(T) // notify caller of new element type + } + + // Recursion over signatures of each method. + tmset := msets.MethodSet(T) + for i := 0; i < tmset.Len(); i++ { + sig := tmset.At(i).Type().(*types.Signature) + // It is tempting to call visit(sig, false) + // but, as noted in golang.org/cl/65450043, + // the Signature.Recv field is ignored by + // types.Identical and typeutil.Map, which + // is confusing at best. + // + // More importantly, the true signature rtype + // reachable from a method using reflection + // has no receiver but an extra ordinary parameter. + // For the Read method of io.Reader we want: + // func(Reader, []byte) (int, error) + // but here sig is: + // func([]byte) (int, error) + // with .Recv = Reader (though it is hard to + // notice because it doesn't affect Signature.String + // or types.Identical). + // + // TODO(adonovan): construct and visit the correct + // non-method signature with an extra parameter + // (though since unnamed func types have no methods + // there is essentially no actual demand for this). + // + // TODO(adonovan): document whether or not it is + // safe to skip non-exported methods (as RTA does). + visit(sig.Params(), true) // skip the Tuple + visit(sig.Results(), true) // skip the Tuple + } + + switch T := T.(type) { + case *types.Alias: + visit(types.Unalias(T), skip) // emulates the pre-Alias behavior + + case *types.Basic: + // nop + + case *types.Interface: + // nop---handled by recursion over method set. + + case *types.Pointer: + visit(T.Elem(), false) + + case *types.Slice: + visit(T.Elem(), false) + + case *types.Chan: + visit(T.Elem(), false) + + case *types.Map: + visit(T.Key(), false) + visit(T.Elem(), false) + + case *types.Signature: + if T.Recv() != nil { + panic(fmt.Sprintf("Signature %s has Recv %s", T, T.Recv())) + } + visit(T.Params(), true) // skip the Tuple + visit(T.Results(), true) // skip the Tuple + + case *types.Named: + // A pointer-to-named type can be derived from a named + // type via reflection. It may have methods too. + visit(types.NewPointer(T), false) + + // Consider 'type T struct{S}' where S has methods. + // Reflection provides no way to get from T to struct{S}, + // only to S, so the method set of struct{S} is unwanted, + // so set 'skip' flag during recursion. + visit(T.Underlying(), true) // skip the unnamed type + + case *types.Array: + visit(T.Elem(), false) + + case *types.Struct: + for i, n := 0, T.NumFields(); i < n; i++ { + // TODO(adonovan): document whether or not + // it is safe to skip non-exported fields. 
+ visit(T.Field(i).Type(), false) + } + + case *types.Tuple: + for i, n := 0, T.Len(); i < n; i++ { + visit(T.At(i).Type(), false) + } + + case *types.TypeParam, *types.Union: + // forEachReachable must not be called on parameterized types. + panic(T) + + default: + panic(T) + } + } + visit(T, false) +} diff --git a/vendor/golang.org/x/tools/internal/typesinternal/errorcode.go b/vendor/golang.org/x/tools/internal/typesinternal/errorcode.go index 834e05381c..131caab284 100644 --- a/vendor/golang.org/x/tools/internal/typesinternal/errorcode.go +++ b/vendor/golang.org/x/tools/internal/typesinternal/errorcode.go @@ -838,7 +838,7 @@ const ( // InvalidCap occurs when an argument to the cap built-in function is not of // supported type. // - // See https://golang.org/ref/spec#Lengthand_capacity for information on + // See https://golang.org/ref/spec#Length_and_capacity for information on // which underlying types are supported as arguments to cap and len. // // Example: @@ -859,7 +859,7 @@ const ( // InvalidCopy occurs when the arguments are not of slice type or do not // have compatible type. // - // See https://golang.org/ref/spec#Appendingand_copying_slices for more + // See https://golang.org/ref/spec#Appending_and_copying_slices for more // information on the type requirements for the copy built-in. // // Example: @@ -897,7 +897,7 @@ const ( // InvalidLen occurs when an argument to the len built-in function is not of // supported type. // - // See https://golang.org/ref/spec#Lengthand_capacity for information on + // See https://golang.org/ref/spec#Length_and_capacity for information on // which underlying types are supported as arguments to cap and len. // // Example: @@ -914,7 +914,7 @@ const ( // InvalidMake occurs when make is called with an unsupported type argument. // - // See https://golang.org/ref/spec#Makingslices_maps_and_channels for + // See https://golang.org/ref/spec#Making_slices_maps_and_channels for // information on the types that may be created using make. // // Example: diff --git a/vendor/golang.org/x/tools/internal/typesinternal/recv.go b/vendor/golang.org/x/tools/internal/typesinternal/recv.go index fea7c8b75e..ba6f4f4ebd 100644 --- a/vendor/golang.org/x/tools/internal/typesinternal/recv.go +++ b/vendor/golang.org/x/tools/internal/typesinternal/recv.go @@ -6,8 +6,6 @@ package typesinternal import ( "go/types" - - "golang.org/x/tools/internal/aliases" ) // ReceiverNamed returns the named type (if any) associated with the @@ -15,11 +13,11 @@ import ( // It also reports whether a Pointer was present. func ReceiverNamed(recv *types.Var) (isPtr bool, named *types.Named) { t := recv.Type() - if ptr, ok := aliases.Unalias(t).(*types.Pointer); ok { + if ptr, ok := types.Unalias(t).(*types.Pointer); ok { isPtr = true t = ptr.Elem() } - named, _ = aliases.Unalias(t).(*types.Named) + named, _ = types.Unalias(t).(*types.Named) return } @@ -36,7 +34,7 @@ func ReceiverNamed(recv *types.Var) (isPtr bool, named *types.Named) { // indirection from the type, regardless of named types (analogous to // a LOAD instruction). 
func Unpointer(t types.Type) types.Type { - if ptr, ok := aliases.Unalias(t).(*types.Pointer); ok { + if ptr, ok := types.Unalias(t).(*types.Pointer); ok { return ptr.Elem() } return t diff --git a/vendor/golang.org/x/tools/internal/typesinternal/types.go b/vendor/golang.org/x/tools/internal/typesinternal/types.go index 8392328612..df3ea52125 100644 --- a/vendor/golang.org/x/tools/internal/typesinternal/types.go +++ b/vendor/golang.org/x/tools/internal/typesinternal/types.go @@ -11,6 +11,8 @@ import ( "go/types" "reflect" "unsafe" + + "golang.org/x/tools/internal/aliases" ) func SetUsesCgo(conf *types.Config) bool { @@ -63,3 +65,57 @@ func NameRelativeTo(pkg *types.Package) types.Qualifier { return other.Name() } } + +// A NamedOrAlias is a [types.Type] that is named (as +// defined by the spec) and capable of bearing type parameters: it +// abstracts aliases ([types.Alias]) and defined types +// ([types.Named]). +// +// Every type declared by an explicit "type" declaration is a +// NamedOrAlias. (Built-in type symbols may additionally +// have type [types.Basic], which is not a NamedOrAlias, +// though the spec regards them as "named".) +// +// NamedOrAlias cannot expose the Origin method, because +// [types.Alias.Origin] and [types.Named.Origin] have different +// (covariant) result types; use [Origin] instead. +type NamedOrAlias interface { + types.Type + Obj() *types.TypeName +} + +// TypeParams is a light shim around t.TypeParams(). +// (go/types.Alias).TypeParams requires >= 1.23. +func TypeParams(t NamedOrAlias) *types.TypeParamList { + switch t := t.(type) { + case *types.Alias: + return aliases.TypeParams(t) + case *types.Named: + return t.TypeParams() + } + return nil +} + +// TypeArgs is a light shim around t.TypeArgs(). +// (go/types.Alias).TypeArgs requires >= 1.23. +func TypeArgs(t NamedOrAlias) *types.TypeList { + switch t := t.(type) { + case *types.Alias: + return aliases.TypeArgs(t) + case *types.Named: + return t.TypeArgs() + } + return nil +} + +// Origin returns the generic type of the Named or Alias type t if it +// is instantiated, otherwise it returns t. +func Origin(t NamedOrAlias) NamedOrAlias { + switch t := t.(type) { + case *types.Alias: + return aliases.Origin(t) + case *types.Named: + return t.Origin() + } + return t +} diff --git a/vendor/golang.org/x/tools/internal/versions/toolchain.go b/vendor/golang.org/x/tools/internal/versions/toolchain.go deleted file mode 100644 index 377bf7a53b..0000000000 --- a/vendor/golang.org/x/tools/internal/versions/toolchain.go +++ /dev/null @@ -1,14 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package versions - -// toolchain is maximum version (<1.22) that the go toolchain used -// to build the current tool is known to support. -// -// When a tool is built with >=1.22, the value of toolchain is unused. -// -// x/tools does not support building with go <1.18. So we take this -// as the minimum possible maximum. -var toolchain string = Go1_18 diff --git a/vendor/golang.org/x/tools/internal/versions/toolchain_go119.go b/vendor/golang.org/x/tools/internal/versions/toolchain_go119.go deleted file mode 100644 index f65beed9d8..0000000000 --- a/vendor/golang.org/x/tools/internal/versions/toolchain_go119.go +++ /dev/null @@ -1,14 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -//go:build go1.19 -// +build go1.19 - -package versions - -func init() { - if Compare(toolchain, Go1_19) < 0 { - toolchain = Go1_19 - } -} diff --git a/vendor/golang.org/x/tools/internal/versions/toolchain_go121.go b/vendor/golang.org/x/tools/internal/versions/toolchain_go121.go deleted file mode 100644 index b7ef216dfe..0000000000 --- a/vendor/golang.org/x/tools/internal/versions/toolchain_go121.go +++ /dev/null @@ -1,14 +0,0 @@ -// Copyright 2024 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.21 -// +build go1.21 - -package versions - -func init() { - if Compare(toolchain, Go1_21) < 0 { - toolchain = Go1_21 - } -} diff --git a/vendor/golang.org/x/tools/internal/versions/types.go b/vendor/golang.org/x/tools/internal/versions/types.go index 562eef21fa..0fc10ce4eb 100644 --- a/vendor/golang.org/x/tools/internal/versions/types.go +++ b/vendor/golang.org/x/tools/internal/versions/types.go @@ -5,15 +5,29 @@ package versions import ( + "go/ast" "go/types" ) -// GoVersion returns the Go version of the type package. -// It returns zero if no version can be determined. -func GoVersion(pkg *types.Package) string { - // TODO(taking): x/tools can call GoVersion() [from 1.21] after 1.25. - if pkg, ok := any(pkg).(interface{ GoVersion() string }); ok { - return pkg.GoVersion() +// FileVersion returns a file's Go version. +// The reported version is an unknown Future version if a +// version cannot be determined. +func FileVersion(info *types.Info, file *ast.File) string { + // In tools built with Go >= 1.22, the Go version of a file + // follow a cascades of sources: + // 1) types.Info.FileVersion, which follows the cascade: + // 1.a) file version (ast.File.GoVersion), + // 1.b) the package version (types.Config.GoVersion), or + // 2) is some unknown Future version. + // + // File versions require a valid package version to be provided to types + // in Config.GoVersion. Config.GoVersion is either from the package's module + // or the toolchain (go run). This value should be provided by go/packages + // or unitchecker.Config.GoVersion. + if v := info.FileVersions[file]; IsValid(v) { + return v } - return "" + // Note: we could instead return runtime.Version() [if valid]. + // This would act as a max version on what a tool can support. + return Future } diff --git a/vendor/golang.org/x/tools/internal/versions/types_go121.go b/vendor/golang.org/x/tools/internal/versions/types_go121.go deleted file mode 100644 index b4345d3349..0000000000 --- a/vendor/golang.org/x/tools/internal/versions/types_go121.go +++ /dev/null @@ -1,30 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build !go1.22 -// +build !go1.22 - -package versions - -import ( - "go/ast" - "go/types" -) - -// FileVersion returns a language version (<=1.21) derived from runtime.Version() -// or an unknown future version. -func FileVersion(info *types.Info, file *ast.File) string { - // In x/tools built with Go <= 1.21, we do not have Info.FileVersions - // available. We use a go version derived from the toolchain used to - // compile the tool by default. - // This will be <= go1.21. We take this as the maximum version that - // this tool can support. - // - // There are no features currently in x/tools that need to tell fine grained - // differences for versions <1.22. 
- return toolchain -} - -// InitFileVersions is a noop when compiled with this Go version. -func InitFileVersions(*types.Info) {} diff --git a/vendor/golang.org/x/tools/internal/versions/types_go122.go b/vendor/golang.org/x/tools/internal/versions/types_go122.go deleted file mode 100644 index aac5db62c9..0000000000 --- a/vendor/golang.org/x/tools/internal/versions/types_go122.go +++ /dev/null @@ -1,41 +0,0 @@ -// Copyright 2023 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -//go:build go1.22 -// +build go1.22 - -package versions - -import ( - "go/ast" - "go/types" -) - -// FileVersion returns a file's Go version. -// The reported version is an unknown Future version if a -// version cannot be determined. -func FileVersion(info *types.Info, file *ast.File) string { - // In tools built with Go >= 1.22, the Go version of a file - // follow a cascades of sources: - // 1) types.Info.FileVersion, which follows the cascade: - // 1.a) file version (ast.File.GoVersion), - // 1.b) the package version (types.Config.GoVersion), or - // 2) is some unknown Future version. - // - // File versions require a valid package version to be provided to types - // in Config.GoVersion. Config.GoVersion is either from the package's module - // or the toolchain (go run). This value should be provided by go/packages - // or unitchecker.Config.GoVersion. - if v := info.FileVersions[file]; IsValid(v) { - return v - } - // Note: we could instead return runtime.Version() [if valid]. - // This would act as a max version on what a tool can support. - return Future -} - -// InitFileVersions initializes info to record Go versions for Go files. -func InitFileVersions(info *types.Info) { - info.FileVersions = make(map[*ast.File]string) -} diff --git a/vendor/google.golang.org/grpc/CONTRIBUTING.md b/vendor/google.golang.org/grpc/CONTRIBUTING.md index 0854d298e4..d9bfa6e1e7 100644 --- a/vendor/google.golang.org/grpc/CONTRIBUTING.md +++ b/vendor/google.golang.org/grpc/CONTRIBUTING.md @@ -4,7 +4,7 @@ We definitely welcome your patches and contributions to gRPC! Please read the gR organization's [governance rules](https://github.com/grpc/grpc-community/blob/master/governance.md) and [contribution guidelines](https://github.com/grpc/grpc-community/blob/master/CONTRIBUTING.md) before proceeding. -If you are new to github, please start by reading [Pull Request howto](https://help.github.com/articles/about-pull-requests/) +If you are new to GitHub, please start by reading [Pull Request howto](https://help.github.com/articles/about-pull-requests/) ## Legal requirements @@ -25,8 +25,8 @@ How to get your contributions merged smoothly and quickly. is a great place to start. These issues are well-documented and usually can be resolved with a single pull request. -- If you are adding a new file, make sure it has the copyright message template - at the top as a comment. You can copy over the message from an existing file +- If you are adding a new file, make sure it has the copyright message template + at the top as a comment. You can copy over the message from an existing file and update the year. - The grpc package should only depend on standard Go packages and a small number @@ -39,12 +39,12 @@ How to get your contributions merged smoothly and quickly. proposal](https://github.com/grpc/proposal). - Provide a good **PR description** as a record of **what** change is being made - and **why** it was made. Link to a github issue if it exists. 
+ and **why** it was made. Link to a GitHub issue if it exists. -- If you want to fix formatting or style, consider whether your changes are an - obvious improvement or might be considered a personal preference. If a style - change is based on preference, it likely will not be accepted. If it corrects - widely agreed-upon anti-patterns, then please do create a PR and explain the +- If you want to fix formatting or style, consider whether your changes are an + obvious improvement or might be considered a personal preference. If a style + change is based on preference, it likely will not be accepted. If it corrects + widely agreed-upon anti-patterns, then please do create a PR and explain the benefits of the change. - Unless your PR is trivial, you should expect there will be reviewer comments diff --git a/vendor/google.golang.org/grpc/balancer/balancer.go b/vendor/google.golang.org/grpc/balancer/balancer.go index b181f386a1..3a2092f105 100644 --- a/vendor/google.golang.org/grpc/balancer/balancer.go +++ b/vendor/google.golang.org/grpc/balancer/balancer.go @@ -130,7 +130,7 @@ type SubConn interface { // UpdateAddresses updates the addresses used in this SubConn. // gRPC checks if currently-connected address is still in the new list. // If it's in the list, the connection will be kept. - // If it's not in the list, the connection will gracefully closed, and + // If it's not in the list, the connection will gracefully close, and // a new connection will be created. // // This will trigger a state transition for the SubConn. @@ -142,8 +142,11 @@ type SubConn interface { Connect() // GetOrBuildProducer returns a reference to the existing Producer for this // ProducerBuilder in this SubConn, or, if one does not currently exist, - // creates a new one and returns it. Returns a close function which must - // be called when the Producer is no longer needed. + // creates a new one and returns it. Returns a close function which may be + // called when the Producer is no longer needed. Otherwise the producer + // will automatically be closed upon connection loss or subchannel close. + // Should only be called on a SubConn in state Ready. Otherwise the + // producer will be unable to create streams. GetOrBuildProducer(ProducerBuilder) (p Producer, close func()) // Shutdown shuts down the SubConn gracefully. Any started RPCs will be // allowed to complete. No future calls should be made on the SubConn. @@ -452,8 +455,10 @@ type ProducerBuilder interface { // Build creates a Producer. The first parameter is always a // grpc.ClientConnInterface (a type to allow creating RPCs/streams on the // associated SubConn), but is declared as `any` to avoid a dependency - // cycle. Should also return a close function that will be called when all - // references to the Producer have been given up. + // cycle. Build also returns a close function that will be called when all + // references to the Producer have been given up for a SubConn, or when a + // connectivity state change occurs on the SubConn. The close function + // should always block until all asynchronous cleanup work is completed. 
Build(grpcClientConnInterface any) (p Producer, close func()) } diff --git a/vendor/google.golang.org/grpc/balancer/base/balancer.go b/vendor/google.golang.org/grpc/balancer/base/balancer.go index 2b87bd79c7..d5ed172ae6 100644 --- a/vendor/google.golang.org/grpc/balancer/base/balancer.go +++ b/vendor/google.golang.org/grpc/balancer/base/balancer.go @@ -133,7 +133,7 @@ func (b *baseBalancer) UpdateClientConnState(s balancer.ClientConnState) error { } } // If resolver state contains no addresses, return an error so ClientConn - // will trigger re-resolve. Also records this as an resolver error, so when + // will trigger re-resolve. Also records this as a resolver error, so when // the overall state turns transient failure, the error message will have // the zero address information. if len(s.ResolverState.Addresses) == 0 { diff --git a/vendor/google.golang.org/grpc/balancer/grpclb/grpclb.go b/vendor/google.golang.org/grpc/balancer/grpclb/grpclb.go index c098762741..0770b88e96 100644 --- a/vendor/google.golang.org/grpc/balancer/grpclb/grpclb.go +++ b/vendor/google.golang.org/grpc/balancer/grpclb/grpclb.go @@ -197,7 +197,7 @@ type lbBalancer struct { // manualResolver is used in the remote LB ClientConn inside grpclb. When // resolved address updates are received by grpclb, filtered updates will be - // send to remote LB ClientConn through this resolver. + // sent to remote LB ClientConn through this resolver. manualResolver *manual.Resolver // The ClientConn to talk to the remote balancer. ccRemoteLB *remoteBalancerCCWrapper diff --git a/vendor/google.golang.org/grpc/balancer/pickfirst/internal/internal.go b/vendor/google.golang.org/grpc/balancer/pickfirst/internal/internal.go new file mode 100644 index 0000000000..c519789458 --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/pickfirst/internal/internal.go @@ -0,0 +1,24 @@ +/* + * Copyright 2024 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package internal contains code internal to the pickfirst package. +package internal + +import "math/rand" + +// RandShuffle pseudo-randomizes the order of addresses. 
+var RandShuffle = rand.Shuffle diff --git a/vendor/google.golang.org/grpc/balancer/pickfirst/pickfirst.go b/vendor/google.golang.org/grpc/balancer/pickfirst/pickfirst.go index 4d69b4052f..e069346a75 100644 --- a/vendor/google.golang.org/grpc/balancer/pickfirst/pickfirst.go +++ b/vendor/google.golang.org/grpc/balancer/pickfirst/pickfirst.go @@ -26,18 +26,23 @@ import ( "math/rand" "google.golang.org/grpc/balancer" + "google.golang.org/grpc/balancer/pickfirst/internal" "google.golang.org/grpc/connectivity" "google.golang.org/grpc/grpclog" - "google.golang.org/grpc/internal" + "google.golang.org/grpc/internal/envconfig" internalgrpclog "google.golang.org/grpc/internal/grpclog" "google.golang.org/grpc/internal/pretty" "google.golang.org/grpc/resolver" "google.golang.org/grpc/serviceconfig" + + _ "google.golang.org/grpc/balancer/pickfirst/pickfirstleaf" // For automatically registering the new pickfirst if required. ) func init() { + if envconfig.NewPickFirstEnabled { + return + } balancer.Register(pickfirstBuilder{}) - internal.ShuffleAddressListForTesting = func(n int, swap func(i, j int)) { rand.Shuffle(n, swap) } } var logger = grpclog.Component("pick-first-lb") @@ -103,10 +108,13 @@ func (b *pickfirstBalancer) ResolverError(err error) { }) } +// Shuffler is an interface for shuffling an address list. type Shuffler interface { ShuffleAddressListForTesting(n int, swap func(i, j int)) } +// ShuffleAddressListForTesting pseudo-randomizes the order of addresses. n +// is the number of elements. swap swaps the elements with indexes i and j. func ShuffleAddressListForTesting(n int, swap func(i, j int)) { rand.Shuffle(n, swap) } func (b *pickfirstBalancer) UpdateClientConnState(state balancer.ClientConnState) error { @@ -140,7 +148,7 @@ func (b *pickfirstBalancer) UpdateClientConnState(state balancer.ClientConnState // within each endpoint. - A61 if cfg.ShuffleAddressList { endpoints = append([]resolver.Endpoint{}, endpoints...) - internal.ShuffleAddressListForTesting.(func(int, func(int, int)))(len(endpoints), func(i, j int) { endpoints[i], endpoints[j] = endpoints[j], endpoints[i] }) + internal.RandShuffle(len(endpoints), func(i, j int) { endpoints[i], endpoints[j] = endpoints[j], endpoints[i] }) } // "Flatten the list by concatenating the ordered list of addresses for each diff --git a/vendor/google.golang.org/grpc/balancer/pickfirst/pickfirstleaf/pickfirstleaf.go b/vendor/google.golang.org/grpc/balancer/pickfirst/pickfirstleaf/pickfirstleaf.go new file mode 100644 index 0000000000..985b6edc7f --- /dev/null +++ b/vendor/google.golang.org/grpc/balancer/pickfirst/pickfirstleaf/pickfirstleaf.go @@ -0,0 +1,625 @@ +/* + * + * Copyright 2024 gRPC authors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +// Package pickfirstleaf contains the pick_first load balancing policy which +// will be the universal leaf policy after dualstack changes are implemented. +// +// # Experimental +// +// Notice: This package is EXPERIMENTAL and may be changed or removed in a +// later release. 
+package pickfirstleaf + +import ( + "encoding/json" + "errors" + "fmt" + "sync" + + "google.golang.org/grpc/balancer" + "google.golang.org/grpc/balancer/pickfirst/internal" + "google.golang.org/grpc/connectivity" + "google.golang.org/grpc/grpclog" + "google.golang.org/grpc/internal/envconfig" + internalgrpclog "google.golang.org/grpc/internal/grpclog" + "google.golang.org/grpc/internal/pretty" + "google.golang.org/grpc/resolver" + "google.golang.org/grpc/serviceconfig" +) + +func init() { + if envconfig.NewPickFirstEnabled { + // Register as the default pick_first balancer. + Name = "pick_first" + } + balancer.Register(pickfirstBuilder{}) +} + +var ( + logger = grpclog.Component("pick-first-leaf-lb") + // Name is the name of the pick_first_leaf balancer. + // It is changed to "pick_first" in init() if this balancer is to be + // registered as the default pickfirst. + Name = "pick_first_leaf" +) + +// TODO: change to pick-first when this becomes the default pick_first policy. +const logPrefix = "[pick-first-leaf-lb %p] " + +type pickfirstBuilder struct{} + +func (pickfirstBuilder) Build(cc balancer.ClientConn, _ balancer.BuildOptions) balancer.Balancer { + b := &pickfirstBalancer{ + cc: cc, + addressList: addressList{}, + subConns: resolver.NewAddressMap(), + state: connectivity.Connecting, + mu: sync.Mutex{}, + } + b.logger = internalgrpclog.NewPrefixLogger(logger, fmt.Sprintf(logPrefix, b)) + return b +} + +func (b pickfirstBuilder) Name() string { + return Name +} + +func (pickfirstBuilder) ParseConfig(js json.RawMessage) (serviceconfig.LoadBalancingConfig, error) { + var cfg pfConfig + if err := json.Unmarshal(js, &cfg); err != nil { + return nil, fmt.Errorf("pickfirst: unable to unmarshal LB policy config: %s, error: %v", string(js), err) + } + return cfg, nil +} + +type pfConfig struct { + serviceconfig.LoadBalancingConfig `json:"-"` + + // If set to true, instructs the LB policy to shuffle the order of the list + // of endpoints received from the name resolver before attempting to + // connect to them. + ShuffleAddressList bool `json:"shuffleAddressList"` +} + +// scData keeps track of the current state of the subConn. +// It is not safe for concurrent access. +type scData struct { + // The following fields are initialized at build time and read-only after + // that. + subConn balancer.SubConn + addr resolver.Address + + state connectivity.State + lastErr error +} + +func (b *pickfirstBalancer) newSCData(addr resolver.Address) (*scData, error) { + sd := &scData{ + state: connectivity.Idle, + addr: addr, + } + sc, err := b.cc.NewSubConn([]resolver.Address{addr}, balancer.NewSubConnOptions{ + StateListener: func(state balancer.SubConnState) { + b.updateSubConnState(sd, state) + }, + }) + if err != nil { + return nil, err + } + sd.subConn = sc + return sd, nil +} + +type pickfirstBalancer struct { + // The following fields are initialized at build time and read-only after + // that and therefore do not need to be guarded by a mutex. + logger *internalgrpclog.PrefixLogger + cc balancer.ClientConn + + // The mutex is used to ensure synchronization of updates triggered + // from the idle picker and the already serialized resolver and + // SubConn state updates. + mu sync.Mutex + state connectivity.State + // scData for active SubConns mapped by address.
+ subConns *resolver.AddressMap + addressList addressList + firstPass bool + numTF int +} + +// ResolverError is called by the ClientConn when the name resolver produces +// an error or when pickfirst determined the resolver update to be invalid. +func (b *pickfirstBalancer) ResolverError(err error) { + b.mu.Lock() + defer b.mu.Unlock() + b.resolverErrorLocked(err) +} + +func (b *pickfirstBalancer) resolverErrorLocked(err error) { + if b.logger.V(2) { + b.logger.Infof("Received error from the name resolver: %v", err) + } + + // The picker will not change since the balancer does not currently + // report an error. If the balancer hasn't received a single good resolver + // update yet, transition to TRANSIENT_FAILURE. + if b.state != connectivity.TransientFailure && b.addressList.size() > 0 { + if b.logger.V(2) { + b.logger.Infof("Ignoring resolver error because balancer is using a previous good update.") + } + return + } + + b.cc.UpdateState(balancer.State{ + ConnectivityState: connectivity.TransientFailure, + Picker: &picker{err: fmt.Errorf("name resolver error: %v", err)}, + }) +} + +func (b *pickfirstBalancer) UpdateClientConnState(state balancer.ClientConnState) error { + b.mu.Lock() + defer b.mu.Unlock() + if len(state.ResolverState.Addresses) == 0 && len(state.ResolverState.Endpoints) == 0 { + // Cleanup state pertaining to the previous resolver state. + // Treat an empty address list like an error by calling b.ResolverError. + b.state = connectivity.TransientFailure + b.closeSubConnsLocked() + b.addressList.updateAddrs(nil) + b.resolverErrorLocked(errors.New("produced zero addresses")) + return balancer.ErrBadResolverState + } + cfg, ok := state.BalancerConfig.(pfConfig) + if state.BalancerConfig != nil && !ok { + return fmt.Errorf("pickfirst: received illegal BalancerConfig (type %T): %v: %w", state.BalancerConfig, state.BalancerConfig, balancer.ErrBadResolverState) + } + + if b.logger.V(2) { + b.logger.Infof("Received new config %s, resolver state %s", pretty.ToJSON(cfg), pretty.ToJSON(state.ResolverState)) + } + + var newAddrs []resolver.Address + if endpoints := state.ResolverState.Endpoints; len(endpoints) != 0 { + // Perform the optional shuffling described in gRFC A62. The shuffling + // will change the order of endpoints but not touch the order of the + // addresses within each endpoint. - A61 + if cfg.ShuffleAddressList { + endpoints = append([]resolver.Endpoint{}, endpoints...) + internal.RandShuffle(len(endpoints), func(i, j int) { endpoints[i], endpoints[j] = endpoints[j], endpoints[i] }) + } + + // "Flatten the list by concatenating the ordered list of addresses for + // each of the endpoints, in order." - A61 + for _, endpoint := range endpoints { + // "In the flattened list, interleave addresses from the two address + // families, as per RFC-8305 section 4." - A61 + // TODO: support the above language. + newAddrs = append(newAddrs, endpoint.Addresses...) + } + } else { + // Endpoints not set, process addresses until we migrate resolver + // emissions fully to Endpoints. The top channel does wrap emitted + // addresses with endpoints, however some balancers such as weighted + // target do not forward the corresponding correct endpoints down/split + // endpoints properly. Once all balancers correctly forward endpoints + // down, can delete this else conditional. + newAddrs = state.ResolverState.Addresses + if cfg.ShuffleAddressList { + newAddrs = append([]resolver.Address{}, newAddrs...) 
+ internal.RandShuffle(len(newAddrs), func(i, j int) { newAddrs[i], newAddrs[j] = newAddrs[j], newAddrs[i] }) + } + } + + // If an address appears in multiple endpoints or in the same endpoint + // multiple times, we keep it only once. We will create only one SubConn + // for the address because an AddressMap is used to store SubConns. + // Not de-duplicating would result in attempting to connect to the same + // SubConn multiple times in the same pass. We don't want this. + newAddrs = deDupAddresses(newAddrs) + + // Since we have a new set of addresses, we are again at first pass. + b.firstPass = true + + // If the previous ready SubConn exists in new address list, + // keep this connection and don't create new SubConns. + prevAddr := b.addressList.currentAddress() + prevAddrsCount := b.addressList.size() + b.addressList.updateAddrs(newAddrs) + if b.state == connectivity.Ready && b.addressList.seekTo(prevAddr) { + return nil + } + + b.reconcileSubConnsLocked(newAddrs) + // If it's the first resolver update or the balancer was already READY + // (but the new address list does not contain the ready SubConn) or + // CONNECTING, enter CONNECTING. + // We may be in TRANSIENT_FAILURE due to a previous empty address list, + // we should still enter CONNECTING because the sticky TF behaviour + // mentioned in A62 applies only when the TRANSIENT_FAILURE is reported + // due to connectivity failures. + if b.state == connectivity.Ready || b.state == connectivity.Connecting || prevAddrsCount == 0 { + // Start connection attempt at first address. + b.state = connectivity.Connecting + b.cc.UpdateState(balancer.State{ + ConnectivityState: connectivity.Connecting, + Picker: &picker{err: balancer.ErrNoSubConnAvailable}, + }) + b.requestConnectionLocked() + } else if b.state == connectivity.TransientFailure { + // If we're in TRANSIENT_FAILURE, we stay in TRANSIENT_FAILURE until + // we're READY. See A62. + b.requestConnectionLocked() + } + return nil +} + +// UpdateSubConnState is unused as a StateListener is always registered when +// creating SubConns. +func (b *pickfirstBalancer) UpdateSubConnState(subConn balancer.SubConn, state balancer.SubConnState) { + b.logger.Errorf("UpdateSubConnState(%v, %+v) called unexpectedly", subConn, state) +} + +func (b *pickfirstBalancer) Close() { + b.mu.Lock() + defer b.mu.Unlock() + b.closeSubConnsLocked() + b.state = connectivity.Shutdown +} + +// ExitIdle moves the balancer out of idle state. It can be called concurrently +// by the idlePicker and clientConn so access to variables should be +// synchronized. +func (b *pickfirstBalancer) ExitIdle() { + b.mu.Lock() + defer b.mu.Unlock() + if b.state == connectivity.Idle && b.addressList.currentAddress() == b.addressList.first() { + b.firstPass = true + b.requestConnectionLocked() + } +} + +func (b *pickfirstBalancer) closeSubConnsLocked() { + for _, sd := range b.subConns.Values() { + sd.(*scData).subConn.Shutdown() + } + b.subConns = resolver.NewAddressMap() +} + +// deDupAddresses ensures that each address appears only once in the slice. +func deDupAddresses(addrs []resolver.Address) []resolver.Address { + seenAddrs := resolver.NewAddressMap() + retAddrs := []resolver.Address{} + + for _, addr := range addrs { + if _, ok := seenAddrs.Get(addr); !ok { + seenAddrs.Set(addr, true) + retAddrs = append(retAddrs, addr) + } + } + return retAddrs +} + +// reconcileSubConnsLocked updates the active subchannels based on a new address +// list from the resolver.
It does this by: +// - closing subchannels: any existing subchannels associated with addresses +// that are no longer in the updated list are shut down. +// - removing subchannels: entries for these closed subchannels are removed +// from the subchannel map. +// +// This ensures that the subchannel map accurately reflects the current set of +// addresses received from the name resolver. +func (b *pickfirstBalancer) reconcileSubConnsLocked(newAddrs []resolver.Address) { + newAddrsMap := resolver.NewAddressMap() + for _, addr := range newAddrs { + newAddrsMap.Set(addr, true) + } + + for _, oldAddr := range b.subConns.Keys() { + if _, ok := newAddrsMap.Get(oldAddr); ok { + continue + } + val, _ := b.subConns.Get(oldAddr) + val.(*scData).subConn.Shutdown() + b.subConns.Delete(oldAddr) + } +} + +// shutdownRemainingLocked shuts down remaining subConns. Called when a subConn +// becomes ready, which means that all other SubConns must be shut down. +func (b *pickfirstBalancer) shutdownRemainingLocked(selected *scData) { + for _, v := range b.subConns.Values() { + sd := v.(*scData) + if sd.subConn != selected.subConn { + sd.subConn.Shutdown() + } + } + b.subConns = resolver.NewAddressMap() + b.subConns.Set(selected.addr, selected) +} + +// requestConnectionLocked starts connecting on the subchannel corresponding to +// the current address. If no subchannel exists, one is created. If the current +// subchannel is in TransientFailure, a connection to the next address is +// attempted until a subchannel is found. +func (b *pickfirstBalancer) requestConnectionLocked() { + if !b.addressList.isValid() { + return + } + var lastErr error + for valid := true; valid; valid = b.addressList.increment() { + curAddr := b.addressList.currentAddress() + sd, ok := b.subConns.Get(curAddr) + if !ok { + var err error + // We want to assign the new scData to sd from the outer scope, + // hence we can't use := below. + sd, err = b.newSCData(curAddr) + if err != nil { + // This should never happen, unless the clientConn is being shut + // down. + if b.logger.V(2) { + b.logger.Infof("Failed to create a subConn for address %v: %v", curAddr.String(), err) + } + // Do nothing, the LB policy will be closed soon. + return + } + b.subConns.Set(curAddr, sd) + } + + scd := sd.(*scData) + switch scd.state { + case connectivity.Idle: + scd.subConn.Connect() + case connectivity.TransientFailure: + // Try the next address. + lastErr = scd.lastErr + continue + case connectivity.Ready: + // Should never happen. + b.logger.Errorf("Requesting a connection even though we have a READY SubConn") + case connectivity.Shutdown: + // Should never happen. + b.logger.Errorf("SubConn with state SHUTDOWN present in SubConns map") + case connectivity.Connecting: + // Wait for the SubConn to report success or failure. + } + return + } + // All the remaining addresses in the list are in TRANSIENT_FAILURE, end the + // first pass. + b.endFirstPassLocked(lastErr) +} + +func (b *pickfirstBalancer) updateSubConnState(sd *scData, newState balancer.SubConnState) { + b.mu.Lock() + defer b.mu.Unlock() + oldState := sd.state + sd.state = newState.ConnectivityState + // Previously relevant SubConns can still call back with state updates. + // To prevent pickers from returning these obsolete SubConns, this logic + // is included to check if the current list of active SubConns includes this + // SubConn.
+ if activeSD, found := b.subConns.Get(sd.addr); !found || activeSD != sd { + return + } + if newState.ConnectivityState == connectivity.Shutdown { + return + } + + if newState.ConnectivityState == connectivity.Ready { + b.shutdownRemainingLocked(sd) + if !b.addressList.seekTo(sd.addr) { + // This should not fail as we should have only one SubConn after + // entering READY. The SubConn should be present in the addressList. + b.logger.Errorf("Address %q not found in address list %v", sd.addr, b.addressList.addresses) + return + } + b.state = connectivity.Ready + b.cc.UpdateState(balancer.State{ + ConnectivityState: connectivity.Ready, + Picker: &picker{result: balancer.PickResult{SubConn: sd.subConn}}, + }) + return + } + + // If the LB policy is READY, and it receives a subchannel state change, + // it means that the READY subchannel has failed. + // A SubConn can also transition from CONNECTING directly to IDLE when + // a transport is successfully created, but the connection fails + // before the SubConn can send the notification for READY. We treat + // this as a successful connection and transition to IDLE. + if (b.state == connectivity.Ready && newState.ConnectivityState != connectivity.Ready) || (oldState == connectivity.Connecting && newState.ConnectivityState == connectivity.Idle) { + // Once a transport fails, the balancer enters IDLE and starts from + // the first address when the picker is used. + b.shutdownRemainingLocked(sd) + b.state = connectivity.Idle + b.addressList.reset() + b.cc.UpdateState(balancer.State{ + ConnectivityState: connectivity.Idle, + Picker: &idlePicker{exitIdle: sync.OnceFunc(b.ExitIdle)}, + }) + return + } + + if b.firstPass { + switch newState.ConnectivityState { + case connectivity.Connecting: + // The balancer can be in either IDLE, CONNECTING or + // TRANSIENT_FAILURE. If it's in TRANSIENT_FAILURE, stay in + // TRANSIENT_FAILURE until it's READY. See A62. + // If the balancer is already in CONNECTING, no update is needed. + if b.state == connectivity.Idle { + b.state = connectivity.Connecting + b.cc.UpdateState(balancer.State{ + ConnectivityState: connectivity.Connecting, + Picker: &picker{err: balancer.ErrNoSubConnAvailable}, + }) + } + case connectivity.TransientFailure: + sd.lastErr = newState.ConnectionError + // Since we're re-using common SubConns while handling resolver + // updates, we could receive an out of turn TRANSIENT_FAILURE from + // a pass over the previous address list. We ignore such updates. + + if curAddr := b.addressList.currentAddress(); !equalAddressIgnoringBalAttributes(&curAddr, &sd.addr) { + return + } + if b.addressList.increment() { + b.requestConnectionLocked() + return + } + // End of the first pass. + b.endFirstPassLocked(newState.ConnectionError) + } + return + } + + // We have finished the first pass, keep re-connecting failing SubConns. + switch newState.ConnectivityState { + case connectivity.TransientFailure: + b.numTF = (b.numTF + 1) % b.subConns.Len() + sd.lastErr = newState.ConnectionError + if b.numTF%b.subConns.Len() == 0 { + b.cc.UpdateState(balancer.State{ + ConnectivityState: connectivity.TransientFailure, + Picker: &picker{err: newState.ConnectionError}, + }) + } + // We don't need to request re-resolution since the SubConn already + // does that before reporting TRANSIENT_FAILURE. + // TODO: #7534 - Move re-resolution requests from SubConn into + // pick_first.
+ case connectivity.Idle: + sd.subConn.Connect() + } +} + +func (b *pickfirstBalancer) endFirstPassLocked(lastErr error) { + b.firstPass = false + b.numTF = 0 + b.state = connectivity.TransientFailure + + b.cc.UpdateState(balancer.State{ + ConnectivityState: connectivity.TransientFailure, + Picker: &picker{err: lastErr}, + }) + // Start re-connecting all the SubConns that are already in IDLE. + for _, v := range b.subConns.Values() { + sd := v.(*scData) + if sd.state == connectivity.Idle { + sd.subConn.Connect() + } + } +} + +type picker struct { + result balancer.PickResult + err error +} + +func (p *picker) Pick(balancer.PickInfo) (balancer.PickResult, error) { + return p.result, p.err +} + +// idlePicker is used when the SubConn is IDLE and kicks the SubConn into +// CONNECTING when Pick is called. +type idlePicker struct { + exitIdle func() +} + +func (i *idlePicker) Pick(balancer.PickInfo) (balancer.PickResult, error) { + i.exitIdle() + return balancer.PickResult{}, balancer.ErrNoSubConnAvailable +} + +// addressList manages sequentially iterating over addresses present in a list +// of endpoints. It provides a 1-dimensional view of the addresses present in +// the endpoints. +// This type is not safe for concurrent access. +type addressList struct { + addresses []resolver.Address + idx int +} + +func (al *addressList) isValid() bool { + return al.idx < len(al.addresses) +} + +func (al *addressList) size() int { + return len(al.addresses) +} + +// increment moves to the next index in the address list. +// This method returns false if it went off the list, true otherwise. +func (al *addressList) increment() bool { + if !al.isValid() { + return false + } + al.idx++ + return al.idx < len(al.addresses) +} + +// currentAddress returns the current address pointed to in the addressList. +// If the list is in an invalid state, it returns an empty address instead. +func (al *addressList) currentAddress() resolver.Address { + if !al.isValid() { + return resolver.Address{} + } + return al.addresses[al.idx] +} + +// first returns the first address in the list. If the list is empty, it returns +// an empty address instead. +func (al *addressList) first() resolver.Address { + if len(al.addresses) == 0 { + return resolver.Address{} + } + return al.addresses[0] +} + +func (al *addressList) reset() { + al.idx = 0 +} + +func (al *addressList) updateAddrs(addrs []resolver.Address) { + al.addresses = addrs + al.reset() +} + +// seekTo returns false if the needle was not found and the current index was +// left unchanged. +func (al *addressList) seekTo(needle resolver.Address) bool { + for ai, addr := range al.addresses { + if !equalAddressIgnoringBalAttributes(&addr, &needle) { + continue + } + al.idx = ai + return true + } + return false +} + +// equalAddressIgnoringBalAttributes returns true if a and b are considered +// equal. This is different from the Equal method on the resolver.Address type +// which considers all fields to determine equality. Here, we only consider +// fields that are meaningful to the SubConn.
+func equalAddressIgnoringBalAttributes(a, b *resolver.Address) bool { + return a.Addr == b.Addr && a.ServerName == b.ServerName && + a.Attributes.Equal(b.Attributes) && + a.Metadata == b.Metadata +} diff --git a/vendor/google.golang.org/grpc/balancer/rls/internal/keys/builder.go b/vendor/google.golang.org/grpc/balancer/rls/internal/keys/builder.go index cc5ce510ad..7deb7dc7a2 100644 --- a/vendor/google.golang.org/grpc/balancer/rls/internal/keys/builder.go +++ b/vendor/google.golang.org/grpc/balancer/rls/internal/keys/builder.go @@ -189,7 +189,7 @@ func (b builder) Equal(a builder) bool { // Protobuf serialization maintains the order of repeated fields. Matchers // are specified as a repeated field inside the KeyBuilder proto. If the // order changes, it means that the order in the protobuf changed. We report - // this case as not being equal even though the builders could possible be + // this case as not being equal even though the builders could possibly be // functionally equal. for i, bMatcher := range b.headerKeys { aMatcher := a.headerKeys[i] diff --git a/vendor/google.golang.org/grpc/balancer/weightedroundrobin/balancer.go b/vendor/google.golang.org/grpc/balancer/weightedroundrobin/balancer.go index 88bf64ec4e..1ea9eba4c8 100644 --- a/vendor/google.golang.org/grpc/balancer/weightedroundrobin/balancer.go +++ b/vendor/google.golang.org/grpc/balancer/weightedroundrobin/balancer.go @@ -526,17 +526,21 @@ func (w *weightedSubConn) updateConfig(cfg *lbConfig) { w.cfg = cfg w.mu.Unlock() - newPeriod := cfg.OOBReportingPeriod if cfg.EnableOOBLoadReport == oldCfg.EnableOOBLoadReport && - newPeriod == oldCfg.OOBReportingPeriod { + cfg.OOBReportingPeriod == oldCfg.OOBReportingPeriod { // Load reporting wasn't enabled before or after, or load reporting was // enabled before and after, and had the same period. (Note that with // load reporting disabled, OOBReportingPeriod is always 0.) return } - // (Optionally stop and) start the listener to use the new config's - // settings for OOB reporting. + if w.connectivityState == connectivity.Ready { + // (Re)start the listener to use the new config's settings for OOB + // reporting. 
+ w.updateORCAListener(cfg) + } +} +func (w *weightedSubConn) updateORCAListener(cfg *lbConfig) { if w.stopORCAListener != nil { w.stopORCAListener() } @@ -545,9 +549,9 @@ func (w *weightedSubConn) updateConfig(cfg *lbConfig) { return } if w.logger.V(2) { - w.logger.Infof("Registering ORCA listener for %v with interval %v", w.SubConn, newPeriod) + w.logger.Infof("Registering ORCA listener for %v with interval %v", w.SubConn, cfg.OOBReportingPeriod) } - opts := orca.OOBListenerOptions{ReportInterval: time.Duration(newPeriod)} + opts := orca.OOBListenerOptions{ReportInterval: time.Duration(cfg.OOBReportingPeriod)} w.stopORCAListener = orca.RegisterOOBListener(w.SubConn, w, opts) } @@ -569,11 +573,9 @@ func (w *weightedSubConn) updateConnectivityState(cs connectivity.State) connect w.mu.Lock() w.nonEmptySince = time.Time{} w.lastUpdated = time.Time{} + cfg := w.cfg w.mu.Unlock() - case connectivity.Shutdown: - if w.stopORCAListener != nil { - w.stopORCAListener() - } + w.updateORCAListener(cfg) } oldCS := w.connectivityState diff --git a/vendor/google.golang.org/grpc/balancer_wrapper.go b/vendor/google.golang.org/grpc/balancer_wrapper.go index 8ad6ce2f09..2a4f2878ae 100644 --- a/vendor/google.golang.org/grpc/balancer_wrapper.go +++ b/vendor/google.golang.org/grpc/balancer_wrapper.go @@ -24,12 +24,14 @@ import ( "sync" "google.golang.org/grpc/balancer" + "google.golang.org/grpc/codes" "google.golang.org/grpc/connectivity" "google.golang.org/grpc/internal" "google.golang.org/grpc/internal/balancer/gracefulswitch" "google.golang.org/grpc/internal/channelz" "google.golang.org/grpc/internal/grpcsync" "google.golang.org/grpc/resolver" + "google.golang.org/grpc/status" ) var setConnectedAddress = internal.SetConnectedAddress.(func(*balancer.SubConnState, resolver.Address)) @@ -256,8 +258,8 @@ type acBalancerWrapper struct { ccb *ccBalancerWrapper // read-only stateListener func(balancer.SubConnState) - mu sync.Mutex - producers map[balancer.ProducerBuilder]*refCountedProducer + producersMu sync.Mutex + producers map[balancer.ProducerBuilder]*refCountedProducer } // updateState is invoked by grpc to push a subConn state update to the @@ -267,6 +269,9 @@ func (acbw *acBalancerWrapper) updateState(s connectivity.State, curAddr resolve if ctx.Err() != nil || acbw.ccb.balancer == nil { return } + // Invalidate all producers on any state change. + acbw.closeProducers() + // Even though it is optional for balancers, gracefulswitch ensures // opts.StateListener is set, so this cannot ever be nil. // TODO: delete this comment when UpdateSubConnState is removed. @@ -275,16 +280,6 @@ func (acbw *acBalancerWrapper) updateState(s connectivity.State, curAddr resolve setConnectedAddress(&scs, curAddr) } acbw.stateListener(scs) - acbw.ac.mu.Lock() - defer acbw.ac.mu.Unlock() - if s == connectivity.Ready { - // When changing states to READY, reset stateReadyChan. Wait until - // after we notify the LB policy's listener(s) in order to prevent - // ac.getTransport() from unblocking before the LB policy starts - // tracking the subchannel as READY. - close(acbw.ac.stateReadyChan) - acbw.ac.stateReadyChan = make(chan struct{}) - } }) } @@ -301,6 +296,7 @@ func (acbw *acBalancerWrapper) Connect() { } func (acbw *acBalancerWrapper) Shutdown() { + acbw.closeProducers() acbw.ccb.cc.removeAddrConn(acbw.ac, errConnDrain) } @@ -308,9 +304,10 @@ func (acbw *acBalancerWrapper) Shutdown() { // ready, blocks until it is or ctx expires. Returns an error when the context // expires or the addrConn is shut down. 
func (acbw *acBalancerWrapper) NewStream(ctx context.Context, desc *StreamDesc, method string, opts ...CallOption) (ClientStream, error) { - transport, err := acbw.ac.getTransport(ctx) - if err != nil { - return nil, err + transport := acbw.ac.getReadyTransport() + if transport == nil { + return nil, status.Errorf(codes.Unavailable, "SubConn state is not Ready") + } return newNonRetryClientStream(ctx, desc, method, transport, acbw.ac, opts...) } @@ -335,8 +332,8 @@ type refCountedProducer struct { } func (acbw *acBalancerWrapper) GetOrBuildProducer(pb balancer.ProducerBuilder) (balancer.Producer, func()) { - acbw.mu.Lock() - defer acbw.mu.Unlock() + acbw.producersMu.Lock() + defer acbw.producersMu.Unlock() // Look up existing producer from this builder. pData := acbw.producers[pb] @@ -353,13 +350,26 @@ func (acbw *acBalancerWrapper) GetOrBuildProducer(pb balancer.ProducerBuilder) ( // and delete the refCountedProducer from the map if the total reference // count goes to zero. unref := func() { - acbw.mu.Lock() + acbw.producersMu.Lock() + // If closeProducers has already closed this producer instance, refs is + // set to 0, so the check after decrementing will never pass, and the + // producer will not be double-closed. pData.refs-- if pData.refs == 0 { defer pData.close() // Run outside the acbw mutex delete(acbw.producers, pb) } - acbw.mu.Unlock() + acbw.producersMu.Unlock() } return pData.producer, grpcsync.OnceFunc(unref) } + +func (acbw *acBalancerWrapper) closeProducers() { + acbw.producersMu.Lock() + defer acbw.producersMu.Unlock() + for pb, pData := range acbw.producers { + pData.refs = 0 + pData.close() + delete(acbw.producers, pb) + } +} diff --git a/vendor/google.golang.org/grpc/clientconn.go b/vendor/google.golang.org/grpc/clientconn.go index 9c8850e3fd..19763f8edd 100644 --- a/vendor/google.golang.org/grpc/clientconn.go +++ b/vendor/google.golang.org/grpc/clientconn.go @@ -825,14 +825,13 @@ func (cc *ClientConn) newAddrConnLocked(addrs []resolver.Address, opts balancer. } ac := &addrConn{ - state: connectivity.Idle, - cc: cc, - addrs: copyAddresses(addrs), - scopts: opts, - dopts: cc.dopts, - channelz: channelz.RegisterSubChannel(cc.channelz, ""), - resetBackoff: make(chan struct{}), - stateReadyChan: make(chan struct{}), + state: connectivity.Idle, + cc: cc, + addrs: copyAddresses(addrs), + scopts: opts, + dopts: cc.dopts, + channelz: channelz.RegisterSubChannel(cc.channelz, ""), + resetBackoff: make(chan struct{}), } ac.ctx, ac.cancel = context.WithCancel(cc.ctx) // Start with our address set to the first address; this may be updated if @@ -1141,10 +1140,15 @@ func (cc *ClientConn) Close() error { <-cc.resolverWrapper.serializer.Done() <-cc.balancerWrapper.serializer.Done() - + var wg sync.WaitGroup for ac := range conns { - ac.tearDown(ErrClientConnClosing) + wg.Add(1) + go func(ac *addrConn) { + defer wg.Done() + ac.tearDown(ErrClientConnClosing) + }(ac) } + wg.Wait() cc.addTraceEvent("deleted") // TraceEvent needs to be called before RemoveEntry, as TraceEvent may add // trace reference to the entity being deleted, and thus prevent it from being @@ -1179,8 +1183,7 @@ type addrConn struct { addrs []resolver.Address // All addresses that the resolver resolved to. // Use updateConnectivityState for updating addrConn's connectivity state. - state connectivity.State - stateReadyChan chan struct{} // closed and recreated on every READY state change. + state connectivity.State backoffIdx int // Needs to be stateful for resetConnectBackoff. 
resetBackoff chan struct{} @@ -1251,6 +1254,8 @@ func (ac *addrConn) resetTransportAndUnlock() { ac.mu.Unlock() if err := ac.tryAllAddrs(acCtx, addrs, connectDeadline); err != nil { + // TODO: #7534 - Move re-resolution requests into the pick_first LB policy + // to ensure one resolution request per pass instead of per subconn failure. ac.cc.resolveNow(resolver.ResolveNowOptions{}) ac.mu.Lock() if acCtx.Err() != nil { @@ -1292,7 +1297,7 @@ func (ac *addrConn) resetTransportAndUnlock() { ac.mu.Unlock() } -// tryAllAddrs tries to creates a connection to the addresses, and stop when at +// tryAllAddrs tries to create a connection to the addresses, and stop when at // the first successful one. It returns an error if no address was successfully // connected, or updates ac appropriately with the new transport. func (ac *addrConn) tryAllAddrs(ctx context.Context, addrs []resolver.Address, connectDeadline time.Time) error { @@ -1504,29 +1509,6 @@ func (ac *addrConn) getReadyTransport() transport.ClientTransport { return nil } -// getTransport waits until the addrconn is ready and returns the transport. -// If the context expires first, returns an appropriate status. If the -// addrConn is stopped first, returns an Unavailable status error. -func (ac *addrConn) getTransport(ctx context.Context) (transport.ClientTransport, error) { - for ctx.Err() == nil { - ac.mu.Lock() - t, state, sc := ac.transport, ac.state, ac.stateReadyChan - ac.mu.Unlock() - if state == connectivity.Ready { - return t, nil - } - if state == connectivity.Shutdown { - return nil, status.Errorf(codes.Unavailable, "SubConn shutting down") - } - - select { - case <-ctx.Done(): - case <-sc: - } - } - return nil, status.FromContextError(ctx.Err()).Err() -} - // tearDown starts to tear down the addrConn. // // Note that tearDown doesn't remove ac from ac.cc.conns, so the addrConn struct diff --git a/vendor/google.golang.org/grpc/credentials/tls.go b/vendor/google.golang.org/grpc/credentials/tls.go index 4114358545..e163a473df 100644 --- a/vendor/google.golang.org/grpc/credentials/tls.go +++ b/vendor/google.golang.org/grpc/credentials/tls.go @@ -200,25 +200,40 @@ var tls12ForbiddenCipherSuites = map[uint16]struct{}{ // NewTLS uses c to construct a TransportCredentials based on TLS. 
func NewTLS(c *tls.Config) TransportCredentials { - tc := &tlsCreds{credinternal.CloneTLSConfig(c)} - tc.config.NextProtos = credinternal.AppendH2ToNextProtos(tc.config.NextProtos) + config := applyDefaults(c) + if config.GetConfigForClient != nil { + oldFn := config.GetConfigForClient + config.GetConfigForClient = func(hello *tls.ClientHelloInfo) (*tls.Config, error) { + cfgForClient, err := oldFn(hello) + if err != nil || cfgForClient == nil { + return cfgForClient, err + } + return applyDefaults(cfgForClient), nil + } + } + return &tlsCreds{config: config} +} + +func applyDefaults(c *tls.Config) *tls.Config { + config := credinternal.CloneTLSConfig(c) + config.NextProtos = credinternal.AppendH2ToNextProtos(config.NextProtos) // If the user did not configure a MinVersion and did not configure a // MaxVersion < 1.2, use MinVersion=1.2, which is required by // https://datatracker.ietf.org/doc/html/rfc7540#section-9.2 - if tc.config.MinVersion == 0 && (tc.config.MaxVersion == 0 || tc.config.MaxVersion >= tls.VersionTLS12) { - tc.config.MinVersion = tls.VersionTLS12 + if config.MinVersion == 0 && (config.MaxVersion == 0 || config.MaxVersion >= tls.VersionTLS12) { + config.MinVersion = tls.VersionTLS12 } // If the user did not configure CipherSuites, use all "secure" cipher // suites reported by the TLS package, but remove some explicitly forbidden // by https://datatracker.ietf.org/doc/html/rfc7540#appendix-A - if tc.config.CipherSuites == nil { + if config.CipherSuites == nil { for _, cs := range tls.CipherSuites() { if _, ok := tls12ForbiddenCipherSuites[cs.ID]; !ok { - tc.config.CipherSuites = append(tc.config.CipherSuites, cs.ID) + config.CipherSuites = append(config.CipherSuites, cs.ID) } } } - return tc + return config } // NewClientTLSFromCert constructs TLS credentials from the provided root diff --git a/vendor/google.golang.org/grpc/credentials/tls/certprovider/pemfile/builder.go b/vendor/google.golang.org/grpc/credentials/tls/certprovider/pemfile/builder.go index 8c15baeb59..ad4207892b 100644 --- a/vendor/google.golang.org/grpc/credentials/tls/certprovider/pemfile/builder.go +++ b/vendor/google.golang.org/grpc/credentials/tls/certprovider/pemfile/builder.go @@ -29,6 +29,7 @@ import ( ) const ( + // PluginName is the name of the PEM file watcher plugin. PluginName = "file_watcher" defaultRefreshInterval = 10 * time.Minute ) diff --git a/vendor/google.golang.org/grpc/dialoptions.go b/vendor/google.golang.org/grpc/dialoptions.go index 2b285beee3..518692c3af 100644 --- a/vendor/google.golang.org/grpc/dialoptions.go +++ b/vendor/google.golang.org/grpc/dialoptions.go @@ -436,7 +436,7 @@ func WithTimeout(d time.Duration) DialOption { // option to true from the Control field. For a concrete example of how to do // this, see internal.NetDialerWithTCPKeepalive(). // -// For more information, please see [issue 23459] in the Go github repo. +// For more information, please see [issue 23459] in the Go GitHub repo. 
// // [issue 23459]: https://github.com/golang/go/issues/23459 func WithContextDialer(f func(context.Context, string) (net.Conn, error)) DialOption { diff --git a/vendor/google.golang.org/grpc/internal/balancer/gracefulswitch/config.go b/vendor/google.golang.org/grpc/internal/balancer/gracefulswitch/config.go index 13821a9266..85540f86a7 100644 --- a/vendor/google.golang.org/grpc/internal/balancer/gracefulswitch/config.go +++ b/vendor/google.golang.org/grpc/internal/balancer/gracefulswitch/config.go @@ -33,6 +33,8 @@ type lbConfig struct { childConfig serviceconfig.LoadBalancingConfig } +// ChildName returns the name of the child balancer of the gracefulswitch +// Balancer. func ChildName(l serviceconfig.LoadBalancingConfig) string { return l.(*lbConfig).childBuilder.Name() } diff --git a/vendor/google.golang.org/grpc/internal/channelz/channel.go b/vendor/google.golang.org/grpc/internal/channelz/channel.go index d7e9e1d54e..3ec662799a 100644 --- a/vendor/google.golang.org/grpc/internal/channelz/channel.go +++ b/vendor/google.golang.org/grpc/internal/channelz/channel.go @@ -43,6 +43,8 @@ type Channel struct { // Non-zero traceRefCount means the trace of this channel cannot be deleted. traceRefCount int32 + // ChannelMetrics holds connectivity state, target and call metrics for the + // channel within channelz. ChannelMetrics ChannelMetrics } @@ -50,6 +52,8 @@ type Channel struct { // nesting. func (c *Channel) channelzIdentifier() {} +// String returns a string representation of the Channel, including its parent +// entity and ID. func (c *Channel) String() string { if c.Parent == nil { return fmt.Sprintf("Channel #%d", c.ID) @@ -61,24 +65,31 @@ func (c *Channel) id() int64 { return c.ID } +// SubChans returns a copy of the map of sub-channels associated with the +// Channel. func (c *Channel) SubChans() map[int64]string { db.mu.RLock() defer db.mu.RUnlock() return copyMap(c.subChans) } +// NestedChans returns a copy of the map of nested channels associated with the +// Channel. func (c *Channel) NestedChans() map[int64]string { db.mu.RLock() defer db.mu.RUnlock() return copyMap(c.nestedChans) } +// Trace returns a copy of the Channel's trace data. func (c *Channel) Trace() *ChannelTrace { db.mu.RLock() defer db.mu.RUnlock() return c.trace.copy() } +// ChannelMetrics holds connectivity state, target and call metrics for the +// channel within channelz. type ChannelMetrics struct { // The current connectivity state of the channel. State atomic.Pointer[connectivity.State] @@ -136,12 +147,16 @@ func strFromPointer(s *string) string { return *s } +// String returns a string representation of the ChannelMetrics, including its +// state, target, and call metrics. func (c *ChannelMetrics) String() string { return fmt.Sprintf("State: %v, Target: %s, CallsStarted: %v, CallsSucceeded: %v, CallsFailed: %v, LastCallStartedTimestamp: %v", c.State.Load(), strFromPointer(c.Target.Load()), c.CallsStarted.Load(), c.CallsSucceeded.Load(), c.CallsFailed.Load(), c.LastCallStartedTimestamp.Load(), ) } +// NewChannelMetricForTesting creates a new instance of ChannelMetrics with +// specified initial values for testing purposes. 
func NewChannelMetricForTesting(state connectivity.State, target string, started, succeeded, failed, timestamp int64) *ChannelMetrics { c := &ChannelMetrics{} c.State.Store(&state) diff --git a/vendor/google.golang.org/grpc/internal/channelz/server.go b/vendor/google.golang.org/grpc/internal/channelz/server.go index cdfc49d6ea..b5a8249929 100644 --- a/vendor/google.golang.org/grpc/internal/channelz/server.go +++ b/vendor/google.golang.org/grpc/internal/channelz/server.go @@ -59,6 +59,8 @@ func NewServerMetricsForTesting(started, succeeded, failed, timestamp int64) *Se return sm } +// CopyFrom copies the metrics data from the provided ServerMetrics +// instance into the current instance. func (sm *ServerMetrics) CopyFrom(o *ServerMetrics) { sm.CallsStarted.Store(o.CallsStarted.Load()) sm.CallsSucceeded.Store(o.CallsSucceeded.Load()) diff --git a/vendor/google.golang.org/grpc/internal/channelz/socket.go b/vendor/google.golang.org/grpc/internal/channelz/socket.go index fa64834b25..90103847c5 100644 --- a/vendor/google.golang.org/grpc/internal/channelz/socket.go +++ b/vendor/google.golang.org/grpc/internal/channelz/socket.go @@ -70,13 +70,18 @@ type EphemeralSocketMetrics struct { RemoteFlowControlWindow int64 } +// SocketType represents the type of socket. type SocketType string +// SocketType can be one of these. const ( SocketTypeNormal = "NormalSocket" SocketTypeListen = "ListenSocket" ) +// Socket represents a socket within channelz which includes socket +// metrics and data related to socket activity and provides methods +// for managing and interacting with sockets. type Socket struct { Entity SocketType SocketType @@ -100,6 +105,8 @@ type Socket struct { Security credentials.ChannelzSecurityValue } +// String returns a string representation of the Socket, including its parent +// entity, socket type, and ID. func (ls *Socket) String() string { return fmt.Sprintf("%s %s #%d", ls.Parent, ls.SocketType, ls.ID) } diff --git a/vendor/google.golang.org/grpc/internal/channelz/subchannel.go b/vendor/google.golang.org/grpc/internal/channelz/subchannel.go index 3b88e4cba8..b20802e6e9 100644 --- a/vendor/google.golang.org/grpc/internal/channelz/subchannel.go +++ b/vendor/google.golang.org/grpc/internal/channelz/subchannel.go @@ -47,12 +47,14 @@ func (sc *SubChannel) id() int64 { return sc.ID } +// Sockets returns a copy of the sockets map associated with the SubChannel. func (sc *SubChannel) Sockets() map[int64]string { db.mu.RLock() defer db.mu.RUnlock() return copyMap(sc.sockets) } +// Trace returns a copy of the ChannelTrace associated with the SubChannel. func (sc *SubChannel) Trace() *ChannelTrace { db.mu.RLock() defer db.mu.RUnlock() diff --git a/vendor/google.golang.org/grpc/internal/channelz/trace.go b/vendor/google.golang.org/grpc/internal/channelz/trace.go index 36b8674032..2bffe47776 100644 --- a/vendor/google.golang.org/grpc/internal/channelz/trace.go +++ b/vendor/google.golang.org/grpc/internal/channelz/trace.go @@ -79,13 +79,21 @@ type TraceEvent struct { Parent *TraceEvent } +// ChannelTrace provides tracing information for a channel. +// It tracks various events and metadata related to the channel's lifecycle +// and operations. type ChannelTrace struct { - cm *channelMap - clearCalled bool + cm *channelMap + clearCalled bool + // The time when the trace was created. CreationTime time.Time - EventNum int64 - mu sync.Mutex - Events []*traceEvent + // A counter for the number of events recorded in the + // trace. 
+ EventNum int64 + mu sync.Mutex + // A slice of traceEvent pointers representing the events recorded for + // this channel. + Events []*traceEvent } func (c *ChannelTrace) copy() *ChannelTrace { @@ -175,6 +183,7 @@ var refChannelTypeToString = map[RefChannelType]string{ RefNormalSocket: "NormalSocket", } +// String returns a string representation of the RefChannelType func (r RefChannelType) String() string { return refChannelTypeToString[r] } diff --git a/vendor/google.golang.org/grpc/internal/envconfig/envconfig.go b/vendor/google.golang.org/grpc/internal/envconfig/envconfig.go index 452985f8d8..6e7dd6b772 100644 --- a/vendor/google.golang.org/grpc/internal/envconfig/envconfig.go +++ b/vendor/google.golang.org/grpc/internal/envconfig/envconfig.go @@ -50,6 +50,11 @@ var ( // xDS fallback is turned on. If this is unset or is false, only the first // xDS server in the list of server configs will be used. XDSFallbackSupport = boolFromEnv("GRPC_EXPERIMENTAL_XDS_FALLBACK", false) + // NewPickFirstEnabled is set if the new pickfirst leaf policy is to be used + // instead of the existing pickfirst implementation. This can be enabled by + // setting the environment variable "GRPC_EXPERIMENTAL_ENABLE_NEW_PICK_FIRST" + // to "true". + NewPickFirstEnabled = boolFromEnv("GRPC_EXPERIMENTAL_ENABLE_NEW_PICK_FIRST", false) ) func boolFromEnv(envVar string, def bool) bool { diff --git a/vendor/google.golang.org/grpc/internal/grpcsync/callback_serializer.go b/vendor/google.golang.org/grpc/internal/grpcsync/callback_serializer.go index 19b9d63927..8e8e861280 100644 --- a/vendor/google.golang.org/grpc/internal/grpcsync/callback_serializer.go +++ b/vendor/google.golang.org/grpc/internal/grpcsync/callback_serializer.go @@ -53,7 +53,7 @@ func NewCallbackSerializer(ctx context.Context) *CallbackSerializer { return cs } -// TrySchedule tries to schedules the provided callback function f to be +// TrySchedule tries to schedule the provided callback function f to be // executed in the order it was added. This is a best-effort operation. If the // context passed to NewCallbackSerializer was canceled before this method is // called, the callback will not be scheduled. diff --git a/vendor/google.golang.org/grpc/internal/grpcutil/method.go b/vendor/google.golang.org/grpc/internal/grpcutil/method.go index ec62b4775e..683d1955c6 100644 --- a/vendor/google.golang.org/grpc/internal/grpcutil/method.go +++ b/vendor/google.golang.org/grpc/internal/grpcutil/method.go @@ -39,7 +39,7 @@ func ParseMethod(methodName string) (service, method string, _ error) { } // baseContentType is the base content-type for gRPC. This is a valid -// content-type on it's own, but can also include a content-subtype such as +// content-type on its own, but can also include a content-subtype such as // "proto" as a suffix after "+" or ";". See // https://github.com/grpc/grpc/blob/master/doc/PROTOCOL-HTTP2.md#requests // for more details. diff --git a/vendor/google.golang.org/grpc/internal/idle/idle.go b/vendor/google.golang.org/grpc/internal/idle/idle.go index fe49cb74c5..2c13ee9dac 100644 --- a/vendor/google.golang.org/grpc/internal/idle/idle.go +++ b/vendor/google.golang.org/grpc/internal/idle/idle.go @@ -182,6 +182,7 @@ func (m *Manager) tryEnterIdleMode() bool { return true } +// EnterIdleModeForTesting instructs the channel to enter idle mode. func (m *Manager) EnterIdleModeForTesting() { m.tryEnterIdleMode() } @@ -225,7 +226,7 @@ func (m *Manager) ExitIdleMode() error { // came in and OnCallBegin() noticed that the calls count is negative.
// - Channel is in idle mode, and multiple new RPCs come in at the same // time, all of them notice a negative calls count in OnCallBegin and get - // here. The first one to get the lock would got the channel to exit idle. + // here. The first one to get the lock would get the channel to exit idle. // - Channel is not in idle mode, and the user calls Connect which calls // m.ExitIdleMode. // @@ -266,6 +267,7 @@ func (m *Manager) isClosed() bool { return atomic.LoadInt32(&m.closed) == 1 } +// Close stops the timer associated with the Manager, if it exists. func (m *Manager) Close() { atomic.StoreInt32(&m.closed, 1) diff --git a/vendor/google.golang.org/grpc/internal/internal.go b/vendor/google.golang.org/grpc/internal/internal.go index 7aae9240ff..20b4dc3d35 100644 --- a/vendor/google.golang.org/grpc/internal/internal.go +++ b/vendor/google.golang.org/grpc/internal/internal.go @@ -191,6 +191,8 @@ var ( // ExitIdleModeForTesting gets the ClientConn to exit IDLE mode. ExitIdleModeForTesting any // func(*grpc.ClientConn) error + // ChannelzTurnOffForTesting disables the Channelz service for testing + // purposes. ChannelzTurnOffForTesting func() // TriggerXDSResourceNotFoundForTesting causes the provided xDS Client to @@ -205,10 +207,6 @@ var ( // default resolver scheme. UserSetDefaultScheme = false - // ShuffleAddressListForTesting pseudo-randomizes the order of addresses. n - // is the number of elements. swap swaps the elements with indexes i and j. - ShuffleAddressListForTesting any // func(n int, swap func(i, j int)) - // ConnectedAddress returns the connected address for a SubConnState. The // address is only valid if the state is READY. ConnectedAddress any // func (scs SubConnState) resolver.Address @@ -235,7 +233,7 @@ var ( // // The implementation is expected to create a health checking RPC stream by // calling newStream(), watch for the health status of serviceName, and report -// it's health back by calling setConnectivityState(). +// its health back by calling setConnectivityState(). // // The health checking protocol is defined at: // https://github.com/grpc/grpc/blob/master/doc/health-checking.md diff --git a/vendor/google.golang.org/grpc/internal/resolver/dns/dns_resolver.go b/vendor/google.golang.org/grpc/internal/resolver/dns/dns_resolver.go index 4552db16b0..8691698ef2 100644 --- a/vendor/google.golang.org/grpc/internal/resolver/dns/dns_resolver.go +++ b/vendor/google.golang.org/grpc/internal/resolver/dns/dns_resolver.go @@ -177,7 +177,7 @@ type dnsResolver struct { // finished. Otherwise, data race will be possible. [Race Example] in // dns_resolver_test we replace the real lookup functions with mocked ones to // facilitate testing. If Close() doesn't wait for watcher() goroutine - // finishes, race detector sometimes will warns lookup (READ the lookup + // finishes, race detector sometimes will warn lookup (READ the lookup // function pointers) inside watcher() goroutine has data race with // replaceNetFunc (WRITE the lookup function pointers). 
wg sync.WaitGroup diff --git a/vendor/google.golang.org/grpc/internal/stats/metrics_recorder_list.go b/vendor/google.golang.org/grpc/internal/stats/metrics_recorder_list.go index be110d41f9..79044657be 100644 --- a/vendor/google.golang.org/grpc/internal/stats/metrics_recorder_list.go +++ b/vendor/google.golang.org/grpc/internal/stats/metrics_recorder_list.go @@ -54,6 +54,8 @@ func verifyLabels(desc *estats.MetricDescriptor, labelsRecv ...string) { } } +// RecordInt64Count records the measurement alongside labels on the int +// count associated with the provided handle. func (l *MetricsRecorderList) RecordInt64Count(handle *estats.Int64CountHandle, incr int64, labels ...string) { verifyLabels(handle.Descriptor(), labels...) @@ -62,6 +64,8 @@ func (l *MetricsRecorderList) RecordInt64Count(handle *estats.Int64CountHandle, } } +// RecordFloat64Count records the measurement alongside labels on the float +// count associated with the provided handle. func (l *MetricsRecorderList) RecordFloat64Count(handle *estats.Float64CountHandle, incr float64, labels ...string) { verifyLabels(handle.Descriptor(), labels...) @@ -70,6 +74,8 @@ func (l *MetricsRecorderList) RecordFloat64Count(handle *estats.Float64CountHand } } +// RecordInt64Histo records the measurement alongside labels on the int +// histo associated with the provided handle. func (l *MetricsRecorderList) RecordInt64Histo(handle *estats.Int64HistoHandle, incr int64, labels ...string) { verifyLabels(handle.Descriptor(), labels...) @@ -78,6 +84,8 @@ func (l *MetricsRecorderList) RecordInt64Histo(handle *estats.Int64HistoHandle, } } +// RecordFloat64Histo records the measurement alongside labels on the float +// histo associated with the provided handle. func (l *MetricsRecorderList) RecordFloat64Histo(handle *estats.Float64HistoHandle, incr float64, labels ...string) { verifyLabels(handle.Descriptor(), labels...) @@ -86,6 +94,8 @@ func (l *MetricsRecorderList) RecordFloat64Histo(handle *estats.Float64HistoHand } } +// RecordInt64Gauge records the measurement alongside labels on the int +// gauge associated with the provided handle. func (l *MetricsRecorderList) RecordInt64Gauge(handle *estats.Int64GaugeHandle, incr int64, labels ...string) { verifyLabels(handle.Descriptor(), labels...) diff --git a/vendor/google.golang.org/grpc/internal/status/status.go b/vendor/google.golang.org/grpc/internal/status/status.go index 757925381f..1186f1e9a9 100644 --- a/vendor/google.golang.org/grpc/internal/status/status.go +++ b/vendor/google.golang.org/grpc/internal/status/status.go @@ -149,6 +149,8 @@ func (s *Status) WithDetails(details ...protoadapt.MessageV1) (*Status, error) { // Details returns a slice of details messages attached to the status. // If a detail cannot be decoded, the error is returned in place of the detail. +// If the detail can be decoded, the proto message returned is of the same +// type that was given to WithDetails(). func (s *Status) Details() []any { if s == nil || s.s == nil { return nil @@ -160,7 +162,38 @@ func (s *Status) Details() []any { details = append(details, err) continue } - details = append(details, detail) + // The call to MessageV1Of is required to unwrap the proto message if + // it implemented only the MessageV1 API. The proto message would have + // been wrapped in a V2 wrapper in Status.WithDetails. V2 messages are + // added to a global registry used by any.UnmarshalNew(). + // MessageV1Of has the following behaviour: + // 1. If the given message is a wrapped MessageV1, it returns the + // unwrapped value. 
+ // 2. If the given message already implements MessageV1, it returns it + // as is. + // 3. Else, it wraps the MessageV2 in a MessageV1 wrapper. + // + // Since the Status.WithDetails() API only accepts MessageV1, calling + // MessageV1Of ensures we return the same type that was given to + // WithDetails: + // * If the given type implemented only MessageV1, the unwrapping from + // point 1 above will restore the type. + // * If the given type implemented both MessageV1 and MessageV2, point 2 + // above will ensure no wrapping is performed. + // * If the given type implemented only MessageV2 and was wrapped using + // MessageV1Of before passing to WithDetails(), it would be unwrapped + // in WithDetails by calling MessageV2Of(). Point 3 above will ensure + // that the type is wrapped in a MessageV1 wrapper again before + // returning. Note that protoc-gen-go doesn't generate code which + // implements ONLY MessageV2 at the time of writing. + // + // NOTE: Status details can also be added using the FromProto method. + // This could theoretically allow passing a Detail message that only + // implements the V2 API. In such a case the message will be wrapped in + // a MessageV1 wrapper when fetched using Details(). + // Since protoc-gen-go generates only code that implements both V1 and + // V2 APIs for backward compatibility, this is not a concern. + details = append(details, protoadapt.MessageV1Of(detail)) } return details } diff --git a/vendor/google.golang.org/grpc/internal/transport/http2_client.go b/vendor/google.golang.org/grpc/internal/transport/http2_client.go index c769deab53..62b81885d8 100644 --- a/vendor/google.golang.org/grpc/internal/transport/http2_client.go +++ b/vendor/google.golang.org/grpc/internal/transport/http2_client.go @@ -86,9 +86,9 @@ type http2Client struct { writerDone chan struct{} // sync point to enable testing. // goAway is closed to notify the upper layer (i.e., addrConn.transportMonitor) // that the server sent GoAway on this transport. - goAway chan struct{} - - framer *framer + goAway chan struct{} + keepaliveDone chan struct{} // Closed when the keepalive goroutine exits. + framer *framer // controlBuf delivers all the control related tasks (e.g., window // updates, reset streams, and various settings) to the controller. // Do not access controlBuf with mu held. @@ -335,6 +335,7 @@ func newHTTP2Client(connectCtx, ctx context.Context, addr resolver.Address, opts readerDone: make(chan struct{}), writerDone: make(chan struct{}), goAway: make(chan struct{}), + keepaliveDone: make(chan struct{}), framer: newFramer(conn, writeBufSize, readBufSize, opts.SharedWriteBuffer, maxHeaderListSize), fc: &trInFlow{limit: uint32(icwz)}, scheme: scheme, @@ -527,8 +528,9 @@ func (t *http2Client) getPeer() *peer.Peer { // to be the last frame loopy writes to the transport. func (t *http2Client) outgoingGoAwayHandler(g *goAway) (bool, error) { t.mu.Lock() - defer t.mu.Unlock() - if err := t.framer.fr.WriteGoAway(t.nextID-2, http2.ErrCodeNo, g.debugData); err != nil { + maxStreamID := t.nextID - 2 + t.mu.Unlock() + if err := t.framer.fr.WriteGoAway(maxStreamID, http2.ErrCodeNo, g.debugData); err != nil { return false, err } return false, g.closeConn @@ -1008,6 +1010,9 @@ func (t *http2Client) Close(err error) { // should unblock it so that the goroutine eventually exits. t.kpDormancyCond.Signal() } + // Append info about previous goaways if there were any, since this may be important + // for understanding the root cause for this connection to be closed.
+ goAwayDebugMessage := t.goAwayDebugMessage t.mu.Unlock() // Per HTTP/2 spec, a GOAWAY frame must be sent before closing the @@ -1025,11 +1030,13 @@ func (t *http2Client) Close(err error) { } t.cancel() t.conn.Close() + // Waits for the reader and keepalive goroutines to exit before returning to + // ensure all resources are cleaned up before Close can return. + <-t.readerDone + if t.keepaliveEnabled { + <-t.keepaliveDone + } channelz.RemoveEntry(t.channelz.ID) - // Append info about previous goaways if there were any, since this may be important - // for understanding the root cause for this connection to be closed. - _, goAwayDebugMessage := t.GetGoAwayReason() - var st *status.Status if len(goAwayDebugMessage) > 0 { st = status.Newf(codes.Unavailable, "closing transport due to: %v, received prior goaway: %v", err, goAwayDebugMessage) @@ -1316,11 +1323,11 @@ func (t *http2Client) handlePing(f *http2.PingFrame) { t.controlBuf.put(pingAck) } -func (t *http2Client) handleGoAway(f *http2.GoAwayFrame) { +func (t *http2Client) handleGoAway(f *http2.GoAwayFrame) error { t.mu.Lock() if t.state == closing { t.mu.Unlock() - return + return nil } if f.ErrCode == http2.ErrCodeEnhanceYourCalm && string(f.DebugData()) == "too_many_pings" { // When a client receives a GOAWAY with error code ENHANCE_YOUR_CALM and debug @@ -1332,8 +1339,7 @@ func (t *http2Client) handleGoAway(f *http2.GoAwayFrame) { id := f.LastStreamID if id > 0 && id%2 == 0 { t.mu.Unlock() - t.Close(connectionErrorf(true, nil, "received goaway with non-zero even-numbered stream id: %v", id)) - return + return connectionErrorf(true, nil, "received goaway with non-zero even-numbered stream id: %v", id) } // A client can receive multiple GoAways from the server (see // https://github.com/grpc/grpc-go/issues/1387). The idea is that the first @@ -1350,8 +1356,7 @@ func (t *http2Client) handleGoAway(f *http2.GoAwayFrame) { // If there are multiple GoAways the first one should always have an ID greater than the following ones. if id > t.prevGoAwayID { t.mu.Unlock() - t.Close(connectionErrorf(true, nil, "received goaway with stream id: %v, which exceeds stream id of previous goaway: %v", id, t.prevGoAwayID)) - return + return connectionErrorf(true, nil, "received goaway with stream id: %v, which exceeds stream id of previous goaway: %v", id, t.prevGoAwayID) } default: t.setGoAwayReason(f) @@ -1375,8 +1380,7 @@ func (t *http2Client) handleGoAway(f *http2.GoAwayFrame) { t.prevGoAwayID = id if len(t.activeStreams) == 0 { t.mu.Unlock() - t.Close(connectionErrorf(true, nil, "received goaway and there are no active streams")) - return + return connectionErrorf(true, nil, "received goaway and there are no active streams") } streamsToClose := make([]*Stream, 0) @@ -1393,6 +1397,7 @@ func (t *http2Client) handleGoAway(f *http2.GoAwayFrame) { for _, stream := range streamsToClose { t.closeStream(stream, errStreamDrain, false, http2.ErrCodeNo, statusGoAway, nil, false) } + return nil } // setGoAwayReason sets the value of t.goAwayReason based @@ -1628,7 +1633,13 @@ func (t *http2Client) readServerPreface() error { // network connection. If the server preface is not read successfully, an // error is pushed to errCh; otherwise errCh is closed with no error. 
func (t *http2Client) reader(errCh chan<- error) { - defer close(t.readerDone) + var errClose error + defer func() { + close(t.readerDone) + if errClose != nil { + t.Close(errClose) + } + }() if err := t.readServerPreface(); err != nil { errCh <- err @@ -1669,7 +1680,7 @@ func (t *http2Client) reader(errCh chan<- error) { continue } // Transport error. - t.Close(connectionErrorf(true, err, "error reading from server: %v", err)) + errClose = connectionErrorf(true, err, "error reading from server: %v", err) return } switch frame := frame.(type) { @@ -1684,7 +1695,7 @@ func (t *http2Client) reader(errCh chan<- error) { case *http2.PingFrame: t.handlePing(frame) case *http2.GoAwayFrame: - t.handleGoAway(frame) + errClose = t.handleGoAway(frame) case *http2.WindowUpdateFrame: t.handleWindowUpdate(frame) default: @@ -1697,6 +1708,13 @@ func (t *http2Client) reader(errCh chan<- error) { // keepalive running in a separate goroutine makes sure the connection is alive by sending pings. func (t *http2Client) keepalive() { + var err error + defer func() { + close(t.keepaliveDone) + if err != nil { + t.Close(err) + } + }() p := &ping{data: [8]byte{}} // True iff a ping has been sent, and no data has been received since then. outstandingPing := false @@ -1720,7 +1738,7 @@ func (t *http2Client) keepalive() { continue } if outstandingPing && timeoutLeft <= 0 { - t.Close(connectionErrorf(true, nil, "keepalive ping failed to receive ACK within timeout")) + err = connectionErrorf(true, nil, "keepalive ping failed to receive ACK within timeout") return } t.mu.Lock() diff --git a/vendor/google.golang.org/grpc/internal/transport/transport.go b/vendor/google.golang.org/grpc/internal/transport/transport.go index 924ba4f365..e12cb0bc91 100644 --- a/vendor/google.golang.org/grpc/internal/transport/transport.go +++ b/vendor/google.golang.org/grpc/internal/transport/transport.go @@ -547,6 +547,15 @@ func (s *Stream) write(m recvMsg) { s.buf.put(m) } +// ReadHeader reads data into the provided header slice from the stream. It +// first checks if there was an error during a previous read operation and +// returns it if present. It then requests a read operation for the length of +// the header. It continues to read from the stream until the entire header +// slice is filled or an error occurs. If an `io.EOF` error is encountered +// with partially read data, it is converted to `io.ErrUnexpectedEOF` to +// indicate an unexpected end of the stream. The method returns any error +// encountered during the read process or nil if the header was successfully +// read. func (s *Stream) ReadHeader(header []byte) (err error) { // Don't request a read if there was an error earlier if er := s.trReader.er; er != nil { diff --git a/vendor/google.golang.org/grpc/internal/xds/bootstrap/bootstrap.go b/vendor/google.golang.org/grpc/internal/xds/bootstrap/bootstrap.go index 8317859e1e..35aeea701a 100644 --- a/vendor/google.golang.org/grpc/internal/xds/bootstrap/bootstrap.go +++ b/vendor/google.golang.org/grpc/internal/xds/bootstrap/bootstrap.go @@ -22,9 +22,11 @@ package bootstrap import ( "bytes" + "context" "encoding/json" "fmt" "maps" + "net" "net/url" "os" "slices" @@ -179,6 +181,7 @@ type ServerConfig struct { // credentials and store it here for easy access. 
selectedCreds ChannelCreds credsDialOption grpc.DialOption + dialerOption grpc.DialOption cleanups []func() } @@ -217,10 +220,14 @@ func (sc *ServerConfig) ServerFeaturesIgnoreResourceDeletion() bool { return false } -// CredsDialOption returns the first supported transport credentials from the -// configuration, as a dial option. -func (sc *ServerConfig) CredsDialOption() grpc.DialOption { - return sc.credsDialOption +// DialOptions returns a slice of all the configured dial options for this +// server. +func (sc *ServerConfig) DialOptions() []grpc.DialOption { + dopts := []grpc.DialOption{sc.credsDialOption} + if sc.dialerOption != nil { + dopts = append(dopts, sc.dialerOption) + } + return dopts } // Cleanups returns a collection of functions to be called when the xDS client @@ -275,6 +282,12 @@ func (sc *ServerConfig) MarshalJSON() ([]byte, error) { return json.Marshal(server) } +// dialer captures the Dialer method specified via the credentials bundle. +type dialer interface { + // Dialer specifies how to dial the xDS server. + Dialer(context.Context, string) (net.Conn, error) +} + // UnmarshalJSON takes the json data (a server) and unmarshals it to the struct. func (sc *ServerConfig) UnmarshalJSON(data []byte) error { server := serverConfigJSON{} @@ -298,6 +311,9 @@ func (sc *ServerConfig) UnmarshalJSON(data []byte) error { } sc.selectedCreds = cc sc.credsDialOption = grpc.WithCredentialsBundle(bundle) + if d, ok := bundle.(dialer); ok { + sc.dialerOption = grpc.WithContextDialer(d.Dialer) + } sc.cleanups = append(sc.cleanups, cancel) break } diff --git a/vendor/google.golang.org/grpc/internal/xds/matcher/matcher_header.go b/vendor/google.golang.org/grpc/internal/xds/matcher/matcher_header.go index 01433f4122..1893526c53 100644 --- a/vendor/google.golang.org/grpc/internal/xds/matcher/matcher_header.go +++ b/vendor/google.golang.org/grpc/internal/xds/matcher/matcher_header.go @@ -222,7 +222,7 @@ type HeaderContainsMatcher struct { // NewHeaderContainsMatcher returns a new HeaderContainsMatcher. key is the HTTP // Header key to match on, and contains is the value that the header should -// should contain for a successful match. An empty contains string does not +// contain for a successful match. An empty contains string does not // work, use HeaderPresentMatcher in that case. func NewHeaderContainsMatcher(key string, contains string, invert bool) *HeaderContainsMatcher { return &HeaderContainsMatcher{key: key, contains: contains, invert: invert} diff --git a/vendor/google.golang.org/grpc/mem/buffers.go b/vendor/google.golang.org/grpc/mem/buffers.go index 4d66b2ccc2..ecbf0b9a73 100644 --- a/vendor/google.golang.org/grpc/mem/buffers.go +++ b/vendor/google.golang.org/grpc/mem/buffers.go @@ -65,6 +65,9 @@ var ( refObjectPool = sync.Pool{New: func() any { return new(atomic.Int32) }} ) +// IsBelowBufferPoolingThreshold returns true if the given size is less than or +// equal to the threshold for buffer pooling. This is used to determine whether +// to pool buffers or allocate them directly. func IsBelowBufferPoolingThreshold(size int) bool { return size <= bufferPoolingThreshold } @@ -89,7 +92,11 @@ func newBuffer() *buffer { // // Note that the backing array of the given data is not copied. func NewBuffer(data *[]byte, pool BufferPool) Buffer { - if pool == nil || IsBelowBufferPoolingThreshold(len(*data)) { + // Use the buffer's capacity instead of the length, otherwise buffers may + // not be reused under certain conditions. 
For example, if a large buffer + // is acquired from the pool, but fewer bytes than the buffering threshold + // are written to it, the buffer will not be returned to the pool. + if pool == nil || IsBelowBufferPoolingThreshold(cap(*data)) { return (SliceBuffer)(*data) } b := newBuffer() @@ -194,19 +201,19 @@ func (b *buffer) read(buf []byte) (int, Buffer) { return n, b } -// String returns a string representation of the buffer. May be used for -// debugging purposes. func (b *buffer) String() string { return fmt.Sprintf("mem.Buffer(%p, data: %p, length: %d)", b, b.ReadOnlyData(), len(b.ReadOnlyData())) } +// ReadUnsafe reads bytes from the given Buffer into the provided slice. +// It does not perform safety checks. func ReadUnsafe(dst []byte, buf Buffer) (int, Buffer) { return buf.read(dst) } // SplitUnsafe modifies the receiver to point to the first n bytes while it -// returns a new reference to the remaining bytes. The returned Buffer functions -// just like a normal reference acquired using Ref(). +// returns a new reference to the remaining bytes. The returned Buffer +// functions just like a normal reference acquired using Ref(). func SplitUnsafe(buf Buffer, n int) (left, right Buffer) { return buf.split(n) } @@ -232,12 +239,21 @@ func (e emptyBuffer) read([]byte) (int, Buffer) { return 0, e } +// SliceBuffer is a Buffer implementation that wraps a byte slice. It provides +// methods for reading, splitting, and managing the byte slice. type SliceBuffer []byte +// ReadOnlyData returns the byte slice. func (s SliceBuffer) ReadOnlyData() []byte { return s } -func (s SliceBuffer) Ref() {} -func (s SliceBuffer) Free() {} -func (s SliceBuffer) Len() int { return len(s) } + +// Ref is a noop implementation of Ref. +func (s SliceBuffer) Ref() {} + +// Free is a noop implementation of Free. +func (s SliceBuffer) Free() {} + +// Len returns the length of the underlying byte slice. +func (s SliceBuffer) Len() int { return len(s) } func (s SliceBuffer) split(n int) (left, right Buffer) { return s[:n], s[n:] diff --git a/vendor/google.golang.org/grpc/orca/producer.go b/vendor/google.golang.org/grpc/orca/producer.go index 6e7c4c9f30..4d370310a0 100644 --- a/vendor/google.golang.org/grpc/orca/producer.go +++ b/vendor/google.golang.org/grpc/orca/producer.go @@ -46,6 +46,12 @@ func (*producerBuilder) Build(cci any) (balancer.Producer, func()) { backoff: internal.DefaultBackoffFunc, } return p, func() { + p.mu.Lock() + if p.stop != nil { + p.stop() + p.stop = nil + } + p.mu.Unlock() <-p.stopped } } @@ -67,9 +73,9 @@ type OOBListenerOptions struct { ReportInterval time.Duration } -// RegisterOOBListener registers an out-of-band load report listener on sc. -// Any OOBListener may only be registered once per subchannel at a time. The -// returned stop function must be called when no longer needed. Do not +// RegisterOOBListener registers an out-of-band load report listener on a Ready +// sc. Any OOBListener may only be registered once per subchannel at a time. +// The returned stop function must be called when no longer needed. Do not // register a single OOBListener more than once per SubConn. func RegisterOOBListener(sc balancer.SubConn, l OOBListener, opts OOBListenerOptions) (stop func()) { pr, closeFn := sc.GetOrBuildProducer(producerBuilderSingleton) @@ -77,9 +83,6 @@ func RegisterOOBListener(sc balancer.SubConn, l OOBListener, opts OOBListenerOpt p.registerListener(l, opts.ReportInterval) - // TODO: When we can register for SubConn state updates, automatically call - // stop() on SHUTDOWN.
- // If stop is called multiple times, prevent it from having any effect on // subsequent calls. return grpcsync.OnceFunc(func() { @@ -96,13 +99,13 @@ type producer struct { // is incremented when stream errors occur and is reset when the stream // reports a result. backoff func(int) time.Duration + stopped chan struct{} // closed when the run goroutine exits mu sync.Mutex intervals map[time.Duration]int // map from interval time to count of listeners requesting that time listeners map[OOBListener]struct{} // set of registered listeners minInterval time.Duration - stop func() // stops the current run goroutine - stopped chan struct{} // closed when the run goroutine exits + stop func() // stops the current run goroutine } // registerListener adds the listener and its requested report interval to the diff --git a/vendor/google.golang.org/grpc/orca/service.go b/vendor/google.golang.org/grpc/orca/service.go index 7461a6b05a..ffe4b5b2b5 100644 --- a/vendor/google.golang.org/grpc/orca/service.go +++ b/vendor/google.golang.org/grpc/orca/service.go @@ -111,9 +111,7 @@ func NewService(opts ServiceOptions) (*Service, error) { // Register creates a new ORCA service implementation configured using the // provided options and registers the same on the provided grpc Server. -func Register(s *grpc.Server, opts ServiceOptions) error { - // TODO(https://github.com/cncf/xds/issues/41): replace *grpc.Server with - // grpc.ServiceRegistrar when possible. +func Register(s grpc.ServiceRegistrar, opts ServiceOptions) error { service, err := NewService(opts) if err != nil { return err diff --git a/vendor/google.golang.org/grpc/rpc_util.go b/vendor/google.golang.org/grpc/rpc_util.go index 2d96f1405e..aba1ae3e67 100644 --- a/vendor/google.golang.org/grpc/rpc_util.go +++ b/vendor/google.golang.org/grpc/rpc_util.go @@ -791,9 +791,8 @@ func checkRecvPayload(pf payloadFormat, recvCompress string, haveCompressor bool if !haveCompressor { if isServer { return status.Newf(codes.Unimplemented, "grpc: Decompressor is not installed for grpc-encoding %q", recvCompress) - } else { - return status.Newf(codes.Internal, "grpc: Decompressor is not installed for grpc-encoding %q", recvCompress) } + return status.Newf(codes.Internal, "grpc: Decompressor is not installed for grpc-encoding %q", recvCompress) } default: return status.Newf(codes.Internal, "grpc: received unexpected payload format %d", pf) diff --git a/vendor/google.golang.org/grpc/version.go b/vendor/google.golang.org/grpc/version.go index a96b6a6bff..d50e843598 100644 --- a/vendor/google.golang.org/grpc/version.go +++ b/vendor/google.golang.org/grpc/version.go @@ -19,4 +19,4 @@ package grpc // Version is the current grpc version. -const Version = "1.67.1" +const Version = "1.68.0" diff --git a/vendor/google.golang.org/grpc/xds/internal/balancer/clusterimpl/clusterimpl.go b/vendor/google.golang.org/grpc/xds/internal/balancer/clusterimpl/clusterimpl.go index 0dc71dfede..bbd754c3f4 100644 --- a/vendor/google.golang.org/grpc/xds/internal/balancer/clusterimpl/clusterimpl.go +++ b/vendor/google.golang.org/grpc/xds/internal/balancer/clusterimpl/clusterimpl.go @@ -231,11 +231,16 @@ func (b *clusterImplBalancer) updateClientConnState(s balancer.ClientConnState) return err } - if b.config == nil || b.config.ChildPolicy.Name != newConfig.ChildPolicy.Name { - if err := b.child.SwitchTo(bb); err != nil { - return fmt.Errorf("error switching to child of type %q: %v", newConfig.ChildPolicy.Name, err) - } + // Build config for the gracefulswitch balancer. 
It is safe to ignore JSON + // marshaling errors here, since the config was already validated as part of + // ParseConfig(). + cfg := []map[string]any{{newConfig.ChildPolicy.Name: newConfig.ChildPolicy.Config}} + cfgJSON, _ := json.Marshal(cfg) + parsedCfg, err := gracefulswitch.ParseConfig(cfgJSON) + if err != nil { + return err } + b.config = newConfig b.telemetryLabels = newConfig.TelemetryLabels @@ -250,7 +255,7 @@ func (b *clusterImplBalancer) updateClientConnState(s balancer.ClientConnState) // Addresses and sub-balancer config are sent to sub-balancer. return b.child.UpdateClientConnState(balancer.ClientConnState{ ResolverState: s.ResolverState, - BalancerConfig: b.config.ChildPolicy.Config, + BalancerConfig: parsedCfg, }) } diff --git a/vendor/google.golang.org/grpc/xds/internal/balancer/clusterresolver/clusterresolver.go b/vendor/google.golang.org/grpc/xds/internal/balancer/clusterresolver/clusterresolver.go index 749945059b..3f0c54e8f3 100644 --- a/vendor/google.golang.org/grpc/xds/internal/balancer/clusterresolver/clusterresolver.go +++ b/vendor/google.golang.org/grpc/xds/internal/balancer/clusterresolver/clusterresolver.go @@ -134,7 +134,7 @@ func (bb) ParseConfig(j json.RawMessage) (serviceconfig.LoadBalancingConfig, err // This will never occur, valid configuration is emitted from the xDS // Client. Validity is already checked in the xDS Client, however, this // double validation is present because Unmarshalling and Validating are - // coupled into one json.Unmarshal operation). We will switch this in + // coupled into one json.Unmarshal operation. We will switch this in // the future to two separate operations. return nil, fmt.Errorf("error unmarshalling xDS LB Policy: %v", err) } diff --git a/vendor/google.golang.org/grpc/xds/internal/balancer/clusterresolver/configbuilder.go b/vendor/google.golang.org/grpc/xds/internal/balancer/clusterresolver/configbuilder.go index 8740360eef..f62b8e6c8e 100644 --- a/vendor/google.golang.org/grpc/xds/internal/balancer/clusterresolver/configbuilder.go +++ b/vendor/google.golang.org/grpc/xds/internal/balancer/clusterresolver/configbuilder.go @@ -37,7 +37,7 @@ import ( const million = 1000000 -// priorityConfig is config for one priority. For example, if there an EDS and a +// priorityConfig is config for one priority. For example, if there's an EDS and a // DNS, the priority list will be [priorityConfig{EDS}, priorityConfig{DNS}]. // // Each priorityConfig corresponds to one discovery mechanism from the LBConfig @@ -171,7 +171,7 @@ func buildClusterImplConfigForEDS(g *nameGenerator, edsResp xdsresource.Endpoint } // Localities of length 0 is triggered by an NACK or resource-not-found - // error before update, or a empty localities list in a update. In either + // error before update, or an empty localities list in an update. In either // case want to create a priority, and send down empty address list, causing // TF for that priority. 
"If any discovery mechanism instance experiences an // error retrieving data, and it has not previously reported any results, it diff --git a/vendor/google.golang.org/grpc/xds/internal/balancer/clusterresolver/resource_resolver.go b/vendor/google.golang.org/grpc/xds/internal/balancer/clusterresolver/resource_resolver.go index 5bc64b8630..3bcfba8732 100644 --- a/vendor/google.golang.org/grpc/xds/internal/balancer/clusterresolver/resource_resolver.go +++ b/vendor/google.golang.org/grpc/xds/internal/balancer/clusterresolver/resource_resolver.go @@ -77,7 +77,7 @@ type endpointsResolver interface { // discoveryMechanismKey is {type+resource_name}, it's used as the map key, so // that the same resource resolver can be reused (e.g. when there are two // mechanisms, both for the same EDS resource, but has different circuit -// breaking config. +// breaking config). type discoveryMechanismKey struct { typ DiscoveryMechanismType name string diff --git a/vendor/google.golang.org/grpc/xds/internal/balancer/outlierdetection/balancer.go b/vendor/google.golang.org/grpc/xds/internal/balancer/outlierdetection/balancer.go index 53ba72c081..4ccff08b51 100644 --- a/vendor/google.golang.org/grpc/xds/internal/balancer/outlierdetection/balancer.go +++ b/vendor/google.golang.org/grpc/xds/internal/balancer/outlierdetection/balancer.go @@ -657,7 +657,7 @@ func (b *outlierDetectionBalancer) handleChildStateUpdate(u balancer.State) { func (b *outlierDetectionBalancer) handleLBConfigUpdate(u lbCfgUpdate) { lbCfg := u.lbCfg noopCfg := lbCfg.SuccessRateEjection == nil && lbCfg.FailurePercentageEjection == nil - // If the child has sent it's first update and this config flips the noop + // If the child has sent its first update and this config flips the noop // bit compared to the most recent picker update sent upward, then a new // picker with this updated bit needs to be forwarded upward. If a child // update was received during the suppression of child updates within diff --git a/vendor/google.golang.org/grpc/xds/internal/xdsclient/authority.go b/vendor/google.golang.org/grpc/xds/internal/xdsclient/authority.go index 3251737f18..668c436fb5 100644 --- a/vendor/google.golang.org/grpc/xds/internal/xdsclient/authority.go +++ b/vendor/google.golang.org/grpc/xds/internal/xdsclient/authority.go @@ -104,6 +104,7 @@ type authorityArgs struct { serializer *grpcsync.CallbackSerializer resourceTypeGetter func(string) xdsresource.Type watchExpiryTimeout time.Duration + backoff func(int) time.Duration // Backoff for ADS and LRS stream failures. logger *grpclog.PrefixLogger } @@ -123,6 +124,7 @@ func newAuthority(args authorityArgs) (*authority, error) { OnRecvHandler: ret.handleResourceUpdate, OnErrorHandler: ret.newConnectionError, OnSendHandler: ret.transportOnSendHandler, + Backoff: args.backoff, Logger: args.logger, NodeProto: args.bootstrapCfg.Node(), }) @@ -247,9 +249,12 @@ func (a *authority) updateResourceStateAndScheduleCallbacks(rType xdsresource.Ty state.deletionIgnored = false a.logger.Infof("A valid update was received for resource %q of type %q after previously ignoring a deletion", name, rType.TypeName()) } - // Notify watchers only if this is a first time update or it is different - // from the one currently cached. 
- if state.cache == nil || !state.cache.Equal(uErr.resource) { + // Notify watchers if any of these conditions are met: + // - this is the first update for this resource + // - this update is different from the one currently cached + // - the previous update for this resource was NACKed, but the update + // before that was the same as this update. + if state.cache == nil || !state.cache.Equal(uErr.resource) || state.md.ErrState != nil { for watcher := range state.watchers { watcher := watcher resource := uErr.resource diff --git a/vendor/google.golang.org/grpc/xds/internal/xdsclient/client_new.go b/vendor/google.golang.org/grpc/xds/internal/xdsclient/client_new.go index 6097e86925..5e11f557b2 100644 --- a/vendor/google.golang.org/grpc/xds/internal/xdsclient/client_new.go +++ b/vendor/google.golang.org/grpc/xds/internal/xdsclient/client_new.go @@ -25,6 +25,7 @@ import ( "time" "google.golang.org/grpc/internal" + "google.golang.org/grpc/internal/backoff" "google.golang.org/grpc/internal/cache" "google.golang.org/grpc/internal/grpcsync" "google.golang.org/grpc/internal/xds/bootstrap" @@ -53,16 +54,17 @@ const NameForServer = "#server" // only when all references are released, and it is safe for the caller to // invoke this close function multiple times. func New(name string) (XDSClient, func(), error) { - return newRefCounted(name, defaultWatchExpiryTimeout, defaultIdleAuthorityDeleteTimeout) + return newRefCounted(name, defaultWatchExpiryTimeout, defaultIdleAuthorityDeleteTimeout, backoff.DefaultExponential.Backoff) } // newClientImpl returns a new xdsClient with the given config. -func newClientImpl(config *bootstrap.Config, watchExpiryTimeout time.Duration, idleAuthorityDeleteTimeout time.Duration) (*clientImpl, error) { +func newClientImpl(config *bootstrap.Config, watchExpiryTimeout time.Duration, idleAuthorityDeleteTimeout time.Duration, streamBackoff func(int) time.Duration) (*clientImpl, error) { ctx, cancel := context.WithCancel(context.Background()) c := &clientImpl{ done: grpcsync.NewEvent(), config: config, watchExpiryTimeout: watchExpiryTimeout, + backoff: streamBackoff, serializer: grpcsync.NewCallbackSerializer(ctx), serializerClose: cancel, resourceTypes: newResourceTypeRegistry(), @@ -90,6 +92,11 @@ type OptionsForTesting struct { // AuthorityIdleTimeout is the timeout before idle authorities are deleted. // If unspecified, uses the default value used in non-test code. AuthorityIdleTimeout time.Duration + + // StreamBackoffAfterFailure is the backoff function used to determine the + // backoff duration after stream failures. If unspecified, uses the default + // value used in non-test code. + StreamBackoffAfterFailure func(int) time.Duration } // NewForTesting returns an xDS client configured with the provided options. 
@@ -111,11 +118,14 @@ func NewForTesting(opts OptionsForTesting) (XDSClient, func(), error) { if opts.AuthorityIdleTimeout == 0 { opts.AuthorityIdleTimeout = defaultIdleAuthorityDeleteTimeout } + if opts.StreamBackoffAfterFailure == nil { + opts.StreamBackoffAfterFailure = defaultStreamBackoffFunc + } if err := bootstrap.SetFallbackBootstrapConfig(opts.Contents); err != nil { return nil, nil, err } - client, cancel, err := newRefCounted(opts.Name, opts.WatchExpiryTimeout, opts.AuthorityIdleTimeout) + client, cancel, err := newRefCounted(opts.Name, opts.WatchExpiryTimeout, opts.AuthorityIdleTimeout, opts.StreamBackoffAfterFailure) return client, func() { bootstrap.UnsetFallbackBootstrapConfigForTesting(); cancel() }, err } diff --git a/vendor/google.golang.org/grpc/xds/internal/xdsclient/client_refcounted.go b/vendor/google.golang.org/grpc/xds/internal/xdsclient/client_refcounted.go index 1efb4de42e..a8c7213aea 100644 --- a/vendor/google.golang.org/grpc/xds/internal/xdsclient/client_refcounted.go +++ b/vendor/google.golang.org/grpc/xds/internal/xdsclient/client_refcounted.go @@ -23,6 +23,7 @@ import ( "sync/atomic" "time" + "google.golang.org/grpc/internal/backoff" "google.golang.org/grpc/internal/grpcsync" "google.golang.org/grpc/internal/xds/bootstrap" ) @@ -37,6 +38,8 @@ var ( // overridden in tests to give them visibility into certain events. xdsClientImplCreateHook = func(string) {} xdsClientImplCloseHook = func(string) {} + + defaultStreamBackoffFunc = backoff.DefaultExponential.Backoff ) func clientRefCountedClose(name string) { @@ -60,7 +63,7 @@ func clientRefCountedClose(name string) { // newRefCounted creates a new reference counted xDS client implementation for // name, if one does not exist already. If an xDS client for the given name // exists, it gets a reference to it and returns it. -func newRefCounted(name string, watchExpiryTimeout, idleAuthorityTimeout time.Duration) (XDSClient, func(), error) { +func newRefCounted(name string, watchExpiryTimeout, idleAuthorityTimeout time.Duration, streamBackoff func(int) time.Duration) (XDSClient, func(), error) { clientsMu.Lock() defer clientsMu.Unlock() @@ -74,7 +77,7 @@ func newRefCounted(name string, watchExpiryTimeout, idleAuthorityTimeout time.Du if err != nil { return nil, nil, fmt.Errorf("xds: failed to get xDS bootstrap config: %v", err) } - c, err := newClientImpl(config, watchExpiryTimeout, idleAuthorityTimeout) + c, err := newClientImpl(config, watchExpiryTimeout, idleAuthorityTimeout, streamBackoff) if err != nil { return nil, nil, err } diff --git a/vendor/google.golang.org/grpc/xds/internal/xdsclient/clientimpl.go b/vendor/google.golang.org/grpc/xds/internal/xdsclient/clientimpl.go index 9f619016a0..715b1d6154 100644 --- a/vendor/google.golang.org/grpc/xds/internal/xdsclient/clientimpl.go +++ b/vendor/google.golang.org/grpc/xds/internal/xdsclient/clientimpl.go @@ -37,6 +37,7 @@ type clientImpl struct { config *bootstrap.Config logger *grpclog.PrefixLogger watchExpiryTimeout time.Duration + backoff func(int) time.Duration // Backoff for ADS and LRS stream failures. 
serializer *grpcsync.CallbackSerializer serializerClose func() resourceTypes *resourceTypeRegistry diff --git a/vendor/google.golang.org/grpc/xds/internal/xdsclient/clientimpl_authority.go b/vendor/google.golang.org/grpc/xds/internal/xdsclient/clientimpl_authority.go index 1ce20fabdf..56c26b8175 100644 --- a/vendor/google.golang.org/grpc/xds/internal/xdsclient/clientimpl_authority.go +++ b/vendor/google.golang.org/grpc/xds/internal/xdsclient/clientimpl_authority.go @@ -114,6 +114,7 @@ func (c *clientImpl) newAuthorityLocked(config *bootstrap.ServerConfig) (_ *auth serializer: c.serializer, resourceTypeGetter: c.resourceTypes.get, watchExpiryTimeout: c.watchExpiryTimeout, + backoff: c.backoff, logger: grpclog.NewPrefixLogger(logger, authorityPrefix(c, config.ServerURI())), }) if err != nil { diff --git a/vendor/google.golang.org/grpc/xds/internal/xdsclient/transport/transport.go b/vendor/google.golang.org/grpc/xds/internal/xdsclient/transport/transport.go index 0bc0d38680..59b221727a 100644 --- a/vendor/google.golang.org/grpc/xds/internal/xdsclient/transport/transport.go +++ b/vendor/google.golang.org/grpc/xds/internal/xdsclient/transport/transport.go @@ -192,16 +192,14 @@ func New(opts Options) (*Transport, error) { return nil, errors.New("missing OnSend callback handler when creating a new transport") } - // Dial the xDS management with the passed in credentials. - dopts := []grpc.DialOption{ - opts.ServerCfg.CredsDialOption(), - grpc.WithKeepaliveParams(keepalive.ClientParameters{ - // We decided to use these sane defaults in all languages, and - // kicked the can down the road as far making these configurable. - Time: 5 * time.Minute, - Timeout: 20 * time.Second, - }), - } + // Dial the xDS management server with dial options specified by the server + // configuration and a static keepalive configuration that is common across + // gRPC language implementations. + kpCfg := grpc.WithKeepaliveParams(keepalive.ClientParameters{ + Time: 5 * time.Minute, + Timeout: 20 * time.Second, + }) + dopts := append([]grpc.DialOption{kpCfg}, opts.ServerCfg.DialOptions()...) grpcNewClient := transportinternal.GRPCNewClient.(func(string, ...grpc.DialOption) (*grpc.ClientConn, error)) cc, err := grpcNewClient(opts.ServerCfg.ServerURI(), dopts...) 
if err != nil { diff --git a/vendor/google.golang.org/grpc/xds/internal/xdsclient/xdsresource/unmarshal_lds.go b/vendor/google.golang.org/grpc/xds/internal/xdsclient/xdsresource/unmarshal_lds.go index e9a29b9387..1b0d4599f1 100644 --- a/vendor/google.golang.org/grpc/xds/internal/xdsclient/xdsresource/unmarshal_lds.go +++ b/vendor/google.golang.org/grpc/xds/internal/xdsclient/xdsresource/unmarshal_lds.go @@ -39,7 +39,7 @@ func unmarshalListenerResource(r *anypb.Any) (string, ListenerUpdate, error) { } if !IsListenerResource(r.GetTypeUrl()) { - return "", ListenerUpdate{}, fmt.Errorf("unexpected resource type: %q ", r.GetTypeUrl()) + return "", ListenerUpdate{}, fmt.Errorf("unexpected listener resource type: %q ", r.GetTypeUrl()) } lis := &v3listenerpb.Listener{} if err := proto.Unmarshal(r.GetValue(), lis); err != nil { @@ -68,7 +68,7 @@ func processClientSideListener(lis *v3listenerpb.Listener) (*ListenerUpdate, err apiLisAny := lis.GetApiListener().GetApiListener() if !IsHTTPConnManagerResource(apiLisAny.GetTypeUrl()) { - return nil, fmt.Errorf("unexpected resource type: %q", apiLisAny.GetTypeUrl()) + return nil, fmt.Errorf("unexpected http connection manager resource type: %q", apiLisAny.GetTypeUrl()) } apiLis := &v3httppb.HttpConnectionManager{} if err := proto.Unmarshal(apiLisAny.GetValue(), apiLis); err != nil { diff --git a/vendor/google.golang.org/protobuf/encoding/protojson/well_known_types.go b/vendor/google.golang.org/protobuf/encoding/protojson/well_known_types.go index 4b177c8206..e9fe103943 100644 --- a/vendor/google.golang.org/protobuf/encoding/protojson/well_known_types.go +++ b/vendor/google.golang.org/protobuf/encoding/protojson/well_known_types.go @@ -348,7 +348,11 @@ func (d decoder) unmarshalAnyValue(unmarshal unmarshalFunc, m protoreflect.Messa switch tok.Kind() { case json.ObjectClose: if !found { - return d.newError(tok.Pos(), `missing "value" field`) + // We tolerate an omitted `value` field with the google.protobuf.Empty Well-Known-Type, + // for compatibility with other proto runtimes that have interpreted the spec differently. + if m.Descriptor().FullName() != genid.Empty_message_fullname { + return d.newError(tok.Pos(), `missing "value" field`) + } } return nil diff --git a/vendor/google.golang.org/protobuf/internal/editiondefaults/editions_defaults.binpb b/vendor/google.golang.org/protobuf/internal/editiondefaults/editions_defaults.binpb index ff6a38360a..2c0693d7ab 100644 Binary files a/vendor/google.golang.org/protobuf/internal/editiondefaults/editions_defaults.binpb and b/vendor/google.golang.org/protobuf/internal/editiondefaults/editions_defaults.binpb differ diff --git a/vendor/google.golang.org/protobuf/internal/editionssupport/editions.go b/vendor/google.golang.org/protobuf/internal/editionssupport/editions.go index 08dad7692c..bf1aba0e85 100644 --- a/vendor/google.golang.org/protobuf/internal/editionssupport/editions.go +++ b/vendor/google.golang.org/protobuf/internal/editionssupport/editions.go @@ -10,4 +10,9 @@ import "google.golang.org/protobuf/types/descriptorpb" const ( Minimum = descriptorpb.Edition_EDITION_PROTO2 Maximum = descriptorpb.Edition_EDITION_2023 + + // MaximumKnown is the maximum edition that is known to Go Protobuf, but not + // declared as supported. In other words: end users cannot use it, but + // testprotos inside Go Protobuf can. 
+ MaximumKnown = descriptorpb.Edition_EDITION_2024 ) diff --git a/vendor/google.golang.org/protobuf/internal/filedesc/desc.go b/vendor/google.golang.org/protobuf/internal/filedesc/desc.go index fa790e0ff1..f325298564 100644 --- a/vendor/google.golang.org/protobuf/internal/filedesc/desc.go +++ b/vendor/google.golang.org/protobuf/internal/filedesc/desc.go @@ -32,6 +32,7 @@ const ( EditionProto2 Edition = 998 EditionProto3 Edition = 999 Edition2023 Edition = 1000 + Edition2024 Edition = 1001 EditionUnsupported Edition = 100000 ) @@ -77,28 +78,42 @@ type ( Locations SourceLocations } + // EditionFeatures is a frequently-instantiated struct, so please take care + // to minimize padding when adding new fields to this struct (add them in + // the right place/order). EditionFeatures struct { + // StripEnumPrefix determines if the plugin generates enum value + // constants as-is, with their prefix stripped, or both variants. + StripEnumPrefix int + // IsFieldPresence is true if field_presence is EXPLICIT // https://protobuf.dev/editions/features/#field_presence IsFieldPresence bool + // IsFieldPresence is true if field_presence is LEGACY_REQUIRED // https://protobuf.dev/editions/features/#field_presence IsLegacyRequired bool + // IsOpenEnum is true if enum_type is OPEN // https://protobuf.dev/editions/features/#enum_type IsOpenEnum bool + // IsPacked is true if repeated_field_encoding is PACKED // https://protobuf.dev/editions/features/#repeated_field_encoding IsPacked bool + // IsUTF8Validated is true if utf_validation is VERIFY // https://protobuf.dev/editions/features/#utf8_validation IsUTF8Validated bool + // IsDelimitedEncoded is true if message_encoding is DELIMITED // https://protobuf.dev/editions/features/#message_encoding IsDelimitedEncoded bool + // IsJSONCompliant is true if json_format is ALLOW // https://protobuf.dev/editions/features/#json_format IsJSONCompliant bool + // GenerateLegacyUnmarshalJSON determines if the plugin generates the // UnmarshalJSON([]byte) error method for enums. GenerateLegacyUnmarshalJSON bool diff --git a/vendor/google.golang.org/protobuf/internal/filedesc/editions.go b/vendor/google.golang.org/protobuf/internal/filedesc/editions.go index fd4d0c83d2..7611796e86 100644 --- a/vendor/google.golang.org/protobuf/internal/filedesc/editions.go +++ b/vendor/google.golang.org/protobuf/internal/filedesc/editions.go @@ -32,6 +32,10 @@ func unmarshalGoFeature(b []byte, parent EditionFeatures) EditionFeatures { v, m := protowire.ConsumeVarint(b) b = b[m:] parent.GenerateLegacyUnmarshalJSON = protowire.DecodeBool(v) + case genid.GoFeatures_StripEnumPrefix_field_number: + v, m := protowire.ConsumeVarint(b) + b = b[m:] + parent.StripEnumPrefix = int(v) default: panic(fmt.Sprintf("unkown field number %d while unmarshalling GoFeatures", num)) } diff --git a/vendor/google.golang.org/protobuf/internal/genid/go_features_gen.go b/vendor/google.golang.org/protobuf/internal/genid/go_features_gen.go index 7f67cbb6e9..09792d96f6 100644 --- a/vendor/google.golang.org/protobuf/internal/genid/go_features_gen.go +++ b/vendor/google.golang.org/protobuf/internal/genid/go_features_gen.go @@ -21,13 +21,30 @@ const ( // Field names for pb.GoFeatures. 
const ( GoFeatures_LegacyUnmarshalJsonEnum_field_name protoreflect.Name = "legacy_unmarshal_json_enum" + GoFeatures_StripEnumPrefix_field_name protoreflect.Name = "strip_enum_prefix" GoFeatures_LegacyUnmarshalJsonEnum_field_fullname protoreflect.FullName = "pb.GoFeatures.legacy_unmarshal_json_enum" + GoFeatures_StripEnumPrefix_field_fullname protoreflect.FullName = "pb.GoFeatures.strip_enum_prefix" ) // Field numbers for pb.GoFeatures. const ( GoFeatures_LegacyUnmarshalJsonEnum_field_number protoreflect.FieldNumber = 1 + GoFeatures_StripEnumPrefix_field_number protoreflect.FieldNumber = 3 +) + +// Full and short names for pb.GoFeatures.StripEnumPrefix. +const ( + GoFeatures_StripEnumPrefix_enum_fullname = "pb.GoFeatures.StripEnumPrefix" + GoFeatures_StripEnumPrefix_enum_name = "StripEnumPrefix" +) + +// Enum values for pb.GoFeatures.StripEnumPrefix. +const ( + GoFeatures_STRIP_ENUM_PREFIX_UNSPECIFIED_enum_value = 0 + GoFeatures_STRIP_ENUM_PREFIX_KEEP_enum_value = 1 + GoFeatures_STRIP_ENUM_PREFIX_GENERATE_BOTH_enum_value = 2 + GoFeatures_STRIP_ENUM_PREFIX_STRIP_enum_value = 3 ) // Extension numbers diff --git a/vendor/google.golang.org/protobuf/internal/version/version.go b/vendor/google.golang.org/protobuf/internal/version/version.go index fb8e15e8da..62a52a40a3 100644 --- a/vendor/google.golang.org/protobuf/internal/version/version.go +++ b/vendor/google.golang.org/protobuf/internal/version/version.go @@ -52,7 +52,7 @@ import ( const ( Major = 1 Minor = 35 - Patch = 1 + Patch = 2 PreRelease = "" ) diff --git a/vendor/google.golang.org/protobuf/reflect/protodesc/desc.go b/vendor/google.golang.org/protobuf/reflect/protodesc/desc.go index 8fbecb4f58..69a0505091 100644 --- a/vendor/google.golang.org/protobuf/reflect/protodesc/desc.go +++ b/vendor/google.golang.org/protobuf/reflect/protodesc/desc.go @@ -13,6 +13,8 @@ package protodesc import ( + "strings" + "google.golang.org/protobuf/internal/editionssupport" "google.golang.org/protobuf/internal/errors" "google.golang.org/protobuf/internal/filedesc" @@ -102,13 +104,17 @@ func (o FileOptions) New(fd *descriptorpb.FileDescriptorProto, r Resolver) (prot default: return nil, errors.New("invalid syntax: %q", fd.GetSyntax()) } - if f.L1.Syntax == protoreflect.Editions && (fd.GetEdition() < editionssupport.Minimum || fd.GetEdition() > editionssupport.Maximum) { - return nil, errors.New("use of edition %v not yet supported by the Go Protobuf runtime", fd.GetEdition()) - } f.L1.Path = fd.GetName() if f.L1.Path == "" { return nil, errors.New("file path must be populated") } + if f.L1.Syntax == protoreflect.Editions && (fd.GetEdition() < editionssupport.Minimum || fd.GetEdition() > editionssupport.Maximum) { + // Allow cmd/protoc-gen-go/testdata to use any edition for easier + // testing of upcoming edition features. 
+ if !strings.HasPrefix(fd.GetName(), "cmd/protoc-gen-go/testdata/") { + return nil, errors.New("use of edition %v not yet supported by the Go Protobuf runtime", fd.GetEdition()) + } + } f.L1.Package = protoreflect.FullName(fd.GetPackage()) if !f.L1.Package.IsValid() && f.L1.Package != "" { return nil, errors.New("invalid package: %q", f.L1.Package) diff --git a/vendor/google.golang.org/protobuf/reflect/protodesc/editions.go b/vendor/google.golang.org/protobuf/reflect/protodesc/editions.go index 002e0047ae..d0aeab9585 100644 --- a/vendor/google.golang.org/protobuf/reflect/protodesc/editions.go +++ b/vendor/google.golang.org/protobuf/reflect/protodesc/editions.go @@ -43,6 +43,8 @@ func toEditionProto(ed filedesc.Edition) descriptorpb.Edition { return descriptorpb.Edition_EDITION_PROTO3 case filedesc.Edition2023: return descriptorpb.Edition_EDITION_2023 + case filedesc.Edition2024: + return descriptorpb.Edition_EDITION_2024 default: panic(fmt.Sprintf("unknown value for edition: %v", ed)) } @@ -127,6 +129,9 @@ func mergeEditionFeatures(parentDesc protoreflect.Descriptor, child *descriptorp if luje := goFeatures.LegacyUnmarshalJsonEnum; luje != nil { parentFS.GenerateLegacyUnmarshalJSON = *luje } + if sep := goFeatures.StripEnumPrefix; sep != nil { + parentFS.StripEnumPrefix = int(*sep) + } } return parentFS diff --git a/vendor/google.golang.org/protobuf/testing/protocmp/util.go b/vendor/google.golang.org/protobuf/testing/protocmp/util.go index 838e70fbcc..2282811b6e 100644 --- a/vendor/google.golang.org/protobuf/testing/protocmp/util.go +++ b/vendor/google.golang.org/protobuf/testing/protocmp/util.go @@ -524,6 +524,9 @@ func IgnoreUnknown() cmp.Option { // handled by this option. To sort Go slices that are not repeated fields, // consider using [github.com/google/go-cmp/cmp/cmpopts.SortSlices] instead. // +// The sorting of messages does not take into account ignored fields or oneofs +// as a result of [IgnoreFields] or [IgnoreOneofs]. +// // This must be used in conjunction with [Transform]. func SortRepeated(lessFunc any) cmp.Option { t, ok := checkTTBFunc(lessFunc) @@ -624,6 +627,9 @@ func checkTTBFunc(lessFunc any) (reflect.Type, bool) { // ... // user-provided definition for less // })) // +// The sorting of messages does not take into account ignored fields or oneofs +// as a result of [IgnoreFields] or [IgnoreOneofs]. +// // This must be used in conjunction with [Transform]. func SortRepeatedFields(message proto.Message, names ...protoreflect.Name) cmp.Option { var opts cmp.Options diff --git a/vendor/google.golang.org/protobuf/types/gofeaturespb/go_features.pb.go b/vendor/google.golang.org/protobuf/types/gofeaturespb/go_features.pb.go index c7e860fcd6..5067b89e90 100644 --- a/vendor/google.golang.org/protobuf/types/gofeaturespb/go_features.pb.go +++ b/vendor/google.golang.org/protobuf/types/gofeaturespb/go_features.pb.go @@ -18,13 +18,76 @@ import ( sync "sync" ) +type GoFeatures_StripEnumPrefix int32 + +const ( + GoFeatures_STRIP_ENUM_PREFIX_UNSPECIFIED GoFeatures_StripEnumPrefix = 0 + GoFeatures_STRIP_ENUM_PREFIX_KEEP GoFeatures_StripEnumPrefix = 1 + GoFeatures_STRIP_ENUM_PREFIX_GENERATE_BOTH GoFeatures_StripEnumPrefix = 2 + GoFeatures_STRIP_ENUM_PREFIX_STRIP GoFeatures_StripEnumPrefix = 3 +) + +// Enum value maps for GoFeatures_StripEnumPrefix. 
+var ( + GoFeatures_StripEnumPrefix_name = map[int32]string{ + 0: "STRIP_ENUM_PREFIX_UNSPECIFIED", + 1: "STRIP_ENUM_PREFIX_KEEP", + 2: "STRIP_ENUM_PREFIX_GENERATE_BOTH", + 3: "STRIP_ENUM_PREFIX_STRIP", + } + GoFeatures_StripEnumPrefix_value = map[string]int32{ + "STRIP_ENUM_PREFIX_UNSPECIFIED": 0, + "STRIP_ENUM_PREFIX_KEEP": 1, + "STRIP_ENUM_PREFIX_GENERATE_BOTH": 2, + "STRIP_ENUM_PREFIX_STRIP": 3, + } +) + +func (x GoFeatures_StripEnumPrefix) Enum() *GoFeatures_StripEnumPrefix { + p := new(GoFeatures_StripEnumPrefix) + *p = x + return p +} + +func (x GoFeatures_StripEnumPrefix) String() string { + return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) +} + +func (GoFeatures_StripEnumPrefix) Descriptor() protoreflect.EnumDescriptor { + return file_google_protobuf_go_features_proto_enumTypes[0].Descriptor() +} + +func (GoFeatures_StripEnumPrefix) Type() protoreflect.EnumType { + return &file_google_protobuf_go_features_proto_enumTypes[0] +} + +func (x GoFeatures_StripEnumPrefix) Number() protoreflect.EnumNumber { + return protoreflect.EnumNumber(x) +} + +// Deprecated: Do not use. +func (x *GoFeatures_StripEnumPrefix) UnmarshalJSON(b []byte) error { + num, err := protoimpl.X.UnmarshalJSONEnum(x.Descriptor(), b) + if err != nil { + return err + } + *x = GoFeatures_StripEnumPrefix(num) + return nil +} + +// Deprecated: Use GoFeatures_StripEnumPrefix.Descriptor instead. +func (GoFeatures_StripEnumPrefix) EnumDescriptor() ([]byte, []int) { + return file_google_protobuf_go_features_proto_rawDescGZIP(), []int{0, 0} +} + type GoFeatures struct { state protoimpl.MessageState sizeCache protoimpl.SizeCache unknownFields protoimpl.UnknownFields // Whether or not to generate the deprecated UnmarshalJSON method for enums. - LegacyUnmarshalJsonEnum *bool `protobuf:"varint,1,opt,name=legacy_unmarshal_json_enum,json=legacyUnmarshalJsonEnum" json:"legacy_unmarshal_json_enum,omitempty"` + LegacyUnmarshalJsonEnum *bool `protobuf:"varint,1,opt,name=legacy_unmarshal_json_enum,json=legacyUnmarshalJsonEnum" json:"legacy_unmarshal_json_enum,omitempty"` + StripEnumPrefix *GoFeatures_StripEnumPrefix `protobuf:"varint,3,opt,name=strip_enum_prefix,json=stripEnumPrefix,enum=pb.GoFeatures_StripEnumPrefix" json:"strip_enum_prefix,omitempty"` } func (x *GoFeatures) Reset() { @@ -64,6 +127,13 @@ func (x *GoFeatures) GetLegacyUnmarshalJsonEnum() bool { return false } +func (x *GoFeatures) GetStripEnumPrefix() GoFeatures_StripEnumPrefix { + if x != nil && x.StripEnumPrefix != nil { + return *x.StripEnumPrefix + } + return GoFeatures_STRIP_ENUM_PREFIX_UNSPECIFIED +} + var file_google_protobuf_go_features_proto_extTypes = []protoimpl.ExtensionInfo{ { ExtendedType: (*descriptorpb.FeatureSet)(nil), @@ -88,7 +158,7 @@ var file_google_protobuf_go_features_proto_rawDesc = []byte{ 0x66, 0x2f, 0x67, 0x6f, 0x5f, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x02, 0x70, 0x62, 0x1a, 0x20, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x64, 0x65, 0x73, 0x63, 0x72, 0x69, 0x70, - 0x74, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xcd, 0x01, 0x0a, 0x0a, 0x47, 0x6f, + 0x74, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xe0, 0x03, 0x0a, 0x0a, 0x47, 0x6f, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x12, 0xbe, 0x01, 0x0a, 0x1a, 0x6c, 0x65, 0x67, 0x61, 0x63, 0x79, 0x5f, 0x75, 0x6e, 0x6d, 0x61, 0x72, 0x73, 0x68, 0x61, 0x6c, 0x5f, 0x6a, 0x73, 0x6f, 0x6e, 0x5f, 0x65, 0x6e, 0x75, 0x6d, 0x18, 0x01, 0x20, 0x01, 
0x28, 0x08, 0x42, 0x80, 0x01, @@ -101,14 +171,31 @@ var file_google_protobuf_go_features_proto_rawDesc = []byte{ 0x20, 0x62, 0x65, 0x20, 0x72, 0x65, 0x6d, 0x6f, 0x76, 0x65, 0x64, 0x20, 0x69, 0x6e, 0x20, 0x61, 0x20, 0x66, 0x75, 0x74, 0x75, 0x72, 0x65, 0x20, 0x65, 0x64, 0x69, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x52, 0x17, 0x6c, 0x65, 0x67, 0x61, 0x63, 0x79, 0x55, 0x6e, 0x6d, 0x61, 0x72, 0x73, 0x68, 0x61, - 0x6c, 0x4a, 0x73, 0x6f, 0x6e, 0x45, 0x6e, 0x75, 0x6d, 0x3a, 0x3c, 0x0a, 0x02, 0x67, 0x6f, 0x12, - 0x1b, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, - 0x66, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x18, 0xea, 0x07, 0x20, - 0x01, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x6f, 0x46, 0x65, 0x61, 0x74, 0x75, - 0x72, 0x65, 0x73, 0x52, 0x02, 0x67, 0x6f, 0x42, 0x2f, 0x5a, 0x2d, 0x67, 0x6f, 0x6f, 0x67, 0x6c, - 0x65, 0x2e, 0x67, 0x6f, 0x6c, 0x61, 0x6e, 0x67, 0x2e, 0x6f, 0x72, 0x67, 0x2f, 0x70, 0x72, 0x6f, - 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x67, 0x6f, 0x66, 0x65, - 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x70, 0x62, + 0x6c, 0x4a, 0x73, 0x6f, 0x6e, 0x45, 0x6e, 0x75, 0x6d, 0x12, 0x7c, 0x0a, 0x11, 0x73, 0x74, 0x72, + 0x69, 0x70, 0x5f, 0x65, 0x6e, 0x75, 0x6d, 0x5f, 0x70, 0x72, 0x65, 0x66, 0x69, 0x78, 0x18, 0x03, + 0x20, 0x01, 0x28, 0x0e, 0x32, 0x1e, 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x6f, 0x46, 0x65, 0x61, 0x74, + 0x75, 0x72, 0x65, 0x73, 0x2e, 0x53, 0x74, 0x72, 0x69, 0x70, 0x45, 0x6e, 0x75, 0x6d, 0x50, 0x72, + 0x65, 0x66, 0x69, 0x78, 0x42, 0x30, 0x88, 0x01, 0x01, 0x98, 0x01, 0x06, 0x98, 0x01, 0x07, 0x98, + 0x01, 0x01, 0xa2, 0x01, 0x1b, 0x12, 0x16, 0x53, 0x54, 0x52, 0x49, 0x50, 0x5f, 0x45, 0x4e, 0x55, + 0x4d, 0x5f, 0x50, 0x52, 0x45, 0x46, 0x49, 0x58, 0x5f, 0x4b, 0x45, 0x45, 0x50, 0x18, 0x84, 0x07, + 0xb2, 0x01, 0x03, 0x08, 0xe9, 0x07, 0x52, 0x0f, 0x73, 0x74, 0x72, 0x69, 0x70, 0x45, 0x6e, 0x75, + 0x6d, 0x50, 0x72, 0x65, 0x66, 0x69, 0x78, 0x22, 0x92, 0x01, 0x0a, 0x0f, 0x53, 0x74, 0x72, 0x69, + 0x70, 0x45, 0x6e, 0x75, 0x6d, 0x50, 0x72, 0x65, 0x66, 0x69, 0x78, 0x12, 0x21, 0x0a, 0x1d, 0x53, + 0x54, 0x52, 0x49, 0x50, 0x5f, 0x45, 0x4e, 0x55, 0x4d, 0x5f, 0x50, 0x52, 0x45, 0x46, 0x49, 0x58, + 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x1a, + 0x0a, 0x16, 0x53, 0x54, 0x52, 0x49, 0x50, 0x5f, 0x45, 0x4e, 0x55, 0x4d, 0x5f, 0x50, 0x52, 0x45, + 0x46, 0x49, 0x58, 0x5f, 0x4b, 0x45, 0x45, 0x50, 0x10, 0x01, 0x12, 0x23, 0x0a, 0x1f, 0x53, 0x54, + 0x52, 0x49, 0x50, 0x5f, 0x45, 0x4e, 0x55, 0x4d, 0x5f, 0x50, 0x52, 0x45, 0x46, 0x49, 0x58, 0x5f, + 0x47, 0x45, 0x4e, 0x45, 0x52, 0x41, 0x54, 0x45, 0x5f, 0x42, 0x4f, 0x54, 0x48, 0x10, 0x02, 0x12, + 0x1b, 0x0a, 0x17, 0x53, 0x54, 0x52, 0x49, 0x50, 0x5f, 0x45, 0x4e, 0x55, 0x4d, 0x5f, 0x50, 0x52, + 0x45, 0x46, 0x49, 0x58, 0x5f, 0x53, 0x54, 0x52, 0x49, 0x50, 0x10, 0x03, 0x3a, 0x3c, 0x0a, 0x02, + 0x67, 0x6f, 0x12, 0x1b, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, + 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x46, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x53, 0x65, 0x74, 0x18, + 0xea, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x0e, 0x2e, 0x70, 0x62, 0x2e, 0x47, 0x6f, 0x46, 0x65, + 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x52, 0x02, 0x67, 0x6f, 0x42, 0x2f, 0x5a, 0x2d, 0x67, 0x6f, + 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x67, 0x6f, 0x6c, 0x61, 0x6e, 0x67, 0x2e, 0x6f, 0x72, 0x67, 0x2f, + 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x79, 0x70, 0x65, 0x73, 0x2f, 0x67, + 0x6f, 0x66, 0x65, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x70, 
0x62, } var ( @@ -123,19 +210,22 @@ func file_google_protobuf_go_features_proto_rawDescGZIP() []byte { return file_google_protobuf_go_features_proto_rawDescData } +var file_google_protobuf_go_features_proto_enumTypes = make([]protoimpl.EnumInfo, 1) var file_google_protobuf_go_features_proto_msgTypes = make([]protoimpl.MessageInfo, 1) var file_google_protobuf_go_features_proto_goTypes = []any{ - (*GoFeatures)(nil), // 0: pb.GoFeatures - (*descriptorpb.FeatureSet)(nil), // 1: google.protobuf.FeatureSet + (GoFeatures_StripEnumPrefix)(0), // 0: pb.GoFeatures.StripEnumPrefix + (*GoFeatures)(nil), // 1: pb.GoFeatures + (*descriptorpb.FeatureSet)(nil), // 2: google.protobuf.FeatureSet } var file_google_protobuf_go_features_proto_depIdxs = []int32{ - 1, // 0: pb.go:extendee -> google.protobuf.FeatureSet - 0, // 1: pb.go:type_name -> pb.GoFeatures - 2, // [2:2] is the sub-list for method output_type - 2, // [2:2] is the sub-list for method input_type - 1, // [1:2] is the sub-list for extension type_name - 0, // [0:1] is the sub-list for extension extendee - 0, // [0:0] is the sub-list for field type_name + 0, // 0: pb.GoFeatures.strip_enum_prefix:type_name -> pb.GoFeatures.StripEnumPrefix + 2, // 1: pb.go:extendee -> google.protobuf.FeatureSet + 1, // 2: pb.go:type_name -> pb.GoFeatures + 3, // [3:3] is the sub-list for method output_type + 3, // [3:3] is the sub-list for method input_type + 2, // [2:3] is the sub-list for extension type_name + 1, // [1:2] is the sub-list for extension extendee + 0, // [0:1] is the sub-list for field type_name } func init() { file_google_protobuf_go_features_proto_init() } @@ -148,13 +238,14 @@ func file_google_protobuf_go_features_proto_init() { File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: file_google_protobuf_go_features_proto_rawDesc, - NumEnums: 0, + NumEnums: 1, NumMessages: 1, NumExtensions: 1, NumServices: 0, }, GoTypes: file_google_protobuf_go_features_proto_goTypes, DependencyIndexes: file_google_protobuf_go_features_proto_depIdxs, + EnumInfos: file_google_protobuf_go_features_proto_enumTypes, MessageInfos: file_google_protobuf_go_features_proto_msgTypes, ExtensionInfos: file_google_protobuf_go_features_proto_extTypes, }.Build() diff --git a/vendor/modules.txt b/vendor/modules.txt index 67da8e46fe..0a136774ca 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -83,20 +83,20 @@ filippo.io/edwards25519/field # github.com/4meepo/tagalign v1.3.4 ## explicit; go 1.19 github.com/4meepo/tagalign -# github.com/Abirdcfly/dupword v0.1.1 -## explicit; go 1.20 +# github.com/Abirdcfly/dupword v0.1.3 +## explicit; go 1.22.0 github.com/Abirdcfly/dupword # github.com/AliyunContainerService/ack-ram-tool/pkg/credentials/provider v0.14.0 ## explicit; go 1.16 github.com/AliyunContainerService/ack-ram-tool/pkg/credentials/provider -# github.com/Antonboom/errname v0.1.13 -## explicit; go 1.20 +# github.com/Antonboom/errname v1.0.0 +## explicit; go 1.22.1 github.com/Antonboom/errname/pkg/analyzer -# github.com/Antonboom/nilnil v0.1.9 -## explicit; go 1.20 +# github.com/Antonboom/nilnil v1.0.0 +## explicit; go 1.22.0 github.com/Antonboom/nilnil/pkg/analyzer -# github.com/Antonboom/testifylint v1.4.3 -## explicit; go 1.20 +# github.com/Antonboom/testifylint v1.5.0 +## explicit; go 1.22.1 github.com/Antonboom/testifylint/analyzer github.com/Antonboom/testifylint/internal/analysisutil github.com/Antonboom/testifylint/internal/checkers @@ -261,10 +261,10 @@ github.com/ProtonMail/go-crypto/openpgp/s2k # 
github.com/ThalesIgnite/crypto11 v1.2.5
## explicit; go 1.13
github.com/ThalesIgnite/crypto11
-# github.com/alecthomas/go-check-sumtype v0.1.4
+# github.com/alecthomas/go-check-sumtype v0.2.0
## explicit; go 1.18
github.com/alecthomas/go-check-sumtype
-# github.com/alexkohler/nakedret/v2 v2.0.4
+# github.com/alexkohler/nakedret/v2 v2.0.5
## explicit; go 1.21
github.com/alexkohler/nakedret/v2
# github.com/alexkohler/prealloc v1.0.0
@@ -492,8 +492,8 @@ github.com/awslabs/amazon-ecr-credential-helper/ecr-login/version
# github.com/beorn7/perks v1.0.1
## explicit; go 1.11
github.com/beorn7/perks/quantile
-# github.com/bkielbasa/cyclop v1.2.1
-## explicit; go 1.20
+# github.com/bkielbasa/cyclop v1.2.3
+## explicit; go 1.22.0
github.com/bkielbasa/cyclop/pkg/analyzer
# github.com/blang/semver v3.5.1+incompatible
## explicit
@@ -513,11 +513,11 @@ github.com/bmatcuk/doublestar/v4
# github.com/bombsimon/wsl/v4 v4.4.1
## explicit; go 1.21
github.com/bombsimon/wsl/v4
-# github.com/breml/bidichk v0.2.7
-## explicit; go 1.20
+# github.com/breml/bidichk v0.3.2
+## explicit; go 1.22.0
github.com/breml/bidichk/pkg/bidichk
-# github.com/breml/errchkjson v0.3.6
-## explicit; go 1.20
+# github.com/breml/errchkjson v0.4.0
+## explicit; go 1.22.0
github.com/breml/errchkjson
# github.com/buildkite/agent/v3 v3.81.0
## explicit; go 1.22.6
@@ -584,7 +584,7 @@ github.com/chavacava/garif
github.com/chrismellard/docker-credential-acr-env/pkg/credhelper
github.com/chrismellard/docker-credential-acr-env/pkg/registry
github.com/chrismellard/docker-credential-acr-env/pkg/token
-# github.com/ckaznocha/intrange v0.2.0
+# github.com/ckaznocha/intrange v0.2.1
## explicit; go 1.22
github.com/ckaznocha/intrange
# github.com/clbanning/mxj/v2 v2.7.0
@@ -760,7 +760,7 @@ github.com/ettle/strcase
## explicit; go 1.18
github.com/evanphx/json-patch/v5
github.com/evanphx/json-patch/v5/internal/json
-# github.com/fatih/color v1.17.0
+# github.com/fatih/color v1.18.0
## explicit; go 1.17
github.com/fatih/color
# github.com/fatih/structtag v1.2.0
@@ -782,14 +782,14 @@ github.com/fxamacker/cbor/v2
# github.com/fzipp/gocyclo v0.6.0
## explicit; go 1.18
github.com/fzipp/gocyclo
-# github.com/ghostiam/protogetter v0.3.6
-## explicit; go 1.19
+# github.com/ghostiam/protogetter v0.3.8
+## explicit; go 1.22.0
github.com/ghostiam/protogetter
# github.com/go-chi/chi v4.1.2+incompatible
## explicit
github.com/go-chi/chi
github.com/go-chi/chi/middleware
-# github.com/go-critic/go-critic v0.11.4
+# github.com/go-critic/go-critic v0.11.5
## explicit; go 1.18
github.com/go-critic/go-critic/checkers
github.com/go-critic/go-critic/checkers/internal/astwalk
@@ -889,7 +889,7 @@ github.com/go-toolsmith/strparse
# github.com/go-toolsmith/typep v1.1.0
## explicit; go 1.16
github.com/go-toolsmith/typep
-# github.com/go-viper/mapstructure/v2 v2.1.0
+# github.com/go-viper/mapstructure/v2 v2.2.1
## explicit; go 1.18
github.com/go-viper/mapstructure/v2
github.com/go-viper/mapstructure/v2/internal/errors
@@ -940,19 +940,23 @@ github.com/golangci/dupl/printer
github.com/golangci/dupl/suffixtree
github.com/golangci/dupl/syntax
github.com/golangci/dupl/syntax/golang
+# github.com/golangci/go-printf-func-name v0.1.0
+## explicit; go 1.22.0
+github.com/golangci/go-printf-func-name/pkg/analyzer
# github.com/golangci/gofmt v0.0.0-20240816233607-d8596aa466a9
## explicit; go 1.22
github.com/golangci/gofmt/gofmt
github.com/golangci/gofmt/gofmt/internal/diff
github.com/golangci/gofmt/goimports
-# github.com/golangci/golangci-lint v1.61.0
+# github.com/golangci/golangci-lint v1.62.0
## explicit; go 1.22.1
github.com/golangci/golangci-lint/cmd/golangci-lint
github.com/golangci/golangci-lint/internal/cache
github.com/golangci/golangci-lint/internal/errorutil
-github.com/golangci/golangci-lint/internal/pkgcache
-github.com/golangci/golangci-lint/internal/renameio
-github.com/golangci/golangci-lint/internal/robustio
+github.com/golangci/golangci-lint/internal/go/cache
+github.com/golangci/golangci-lint/internal/go/mmap
+github.com/golangci/golangci-lint/internal/go/quoted
+github.com/golangci/golangci-lint/internal/go/robustio
github.com/golangci/golangci-lint/pkg/commands
github.com/golangci/golangci-lint/pkg/commands/internal
github.com/golangci/golangci-lint/pkg/config
@@ -982,7 +986,6 @@ github.com/golangci/golangci-lint/pkg/golinters/errcheck
github.com/golangci/golangci-lint/pkg/golinters/errchkjson
github.com/golangci/golangci-lint/pkg/golinters/errname
github.com/golangci/golangci-lint/pkg/golinters/errorlint
-github.com/golangci/golangci-lint/pkg/golinters/execinquery
github.com/golangci/golangci-lint/pkg/golinters/exhaustive
github.com/golangci/golangci-lint/pkg/golinters/exhaustruct
github.com/golangci/golangci-lint/pkg/golinters/exportloopref
@@ -1014,6 +1017,7 @@ github.com/golangci/golangci-lint/pkg/golinters/gosimple
github.com/golangci/golangci-lint/pkg/golinters/gosmopolitan
github.com/golangci/golangci-lint/pkg/golinters/govet
github.com/golangci/golangci-lint/pkg/golinters/grouper
+github.com/golangci/golangci-lint/pkg/golinters/iface
github.com/golangci/golangci-lint/pkg/golinters/importas
github.com/golangci/golangci-lint/pkg/golinters/inamedparam
github.com/golangci/golangci-lint/pkg/golinters/ineffassign
@@ -1046,6 +1050,7 @@ github.com/golangci/golangci-lint/pkg/golinters/predeclared
github.com/golangci/golangci-lint/pkg/golinters/promlinter
github.com/golangci/golangci-lint/pkg/golinters/protogetter
github.com/golangci/golangci-lint/pkg/golinters/reassign
+github.com/golangci/golangci-lint/pkg/golinters/recvcheck
github.com/golangci/golangci-lint/pkg/golinters/revive
github.com/golangci/golangci-lint/pkg/golinters/rowserrcheck
github.com/golangci/golangci-lint/pkg/golinters/sloglint
@@ -1406,9 +1411,6 @@ github.com/jgautheron/goconst
# github.com/jingyugao/rowserrcheck v1.1.1
## explicit; go 1.13
github.com/jingyugao/rowserrcheck/passes/rowserr
-# github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af
-## explicit; go 1.13
-github.com/jirfag/go-printf-func-name/pkg/analyzer
# github.com/jjti/go-spancheck v0.6.2
## explicit; go 1.20
github.com/jjti/go-spancheck
@@ -1433,7 +1435,7 @@ github.com/kelseyhightower/envconfig
# github.com/kevinburke/ssh_config v1.2.0
## explicit
github.com/kevinburke/ssh_config
-# github.com/kisielk/errcheck v1.7.0
+# github.com/kisielk/errcheck v1.8.0
## explicit; go 1.18
github.com/kisielk/errcheck/errcheck
# github.com/kkHAIKE/contextcheck v1.1.5
@@ -1463,8 +1465,8 @@ github.com/kylelemons/godebug/pretty
# github.com/kyoh86/exportloopref v0.1.11
## explicit; go 1.18
github.com/kyoh86/exportloopref
-# github.com/lasiar/canonicalheader v1.1.1
-## explicit; go 1.21
+# github.com/lasiar/canonicalheader v1.1.2
+## explicit; go 1.22.0
github.com/lasiar/canonicalheader
# github.com/ldez/gomoddirectives v0.2.4
## explicit; go 1.21
@@ -1488,9 +1490,6 @@ github.com/letsencrypt/boulder/identifier
github.com/letsencrypt/boulder/probs
github.com/letsencrypt/boulder/revocation
github.com/letsencrypt/boulder/strictyaml
-# github.com/lufeee/execinquery v1.2.1
-## explicit; go 1.17
-github.com/lufeee/execinquery
# github.com/macabu/inamedparam v0.1.3
## explicit; go 1.20
github.com/macabu/inamedparam
@@ -1517,11 +1516,11 @@ github.com/mattn/go-colorable
# github.com/mattn/go-isatty v0.0.20
## explicit; go 1.15
github.com/mattn/go-isatty
-# github.com/mattn/go-runewidth v0.0.15
+# github.com/mattn/go-runewidth v0.0.16
## explicit; go 1.9
github.com/mattn/go-runewidth
-# github.com/mgechev/revive v1.3.9
-## explicit; go 1.21
+# github.com/mgechev/revive v1.5.0
+## explicit; go 1.22.1
github.com/mgechev/revive/config
github.com/mgechev/revive/formatter
github.com/mgechev/revive/internal/ifelse
@@ -1571,15 +1570,22 @@ github.com/nishanths/predeclared/passes/predeclared
# github.com/nozzle/throttler v0.0.0-20180817012639-2ea982251481
## explicit
github.com/nozzle/throttler
-# github.com/nunnatsa/ginkgolinter v0.16.2
-## explicit; go 1.21
+# github.com/nunnatsa/ginkgolinter v0.18.0
+## explicit; go 1.22.0
github.com/nunnatsa/ginkgolinter
+github.com/nunnatsa/ginkgolinter/internal/expression
+github.com/nunnatsa/ginkgolinter/internal/expression/actual
+github.com/nunnatsa/ginkgolinter/internal/expression/matcher
+github.com/nunnatsa/ginkgolinter/internal/expression/value
+github.com/nunnatsa/ginkgolinter/internal/formatter
github.com/nunnatsa/ginkgolinter/internal/ginkgohandler
github.com/nunnatsa/ginkgolinter/internal/gomegahandler
+github.com/nunnatsa/ginkgolinter/internal/gomegainfo
github.com/nunnatsa/ginkgolinter/internal/interfaces
github.com/nunnatsa/ginkgolinter/internal/intervals
github.com/nunnatsa/ginkgolinter/internal/reports
github.com/nunnatsa/ginkgolinter/internal/reverseassertion
+github.com/nunnatsa/ginkgolinter/internal/rules
github.com/nunnatsa/ginkgolinter/linter
github.com/nunnatsa/ginkgolinter/types
github.com/nunnatsa/ginkgolinter/version
@@ -1703,12 +1709,21 @@ github.com/quasilyte/regex/syntax
# github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567
## explicit; go 1.17
github.com/quasilyte/stdinfo
+# github.com/raeperd/recvcheck v0.1.2
+## explicit; go 1.22.0
+github.com/raeperd/recvcheck
# github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475
## explicit
github.com/rcrowley/go-metrics
-# github.com/rivo/uniseg v0.4.4
+# github.com/rivo/uniseg v0.4.7
## explicit; go 1.18
github.com/rivo/uniseg
+# github.com/rogpeppe/go-internal v1.13.1
+## explicit; go 1.22
+github.com/rogpeppe/go-internal/internal/syscall/windows
+github.com/rogpeppe/go-internal/internal/syscall/windows/sysdll
+github.com/rogpeppe/go-internal/lockedfile
+github.com/rogpeppe/go-internal/lockedfile/internal/filelock
# github.com/ryancurrah/gomodguard v1.3.5
## explicit; go 1.22.0
github.com/ryancurrah/gomodguard
@@ -1748,8 +1763,8 @@ github.com/secure-systems-lab/go-securesystemslib/cjson
github.com/secure-systems-lab/go-securesystemslib/dsse
github.com/secure-systems-lab/go-securesystemslib/encrypted
github.com/secure-systems-lab/go-securesystemslib/signerverifier
-# github.com/securego/gosec/v2 v2.21.2
-## explicit; go 1.22
+# github.com/securego/gosec/v2 v2.21.4
+## explicit; go 1.22.0
github.com/securego/gosec/v2
github.com/securego/gosec/v2/analyzers
github.com/securego/gosec/v2/cwe
@@ -1880,14 +1895,14 @@ github.com/sirupsen/logrus
# github.com/sivchari/containedctx v1.0.3
## explicit; go 1.17
github.com/sivchari/containedctx
-# github.com/sivchari/tenv v1.10.0
-## explicit; go 1.21.0
+# github.com/sivchari/tenv v1.12.1
+## explicit; go 1.22.0
github.com/sivchari/tenv
# github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966
## explicit
github.com/skratchdot/open-golang/open
-# github.com/sonatard/noctx v0.0.2
-## explicit; go 1.20
+# github.com/sonatard/noctx v0.1.0
+## explicit; go 1.22.0
github.com/sonatard/noctx
github.com/sonatard/noctx/ngfunc
github.com/sonatard/noctx/reqwithoutctx
@@ -1983,7 +1998,7 @@ github.com/syndtr/goleveldb/leveldb/util
# github.com/tdakkota/asciicheck v0.2.0
## explicit; go 1.18
github.com/tdakkota/asciicheck
-# github.com/tektoncd/pipeline v0.65.0
+# github.com/tektoncd/pipeline v0.65.1
## explicit; go 1.22
github.com/tektoncd/pipeline/internal/artifactref
github.com/tektoncd/pipeline/pkg/apis/config
@@ -2080,7 +2095,7 @@ github.com/tektoncd/pipeline/test
## explicit; go 1.19
github.com/tektoncd/plumbing
github.com/tektoncd/plumbing/scripts
-# github.com/tetafro/godot v1.4.17
+# github.com/tetafro/godot v1.4.18
## explicit; go 1.20
github.com/tetafro/godot
# github.com/thales-e-security/pool v0.0.2
@@ -2104,10 +2119,9 @@ github.com/theupdateframework/go-tuf/verify
# github.com/timakin/bodyclose v0.0.0-20230421092635-574207250966
## explicit; go 1.12
github.com/timakin/bodyclose/passes/bodyclose
-# github.com/timonwong/loggercheck v0.9.4
-## explicit; go 1.18
+# github.com/timonwong/loggercheck v0.10.1
+## explicit; go 1.22.0
github.com/timonwong/loggercheck
-github.com/timonwong/loggercheck/internal/bytebufferpool
github.com/timonwong/loggercheck/internal/checkers
github.com/timonwong/loggercheck/internal/checkers/printf
github.com/timonwong/loggercheck/internal/rules
@@ -2142,6 +2156,12 @@ github.com/ultraware/whitespace
# github.com/uudashr/gocognit v1.1.3
## explicit; go 1.18
github.com/uudashr/gocognit
+# github.com/uudashr/iface v1.2.0
+## explicit; go 1.21.0
+github.com/uudashr/iface/identical
+github.com/uudashr/iface/internal/directive
+github.com/uudashr/iface/opaque
+github.com/uudashr/iface/unused
# github.com/vbatts/tar-split v0.11.5
## explicit; go 1.17
github.com/vbatts/tar-split/archive/tar
@@ -2186,7 +2206,7 @@ github.com/zeebo/errs
# gitlab.com/bosi/decorder v0.4.2
## explicit; go 1.20
gitlab.com/bosi/decorder
-# go-simpler.org/musttag v0.12.2
+# go-simpler.org/musttag v0.13.0
## explicit; go 1.20
go-simpler.org/musttag
# go-simpler.org/sloglint v0.7.2
@@ -2327,8 +2347,8 @@ go.step.sm/crypto/keyutil
go.step.sm/crypto/pemutil
go.step.sm/crypto/randutil
go.step.sm/crypto/x25519
-# go.uber.org/automaxprocs v1.5.3
-## explicit; go 1.18
+# go.uber.org/automaxprocs v1.6.0
+## explicit; go 1.20
go.uber.org/automaxprocs/internal/cgroups
go.uber.org/automaxprocs/internal/runtime
go.uber.org/automaxprocs/maxprocs
@@ -2374,7 +2394,7 @@ gocloud.dev/docstore/mongodocstore
# gocloud.dev/pubsub/kafkapubsub v0.40.0
## explicit; go 1.21.0
gocloud.dev/pubsub/kafkapubsub
-# golang.org/x/crypto v0.28.0
+# golang.org/x/crypto v0.29.0
## explicit; go 1.20
golang.org/x/crypto/argon2
golang.org/x/crypto/blake2b
@@ -2410,25 +2430,25 @@ golang.org/x/crypto/ssh/agent
golang.org/x/crypto/ssh/internal/bcrypt_pbkdf
golang.org/x/crypto/ssh/knownhosts
golang.org/x/crypto/ssh/terminal
-# golang.org/x/exp v0.0.0-20240904232852-e7e105dedf7e
-## explicit; go 1.20
+# golang.org/x/exp v0.0.0-20240909161429-701f63a606c0
+## explicit; go 1.22.0
golang.org/x/exp/constraints
golang.org/x/exp/maps
golang.org/x/exp/slices
golang.org/x/exp/slog
golang.org/x/exp/slog/internal
golang.org/x/exp/slog/internal/buffer
-# golang.org/x/exp/typeparams v0.0.0-20240314144324-c7f7c6466f7f
+# golang.org/x/exp/typeparams v0.0.0-20240909161429-701f63a606c0
## explicit; go 1.18
golang.org/x/exp/typeparams
-# golang.org/x/mod v0.21.0
+# golang.org/x/mod v0.22.0
## explicit; go 1.22.0
golang.org/x/mod/internal/lazyregexp
golang.org/x/mod/modfile
golang.org/x/mod/module
golang.org/x/mod/semver
golang.org/x/mod/sumdb/note
-# golang.org/x/net v0.30.0
+# golang.org/x/net v0.31.0
## explicit; go 1.18
golang.org/x/net/context
golang.org/x/net/context/ctxhttp
@@ -2453,22 +2473,22 @@ golang.org/x/oauth2/google/internal/stsexchange
golang.org/x/oauth2/internal
golang.org/x/oauth2/jws
golang.org/x/oauth2/jwt
-# golang.org/x/sync v0.8.0
+# golang.org/x/sync v0.9.0
## explicit; go 1.18
golang.org/x/sync/errgroup
golang.org/x/sync/semaphore
golang.org/x/sync/singleflight
-# golang.org/x/sys v0.26.0
+# golang.org/x/sys v0.27.0
## explicit; go 1.18
golang.org/x/sys/cpu
golang.org/x/sys/plan9
golang.org/x/sys/unix
golang.org/x/sys/windows
golang.org/x/sys/windows/registry
-# golang.org/x/term v0.25.0
+# golang.org/x/term v0.26.0
## explicit; go 1.18
golang.org/x/term
-# golang.org/x/text v0.19.0
+# golang.org/x/text v0.20.0
## explicit; go 1.18
golang.org/x/text/encoding
golang.org/x/text/encoding/internal
@@ -2484,8 +2504,8 @@ golang.org/x/text/width
# golang.org/x/time v0.7.0
## explicit; go 1.18
golang.org/x/time/rate
-# golang.org/x/tools v0.24.0
-## explicit; go 1.19
+# golang.org/x/tools v0.27.0
+## explicit; go 1.22.0
golang.org/x/tools/go/analysis
golang.org/x/tools/go/analysis/passes/appends
golang.org/x/tools/go/analysis/passes/asmdecl
@@ -2559,7 +2579,6 @@ golang.org/x/tools/internal/imports
golang.org/x/tools/internal/packagesinternal
golang.org/x/tools/internal/pkgbits
golang.org/x/tools/internal/stdlib
-golang.org/x/tools/internal/tokeninternal
golang.org/x/tools/internal/typeparams
golang.org/x/tools/internal/typesinternal
golang.org/x/tools/internal/versions
@@ -2614,8 +2633,8 @@ google.golang.org/genproto/googleapis/api/monitoredres
google.golang.org/genproto/googleapis/rpc/code
google.golang.org/genproto/googleapis/rpc/errdetails
google.golang.org/genproto/googleapis/rpc/status
-# google.golang.org/grpc v1.67.1
-## explicit; go 1.21
+# google.golang.org/grpc v1.68.0
+## explicit; go 1.22.7
google.golang.org/grpc
google.golang.org/grpc/attributes
google.golang.org/grpc/authz/audit
@@ -2628,6 +2647,8 @@ google.golang.org/grpc/balancer/grpclb/grpc_lb_v1
google.golang.org/grpc/balancer/grpclb/state
google.golang.org/grpc/balancer/leastrequest
google.golang.org/grpc/balancer/pickfirst
+google.golang.org/grpc/balancer/pickfirst/internal
+google.golang.org/grpc/balancer/pickfirst/pickfirstleaf
google.golang.org/grpc/balancer/rls
google.golang.org/grpc/balancer/rls/internal/adaptive
google.golang.org/grpc/balancer/rls/internal/keys
@@ -2750,7 +2771,7 @@ google.golang.org/grpc/xds/internal/xdsclient/xdsresource/version
## explicit; go 1.21
google.golang.org/grpc/stats/opentelemetry
google.golang.org/grpc/stats/opentelemetry/internal
-# google.golang.org/protobuf v1.35.1
+# google.golang.org/protobuf v1.35.2
## explicit; go 1.21
google.golang.org/protobuf/encoding/protodelim
google.golang.org/protobuf/encoding/protojson