aboutsummaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorTaras Madan <tarasmadan@google.com>2024-11-11 11:41:38 +0100
committerTaras Madan <tarasmadan@google.com>2024-11-11 11:10:48 +0000
commit27e76fae2ee2d84dc7db63af1d9ed7358ba35b7a (patch)
treeed19c0e35e272b3c4cc5a2f2c595e035b2428337
parent621e84e063b0e15b23e17780338627c509e1b9e8 (diff)
vendor: update
-rw-r--r--vendor/cloud.google.com/go/auth/CHANGES.md7
-rw-r--r--vendor/cloud.google.com/go/auth/credentials/filetypes.go6
-rw-r--r--vendor/cloud.google.com/go/auth/credentials/selfsignedjwt.go4
-rw-r--r--vendor/github.com/Antonboom/errname/pkg/analyzer/analyzer.go131
-rw-r--r--vendor/github.com/Antonboom/errname/pkg/analyzer/facts.go199
-rw-r--r--vendor/github.com/Antonboom/nilnil/pkg/analyzer/analyzer.go33
-rw-r--r--vendor/github.com/Antonboom/nilnil/pkg/analyzer/config.go4
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/analysisutil/encoded.go46
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/analysisutil/file.go8
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/blank_import.go2
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/bool_compare.go18
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/checkers_registry.go3
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/compares.go9
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/contains.go71
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/empty.go34
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/encoded_compare.go101
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/error_is_as.go16
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/error_nil.go15
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/expected_actual.go2
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/float_compare.go2
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/formatter.go81
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/go_require.go10
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_basic_type.go81
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_diagnostic.go101
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_encoded.go40
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_error.go20
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_format.go35
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_interface.go5
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_pkg_func.go59
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/len.go17
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/negative_positive.go (renamed from vendor/github.com/Antonboom/testifylint/internal/checkers/negative_postive.go)10
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/nil_compare.go5
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/printf/printf.go6
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/regexp.go44
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/require_error.go7
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/suite_broken_parallel.go2
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/suite_dont_use_pkg.go2
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/suite_extra_assert_call.go4
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/suite_subtest_run.go2
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/suite_thelper.go4
-rw-r--r--vendor/github.com/Antonboom/testifylint/internal/checkers/useless_assert.go87
-rw-r--r--vendor/github.com/alecthomas/go-check-sumtype/README.md3
-rw-r--r--vendor/github.com/alecthomas/go-check-sumtype/check.go10
-rw-r--r--vendor/github.com/alecthomas/go-check-sumtype/config.go5
-rw-r--r--vendor/github.com/alecthomas/go-check-sumtype/renovate.json518
-rw-r--r--vendor/github.com/alecthomas/go-check-sumtype/run.go4
-rw-r--r--vendor/github.com/alexkohler/nakedret/v2/nakedret.go27
-rw-r--r--vendor/github.com/bkielbasa/cyclop/pkg/analyzer/analyzer.go20
-rw-r--r--vendor/github.com/breml/bidichk/pkg/bidichk/bidichk.go33
-rw-r--r--vendor/github.com/breml/errchkjson/.goreleaser.yml7
-rw-r--r--vendor/github.com/breml/errchkjson/README.md2
-rw-r--r--vendor/github.com/breml/errchkjson/errchkjson.go2
-rw-r--r--vendor/github.com/ckaznocha/intrange/intrange.go60
-rw-r--r--vendor/github.com/fatih/color/README.md23
-rw-r--r--vendor/github.com/fatih/color/color.go32
-rw-r--r--vendor/github.com/ghostiam/protogetter/processor.go2
-rw-r--r--vendor/github.com/go-critic/go-critic/checkers/caseOrder_checker.go2
-rw-r--r--vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walk_handler.go2
-rw-r--r--vendor/github.com/go-critic/go-critic/checkers/rangeAppendAll_checker.go100
-rw-r--r--vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go2
-rw-r--r--vendor/github.com/go-critic/go-critic/linter/helpers.go2
-rw-r--r--vendor/github.com/go-viper/mapstructure/v2/decode_hooks.go21
-rw-r--r--vendor/github.com/go-viper/mapstructure/v2/mapstructure.go69
-rw-r--r--vendor/github.com/golangci/go-printf-func-name/LICENSE (renamed from vendor/github.com/jirfag/go-printf-func-name/LICENSE)1
-rw-r--r--vendor/github.com/golangci/go-printf-func-name/pkg/analyzer/analyzer.go (renamed from vendor/github.com/jirfag/go-printf-func-name/pkg/analyzer/analyzer.go)8
-rw-r--r--vendor/github.com/golangci/golangci-lint/internal/cache/cache.go631
-rw-r--r--vendor/github.com/golangci/golangci-lint/internal/cache/readme.md18
-rw-r--r--vendor/github.com/golangci/golangci-lint/internal/go/LICENSE27
-rw-r--r--vendor/github.com/golangci/golangci-lint/internal/go/cache/cache.go663
-rw-r--r--vendor/github.com/golangci/golangci-lint/internal/go/cache/cache_gcil.go12
-rw-r--r--vendor/github.com/golangci/golangci-lint/internal/go/cache/default.go (renamed from vendor/github.com/golangci/golangci-lint/internal/cache/default.go)57
-rw-r--r--vendor/github.com/golangci/golangci-lint/internal/go/cache/default_gcil.go6
-rw-r--r--vendor/github.com/golangci/golangci-lint/internal/go/cache/hash.go (renamed from vendor/github.com/golangci/golangci-lint/internal/cache/hash.go)24
-rw-r--r--vendor/github.com/golangci/golangci-lint/internal/go/cache/hash_gcil.go5
-rw-r--r--vendor/github.com/golangci/golangci-lint/internal/go/cache/prog.go428
-rw-r--r--vendor/github.com/golangci/golangci-lint/internal/go/cache/readme.md51
-rw-r--r--vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap.go31
-rw-r--r--vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap_other.go21
-rw-r--r--vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap_unix.go36
-rw-r--r--vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap_windows.go41
-rw-r--r--vendor/github.com/golangci/golangci-lint/internal/go/mmap/readme.md15
-rw-r--r--vendor/github.com/golangci/golangci-lint/internal/go/quoted/quoted.go129
-rw-r--r--vendor/github.com/golangci/golangci-lint/internal/go/quoted/readme.md13
-rw-r--r--vendor/github.com/golangci/golangci-lint/internal/go/robustio/readme.md (renamed from vendor/github.com/golangci/golangci-lint/internal/robustio/readme.md)5
-rw-r--r--vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio.go (renamed from vendor/github.com/golangci/golangci-lint/internal/robustio/robustio.go)0
-rw-r--r--vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio_darwin.go (renamed from vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_darwin.go)0
-rw-r--r--vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio_flaky.go (renamed from vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_flaky.go)0
-rw-r--r--vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio_other.go (renamed from vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_other.go)0
-rw-r--r--vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio_windows.go (renamed from vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_windows.go)0
-rw-r--r--vendor/github.com/golangci/golangci-lint/internal/pkgcache/pkgcache.go229
-rw-r--r--vendor/github.com/golangci/golangci-lint/internal/renameio/readme.md10
-rw-r--r--vendor/github.com/golangci/golangci-lint/internal/renameio/renameio.go93
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/commands/cache.go1
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/commands/run.go5
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/config/config.go30
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go27
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/config/loader.go17
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner.go12
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_action.go321
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_action_cache.go127
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_base.go370
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_facts.go125
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_loadingpackage.go18
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runners.go160
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runners_cache.go172
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/golinters/cyclop/cyclop.go2
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/golinters/ginkgolinter/ginkgolinter.go5
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecksumtype/gochecksumtype.go10
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic/gocritic.go2
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader/goheader.go2
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/golinters/goprintffuncname/goprintffuncname.go2
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/golinters/iface/iface.go57
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/staticcheck_common.go11
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/golinters/mnd/mnd.go21
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/golinters/nakedret/nakedret.go2
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/golinters/nilnil/nilnil.go11
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/golinters/recvcheck/recvcheck.go (renamed from vendor/github.com/golangci/golangci-lint/pkg/golinters/execinquery/execinquery.go)6
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/golinters/revive/revive.go12
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/golinters/tenv/tenv.go2
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/lint/context.go6
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/lint/linter/config.go2
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/lint/linter/context.go4
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/builder_linter.go45
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go28
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go9
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go12
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/result/processors/fixer.go7
-rw-r--r--vendor/github.com/golangci/golangci-lint/pkg/timeutils/stopwatch.go22
-rw-r--r--vendor/github.com/kisielk/errcheck/errcheck/embedded_walker.go3
-rw-r--r--vendor/github.com/kisielk/errcheck/errcheck/embedded_walker_121.go10
-rw-r--r--vendor/github.com/kisielk/errcheck/errcheck/embedded_walker_122.go10
-rw-r--r--vendor/github.com/kisielk/errcheck/errcheck/errcheck.go50
-rw-r--r--vendor/github.com/kisielk/errcheck/errcheck/excludes.go5
-rw-r--r--vendor/github.com/lasiar/canonicalheader/.golangci.yaml86
-rw-r--r--vendor/github.com/lasiar/canonicalheader/analyzer.go1
-rw-r--r--vendor/github.com/lufeee/execinquery/.gitignore1
-rw-r--r--vendor/github.com/lufeee/execinquery/README.md76
-rw-r--r--vendor/github.com/lufeee/execinquery/execinquery.go135
-rw-r--r--vendor/github.com/mattn/go-runewidth/runewidth_table.go323
-rw-r--r--vendor/github.com/mgechev/revive/config/config.go2
-rw-r--r--vendor/github.com/mgechev/revive/lint/file.go2
-rw-r--r--vendor/github.com/mgechev/revive/lint/linter.go65
-rw-r--r--vendor/github.com/mgechev/revive/lint/name.go (renamed from vendor/github.com/mgechev/revive/lint/utils.go)0
-rw-r--r--vendor/github.com/mgechev/revive/lint/package.go16
-rw-r--r--vendor/github.com/mgechev/revive/rule/add-constant.go5
-rw-r--r--vendor/github.com/mgechev/revive/rule/argument-limit.go61
-rw-r--r--vendor/github.com/mgechev/revive/rule/blank-imports.go8
-rw-r--r--vendor/github.com/mgechev/revive/rule/bool-literal-in-expr.go1
-rw-r--r--vendor/github.com/mgechev/revive/rule/cognitive-complexity.go22
-rw-r--r--vendor/github.com/mgechev/revive/rule/comment-spacings.go17
-rw-r--r--vendor/github.com/mgechev/revive/rule/comments-density.go3
-rw-r--r--vendor/github.com/mgechev/revive/rule/constant-logical-expr.go5
-rw-r--r--vendor/github.com/mgechev/revive/rule/cyclomatic.go58
-rw-r--r--vendor/github.com/mgechev/revive/rule/datarace.go4
-rw-r--r--vendor/github.com/mgechev/revive/rule/deep-exit.go5
-rw-r--r--vendor/github.com/mgechev/revive/rule/defer.go10
-rw-r--r--vendor/github.com/mgechev/revive/rule/dot-imports.go25
-rw-r--r--vendor/github.com/mgechev/revive/rule/early-return.go12
-rw-r--r--vendor/github.com/mgechev/revive/rule/enforce-map-style.go5
-rw-r--r--vendor/github.com/mgechev/revive/rule/enforce-repeated-arg-type-style.go24
-rw-r--r--vendor/github.com/mgechev/revive/rule/enforce-slice-style.go17
-rw-r--r--vendor/github.com/mgechev/revive/rule/exported.go222
-rw-r--r--vendor/github.com/mgechev/revive/rule/file-header.go20
-rw-r--r--vendor/github.com/mgechev/revive/rule/file-length-limit.go138
-rw-r--r--vendor/github.com/mgechev/revive/rule/filename-format.go87
-rw-r--r--vendor/github.com/mgechev/revive/rule/function-length.go20
-rw-r--r--vendor/github.com/mgechev/revive/rule/function-result-limit.go37
-rw-r--r--vendor/github.com/mgechev/revive/rule/get-return.go22
-rw-r--r--vendor/github.com/mgechev/revive/rule/identical-branches.go15
-rw-r--r--vendor/github.com/mgechev/revive/rule/import-alias-naming.go2
-rw-r--r--vendor/github.com/mgechev/revive/rule/indent-error-flow.go12
-rw-r--r--vendor/github.com/mgechev/revive/rule/line-length-limit.go22
-rw-r--r--vendor/github.com/mgechev/revive/rule/max-control-nesting.go6
-rw-r--r--vendor/github.com/mgechev/revive/rule/max-public-structs.go24
-rw-r--r--vendor/github.com/mgechev/revive/rule/receiver-naming.go62
-rw-r--r--vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go57
-rw-r--r--vendor/github.com/mgechev/revive/rule/string-format.go129
-rw-r--r--vendor/github.com/mgechev/revive/rule/struct-tag.go46
-rw-r--r--vendor/github.com/mgechev/revive/rule/superfluous-else.go13
-rw-r--r--vendor/github.com/mgechev/revive/rule/time-equal.go19
-rw-r--r--vendor/github.com/mgechev/revive/rule/time-naming.go5
-rw-r--r--vendor/github.com/mgechev/revive/rule/unchecked-type-assertion.go28
-rw-r--r--vendor/github.com/mgechev/revive/rule/unconditional-recursion.go7
-rw-r--r--vendor/github.com/mgechev/revive/rule/unhandled-error.go41
-rw-r--r--vendor/github.com/mgechev/revive/rule/use-any.go2
-rw-r--r--vendor/github.com/mgechev/revive/rule/var-declarations.go16
-rw-r--r--vendor/github.com/mgechev/revive/rule/var-naming.go35
-rw-r--r--vendor/github.com/mgechev/revive/rule/waitgroup-by-value.go2
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/.gitignore1
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/Makefile12
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/README.md46
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/analyzer.go16
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/doc.go17
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/actual.go118
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/actualarg.go235
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/asyncactual.go123
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/asyncfuncarg.go38
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/comparisonAsserion.go260
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/expression/expression.go315
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/bematchers.go77
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/benumericmatcher.go128
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/equalmatcher.go124
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/errormatchers.go199
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/lenmatchers.go11
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/matcher.go86
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/matcherinfo.go148
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/matcherwithnest.go66
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/multiplematchers.go62
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/expression/value/value.go221
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/formatter/formatter.go22
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/dothandler.go36
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/ginkgoinfo.go63
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/handler.go123
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/handling.go195
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/namehandler.go49
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/dothandler.go99
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/handler.go215
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/namedhandler.go112
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/gomegainfo/gomegainfo.go113
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/interfaces/interfaces.go2
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/intervals/intervals.go339
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/reports/report-builder.go28
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/rules/asyncfunccallrule.go41
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/rules/asyncsucceedrule.go30
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/rules/asynctimeintervalsrule.go79
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/rules/caprule.go128
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/rules/comparepointerrule.go64
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/rules/comparisonrule.go75
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/rules/doublenegativerule.go30
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/rules/equalboolrule.go36
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/rules/equaldifferenttypesrule.go119
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/rules/equalnilrule.go29
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/rules/errorequalnilrule.go35
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/rules/forceexpecttorule.go43
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/rules/havelen0.go23
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/rules/haveoccurredrule.go35
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/rules/lenrule.go119
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/rules/matcheronlyrule.go12
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/rules/matcherrorrule.go110
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/rules/missingassertionrule.go27
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/rules/nilcomparerule.go75
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/rules/rule.go61
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/internal/rules/succeedrule.go41
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/linter/ginkgo_linter.go1604
-rw-r--r--vendor/github.com/nunnatsa/ginkgolinter/types/config.go2
-rw-r--r--vendor/github.com/raeperd/recvcheck/.gitignore2
-rw-r--r--vendor/github.com/raeperd/recvcheck/LICENSE (renamed from vendor/github.com/lufeee/execinquery/LICENSE)2
-rw-r--r--vendor/github.com/raeperd/recvcheck/Makefile14
-rw-r--r--vendor/github.com/raeperd/recvcheck/README.md52
-rw-r--r--vendor/github.com/raeperd/recvcheck/analyzer.go69
-rw-r--r--vendor/github.com/rivo/uniseg/README.md30
-rw-r--r--vendor/github.com/rivo/uniseg/eastasianwidth.go78
-rw-r--r--vendor/github.com/rivo/uniseg/emojipresentation.go18
-rw-r--r--vendor/github.com/rivo/uniseg/gen_breaktest.go10
-rw-r--r--vendor/github.com/rivo/uniseg/gen_properties.go13
-rw-r--r--vendor/github.com/rivo/uniseg/grapheme.go31
-rw-r--r--vendor/github.com/rivo/uniseg/graphemeproperties.go58
-rw-r--r--vendor/github.com/rivo/uniseg/graphemerules.go176
-rw-r--r--vendor/github.com/rivo/uniseg/line.go10
-rw-r--r--vendor/github.com/rivo/uniseg/lineproperties.go109
-rw-r--r--vendor/github.com/rivo/uniseg/linerules.go522
-rw-r--r--vendor/github.com/rivo/uniseg/properties.go48
-rw-r--r--vendor/github.com/rivo/uniseg/sentenceproperties.go54
-rw-r--r--vendor/github.com/rivo/uniseg/sentencerules.go265
-rw-r--r--vendor/github.com/rivo/uniseg/step.go24
-rw-r--r--vendor/github.com/rivo/uniseg/width.go9
-rw-r--r--vendor/github.com/rivo/uniseg/wordproperties.go71
-rw-r--r--vendor/github.com/rivo/uniseg/wordrules.go160
-rw-r--r--vendor/github.com/rogpeppe/go-internal/LICENSE27
-rw-r--r--vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/mksyscall.go7
-rw-r--r--vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/psapi_windows.go20
-rw-r--r--vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/reparse_windows.go64
-rw-r--r--vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/security_windows.go128
-rw-r--r--vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/symlink_windows.go39
-rw-r--r--vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/syscall_windows.go307
-rw-r--r--vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/sysdll/sysdll.go28
-rw-r--r--vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/zsyscall_windows.go363
-rw-r--r--vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock.go99
-rw-r--r--vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock_fcntl.go214
-rw-r--r--vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock_other.go36
-rw-r--r--vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock_unix.go44
-rw-r--r--vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock_windows.go67
-rw-r--r--vendor/github.com/rogpeppe/go-internal/lockedfile/lockedfile.go187
-rw-r--r--vendor/github.com/rogpeppe/go-internal/lockedfile/lockedfile_filelock.go65
-rw-r--r--vendor/github.com/rogpeppe/go-internal/lockedfile/lockedfile_plan9.go94
-rw-r--r--vendor/github.com/rogpeppe/go-internal/lockedfile/mutex.go67
-rw-r--r--vendor/github.com/securego/gosec/v2/action.yml2
-rw-r--r--vendor/github.com/securego/gosec/v2/analyzers/analyzerslist.go2
-rw-r--r--vendor/github.com/securego/gosec/v2/analyzers/conversion_overflow.go92
-rw-r--r--vendor/github.com/sivchari/tenv/goreleaser.yaml26
-rw-r--r--vendor/github.com/sivchari/tenv/tenv.go147
-rw-r--r--vendor/github.com/sonatard/noctx/.gitignore1
-rw-r--r--vendor/github.com/sonatard/noctx/.golangci.yml52
-rw-r--r--vendor/github.com/sonatard/noctx/.goreleaser.yml37
-rw-r--r--vendor/github.com/sonatard/noctx/Makefile6
-rw-r--r--vendor/github.com/sonatard/noctx/README.md7
-rw-r--r--vendor/github.com/sonatard/noctx/ngfunc/main.go1
-rw-r--r--vendor/github.com/sonatard/noctx/ngfunc/report.go2
-rw-r--r--vendor/github.com/sonatard/noctx/ngfunc/types.go4
-rw-r--r--vendor/github.com/sonatard/noctx/noctx.go1
-rw-r--r--vendor/github.com/sonatard/noctx/reqwithoutctx/report.go2
-rw-r--r--vendor/github.com/sonatard/noctx/reqwithoutctx/ssa.go3
-rw-r--r--vendor/github.com/tetafro/godot/checks.go2
-rw-r--r--vendor/github.com/tetafro/godot/getters.go6
-rw-r--r--vendor/github.com/timonwong/loggercheck/.golangci.yml16
-rw-r--r--vendor/github.com/timonwong/loggercheck/README.md9
-rw-r--r--vendor/github.com/timonwong/loggercheck/internal/bytebufferpool/pool.go22
-rw-r--r--vendor/github.com/timonwong/loggercheck/internal/checkers/checker.go3
-rw-r--r--vendor/github.com/timonwong/loggercheck/internal/checkers/common.go7
-rw-r--r--vendor/github.com/timonwong/loggercheck/internal/checkers/filter.go35
-rw-r--r--vendor/github.com/timonwong/loggercheck/internal/checkers/printf/printf.go2
-rw-r--r--vendor/github.com/timonwong/loggercheck/internal/checkers/slog.go19
-rw-r--r--vendor/github.com/timonwong/loggercheck/internal/checkers/zap.go31
-rw-r--r--vendor/github.com/timonwong/loggercheck/internal/rules/rules.go5
-rw-r--r--vendor/github.com/timonwong/loggercheck/loggercheck.go2
-rw-r--r--vendor/github.com/timonwong/loggercheck/staticrules.go30
-rw-r--r--vendor/github.com/uudashr/iface/LICENSE201
-rw-r--r--vendor/github.com/uudashr/iface/identical/doc.go3
-rw-r--r--vendor/github.com/uudashr/iface/identical/identical.go138
-rw-r--r--vendor/github.com/uudashr/iface/internal/directive/directive.go76
-rw-r--r--vendor/github.com/uudashr/iface/opaque/doc.go3
-rw-r--r--vendor/github.com/uudashr/iface/opaque/opaque.go321
-rw-r--r--vendor/github.com/uudashr/iface/unused/doc.go3
-rw-r--r--vendor/github.com/uudashr/iface/unused/unused.go138
-rw-r--r--vendor/go-simpler.org/musttag/builtins.go168
-rw-r--r--vendor/go-simpler.org/musttag/musttag.go48
-rw-r--r--vendor/go.uber.org/automaxprocs/internal/runtime/cpu_quota_linux.go14
-rw-r--r--vendor/go.uber.org/automaxprocs/internal/runtime/cpu_quota_unsupported.go2
-rw-r--r--vendor/go.uber.org/automaxprocs/internal/runtime/runtime.go7
-rw-r--r--vendor/go.uber.org/automaxprocs/maxprocs/maxprocs.go21
-rw-r--r--vendor/go.uber.org/automaxprocs/maxprocs/version.go2
-rw-r--r--vendor/golang.org/x/crypto/chacha20/chacha_noasm.go2
-rw-r--r--vendor/golang.org/x/crypto/chacha20/chacha_ppc64x.go (renamed from vendor/golang.org/x/crypto/chacha20/chacha_ppc64le.go)2
-rw-r--r--vendor/golang.org/x/crypto/chacha20/chacha_ppc64x.s (renamed from vendor/golang.org/x/crypto/chacha20/chacha_ppc64le.s)114
-rw-r--r--vendor/golang.org/x/crypto/internal/poly1305/mac_noasm.go2
-rw-r--r--vendor/golang.org/x/crypto/internal/poly1305/sum_ppc64x.go (renamed from vendor/golang.org/x/crypto/internal/poly1305/sum_ppc64le.go)2
-rw-r--r--vendor/golang.org/x/crypto/internal/poly1305/sum_ppc64x.s (renamed from vendor/golang.org/x/crypto/internal/poly1305/sum_ppc64le.s)30
-rw-r--r--vendor/golang.org/x/exp/slices/cmp.go44
-rw-r--r--vendor/golang.org/x/exp/slices/slices.go515
-rw-r--r--vendor/golang.org/x/exp/slices/sort.go197
-rw-r--r--vendor/golang.org/x/exp/slices/zsortanyfunc.go479
-rw-r--r--vendor/golang.org/x/exp/slices/zsortordered.go481
-rw-r--r--vendor/golang.org/x/exp/typeparams/LICENSE4
-rw-r--r--vendor/golang.org/x/net/http2/client_conn_pool.go8
-rw-r--r--vendor/golang.org/x/net/http2/server.go34
-rw-r--r--vendor/golang.org/x/net/http2/transport.go242
-rw-r--r--vendor/golang.org/x/net/http2/unencrypted.go32
-rw-r--r--vendor/golang.org/x/term/README.md11
-rw-r--r--vendor/golang.org/x/tools/cmd/goimports/goimports.go2
-rw-r--r--vendor/golang.org/x/tools/cmd/goimports/gotypesalias.go12
-rw-r--r--vendor/golang.org/x/tools/cmd/stringer/gotypesalias.go12
-rw-r--r--vendor/golang.org/x/tools/cmd/stringer/stringer.go3
-rw-r--r--vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go20
-rw-r--r--vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall.go2
-rw-r--r--vendor/golang.org/x/tools/go/analysis/passes/lostcancel/doc.go4
-rw-r--r--vendor/golang.org/x/tools/go/analysis/passes/lostcancel/lostcancel.go4
-rw-r--r--vendor/golang.org/x/tools/go/analysis/passes/slog/slog.go6
-rw-r--r--vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go2
-rw-r--r--vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/unusedwrite.go10
-rw-r--r--vendor/golang.org/x/tools/go/ast/astutil/imports.go5
-rw-r--r--vendor/golang.org/x/tools/go/ast/inspector/inspector.go4
-rw-r--r--vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go97
-rw-r--r--vendor/golang.org/x/tools/go/loader/loader.go26
-rw-r--r--vendor/golang.org/x/tools/go/packages/external.go6
-rw-r--r--vendor/golang.org/x/tools/go/packages/golist.go38
-rw-r--r--vendor/golang.org/x/tools/go/packages/packages.go325
-rw-r--r--vendor/golang.org/x/tools/go/ssa/ssautil/load.go17
-rw-r--r--vendor/golang.org/x/tools/go/types/objectpath/objectpath.go99
-rw-r--r--vendor/golang.org/x/tools/internal/analysisinternal/analysis.go45
-rw-r--r--vendor/golang.org/x/tools/internal/gcimporter/iexport.go24
-rw-r--r--vendor/golang.org/x/tools/internal/gcimporter/iimport.go12
-rw-r--r--vendor/golang.org/x/tools/internal/gcimporter/iimport_go122.go53
-rw-r--r--vendor/golang.org/x/tools/internal/imports/fix.go233
-rw-r--r--vendor/golang.org/x/tools/internal/imports/imports.go31
-rw-r--r--vendor/golang.org/x/tools/internal/imports/source.go63
-rw-r--r--vendor/golang.org/x/tools/internal/imports/source_env.go125
-rw-r--r--vendor/golang.org/x/tools/internal/testenv/testenv.go63
-rw-r--r--vendor/golang.org/x/tools/internal/typeparams/free.go17
-rw-r--r--vendor/golang.org/x/tools/internal/typesinternal/types.go56
-rw-r--r--vendor/golang.org/x/tools/internal/versions/types.go5
-rw-r--r--vendor/google.golang.org/api/bigquery/v2/bigquery-api.json51
-rw-r--r--vendor/google.golang.org/api/bigquery/v2/bigquery-gen.go244
-rw-r--r--vendor/google.golang.org/api/compute/v1/compute-api.json30
-rw-r--r--vendor/google.golang.org/api/compute/v1/compute-gen.go32
-rw-r--r--vendor/google.golang.org/api/compute/v1/compute2-gen.go4
-rw-r--r--vendor/google.golang.org/api/compute/v1/compute3-gen.go4
-rw-r--r--vendor/google.golang.org/api/internal/version.go2
-rw-r--r--vendor/modules.txt155
388 files changed, 15607 insertions, 8606 deletions
diff --git a/vendor/cloud.google.com/go/auth/CHANGES.md b/vendor/cloud.google.com/go/auth/CHANGES.md
index 5584c350b..e82cf5a90 100644
--- a/vendor/cloud.google.com/go/auth/CHANGES.md
+++ b/vendor/cloud.google.com/go/auth/CHANGES.md
@@ -1,5 +1,12 @@
# Changelog
+## [0.9.4](https://github.com/googleapis/google-cloud-go/compare/auth/v0.9.3...auth/v0.9.4) (2024-09-11)
+
+
+### Bug Fixes
+
+* **auth:** Enable self-signed JWT for non-GDU universe domain ([#10831](https://github.com/googleapis/google-cloud-go/issues/10831)) ([f9869f7](https://github.com/googleapis/google-cloud-go/commit/f9869f7903cfd34d1b97c25d0dc5669d2c5138e6))
+
## [0.9.3](https://github.com/googleapis/google-cloud-go/compare/auth/v0.9.2...auth/v0.9.3) (2024-09-03)
diff --git a/vendor/cloud.google.com/go/auth/credentials/filetypes.go b/vendor/cloud.google.com/go/auth/credentials/filetypes.go
index cf56b025a..6591b1811 100644
--- a/vendor/cloud.google.com/go/auth/credentials/filetypes.go
+++ b/vendor/cloud.google.com/go/auth/credentials/filetypes.go
@@ -124,8 +124,14 @@ func resolveUniverseDomain(optsUniverseDomain, fileUniverseDomain string) string
}
func handleServiceAccount(f *credsfile.ServiceAccountFile, opts *DetectOptions) (auth.TokenProvider, error) {
+ ud := resolveUniverseDomain(opts.UniverseDomain, f.UniverseDomain)
if opts.UseSelfSignedJWT {
return configureSelfSignedJWT(f, opts)
+ } else if ud != "" && ud != internalauth.DefaultUniverseDomain {
+ // For non-GDU universe domains, token exchange is impossible and services
+ // must support self-signed JWTs.
+ opts.UseSelfSignedJWT = true
+ return configureSelfSignedJWT(f, opts)
}
opts2LO := &auth.Options2LO{
Email: f.ClientEmail,
diff --git a/vendor/cloud.google.com/go/auth/credentials/selfsignedjwt.go b/vendor/cloud.google.com/go/auth/credentials/selfsignedjwt.go
index b62a8ae4d..6ae29de6c 100644
--- a/vendor/cloud.google.com/go/auth/credentials/selfsignedjwt.go
+++ b/vendor/cloud.google.com/go/auth/credentials/selfsignedjwt.go
@@ -17,6 +17,7 @@ package credentials
import (
"context"
"crypto/rsa"
+ "errors"
"fmt"
"strings"
"time"
@@ -35,6 +36,9 @@ var (
// configureSelfSignedJWT uses the private key in the service account to create
// a JWT without making a network call.
func configureSelfSignedJWT(f *credsfile.ServiceAccountFile, opts *DetectOptions) (auth.TokenProvider, error) {
+ if len(opts.scopes()) == 0 && opts.Audience == "" {
+ return nil, errors.New("credentials: both scopes and audience are empty")
+ }
pk, err := internal.ParseKey([]byte(f.PrivateKey))
if err != nil {
return nil, fmt.Errorf("credentials: could not parse key: %w", err)
diff --git a/vendor/github.com/Antonboom/errname/pkg/analyzer/analyzer.go b/vendor/github.com/Antonboom/errname/pkg/analyzer/analyzer.go
index aa8522510..2b8794dc2 100644
--- a/vendor/github.com/Antonboom/errname/pkg/analyzer/analyzer.go
+++ b/vendor/github.com/Antonboom/errname/pkg/analyzer/analyzer.go
@@ -1,11 +1,9 @@
package analyzer
import (
- "fmt"
"go/ast"
"go/token"
- "strconv"
- "strings"
+ "go/types"
"unicode"
"golang.org/x/tools/go/analysis"
@@ -23,86 +21,61 @@ func New() *analysis.Analyzer {
}
}
-type stringSet = map[string]struct{}
-
-var (
- importNodes = []ast.Node{(*ast.ImportSpec)(nil)}
- typeNodes = []ast.Node{(*ast.TypeSpec)(nil)}
- funcNodes = []ast.Node{(*ast.FuncDecl)(nil)}
-)
-
func run(pass *analysis.Pass) (interface{}, error) {
insp := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
- pkgAliases := map[string]string{}
- insp.Preorder(importNodes, func(node ast.Node) {
- i := node.(*ast.ImportSpec)
- if n := i.Name; n != nil && i.Path != nil {
- if path, err := strconv.Unquote(i.Path.Value); err == nil {
- pkgAliases[n.Name] = getPkgFromPath(path)
- }
- }
- })
-
- allTypes := stringSet{}
- typesSpecs := map[string]*ast.TypeSpec{}
- insp.Preorder(typeNodes, func(node ast.Node) {
- t := node.(*ast.TypeSpec)
- allTypes[t.Name.Name] = struct{}{}
- typesSpecs[t.Name.Name] = t
- })
-
- errorTypes := stringSet{}
- insp.Preorder(funcNodes, func(node ast.Node) {
- f := node.(*ast.FuncDecl)
- t, ok := isMethodError(f)
- if !ok {
- return
- }
- errorTypes[t] = struct{}{}
-
- tSpec, ok := typesSpecs[t]
- if !ok {
- panic(fmt.Sprintf("no specification for type %q", t))
- }
-
- if _, ok := tSpec.Type.(*ast.ArrayType); ok {
- if !isValidErrorArrayTypeName(t) {
- reportAboutErrorType(pass, tSpec.Pos(), t, true)
- }
- } else if !isValidErrorTypeName(t) {
- reportAboutErrorType(pass, tSpec.Pos(), t, false)
- }
- })
-
- errorFuncs := stringSet{}
- insp.Preorder(funcNodes, func(node ast.Node) {
- f := node.(*ast.FuncDecl)
- if isFuncReturningErr(f.Type, allTypes, errorTypes) {
- errorFuncs[f.Name.Name] = struct{}{}
+ insp.Nodes([]ast.Node{
+ (*ast.TypeSpec)(nil),
+ (*ast.ValueSpec)(nil),
+ (*ast.FuncDecl)(nil),
+ }, func(node ast.Node, push bool) bool {
+ if !push {
+ return false
}
- })
- inspectPkgLevelVarsOnly := func(node ast.Node) bool {
switch v := node.(type) {
case *ast.FuncDecl:
return false
case *ast.ValueSpec:
- if name, ok := isSentinelError(v, pkgAliases, allTypes, errorTypes, errorFuncs); ok && !isValidErrorVarName(name) {
- reportAboutErrorVar(pass, v.Pos(), name)
+ if len(v.Names) != 1 {
+ return false
+ }
+ ident := v.Names[0]
+
+ if exprImplementsError(pass, ident) && !isValidErrorVarName(ident.Name) {
+ reportAboutSentinelError(pass, v.Pos(), ident.Name)
+ }
+ return false
+
+ case *ast.TypeSpec:
+ tt := pass.TypesInfo.TypeOf(v.Name)
+ if tt == nil {
+ return false
+ }
+ // NOTE(a.telyshev): Pointer is the hack against Error() method with pointer receiver.
+ if !typeImplementsError(types.NewPointer(tt)) {
+ return false
}
+
+ name := v.Name.Name
+ if _, ok := v.Type.(*ast.ArrayType); ok {
+ if !isValidErrorArrayTypeName(name) {
+ reportAboutArrayErrorType(pass, v.Pos(), name)
+ }
+ } else if !isValidErrorTypeName(name) {
+ reportAboutErrorType(pass, v.Pos(), name)
+ }
+ return false
}
+
return true
- }
- for _, f := range pass.Files {
- ast.Inspect(f, inspectPkgLevelVarsOnly)
- }
+ })
return nil, nil //nolint:nilnil
}
-func reportAboutErrorType(pass *analysis.Pass, typePos token.Pos, typeName string, isArrayType bool) {
+func reportAboutErrorType(pass *analysis.Pass, typePos token.Pos, typeName string) {
var form string
if unicode.IsLower([]rune(typeName)[0]) {
form = "xxxError"
@@ -110,26 +83,26 @@ func reportAboutErrorType(pass *analysis.Pass, typePos token.Pos, typeName strin
form = "XxxError"
}
- if isArrayType {
- form += "s"
+ pass.Reportf(typePos, "the error type name `%s` should conform to the `%s` format", typeName, form)
+}
+
+func reportAboutArrayErrorType(pass *analysis.Pass, typePos token.Pos, typeName string) {
+ var forms string
+ if unicode.IsLower([]rune(typeName)[0]) {
+ forms = "`xxxErrors` or `xxxError`"
+ } else {
+ forms = "`XxxErrors` or `XxxError`"
}
- pass.Reportf(typePos, "the type name `%s` should conform to the `%s` format", typeName, form)
+
+ pass.Reportf(typePos, "the error type name `%s` should conform to the %s format", typeName, forms)
}
-func reportAboutErrorVar(pass *analysis.Pass, pos token.Pos, varName string) {
+func reportAboutSentinelError(pass *analysis.Pass, pos token.Pos, varName string) {
var form string
if unicode.IsLower([]rune(varName)[0]) {
form = "errXxx"
} else {
form = "ErrXxx"
}
- pass.Reportf(pos, "the variable name `%s` should conform to the `%s` format", varName, form)
-}
-
-func getPkgFromPath(p string) string {
- idx := strings.LastIndex(p, "/")
- if idx == -1 {
- return p
- }
- return p[idx+1:]
+ pass.Reportf(pos, "the sentinel error name `%s` should conform to the `%s` format", varName, form)
}
diff --git a/vendor/github.com/Antonboom/errname/pkg/analyzer/facts.go b/vendor/github.com/Antonboom/errname/pkg/analyzer/facts.go
index 06f8d61d8..04e14fb68 100644
--- a/vendor/github.com/Antonboom/errname/pkg/analyzer/facts.go
+++ b/vendor/github.com/Antonboom/errname/pkg/analyzer/facts.go
@@ -1,58 +1,22 @@
package analyzer
import (
- "fmt"
"go/ast"
- "go/token"
"go/types"
"strings"
"unicode"
-)
-
-func isMethodError(f *ast.FuncDecl) (typeName string, ok bool) {
- if f.Recv == nil || len(f.Recv.List) != 1 {
- return "", false
- }
- if f.Name == nil || f.Name.Name != "Error" {
- return "", false
- }
- if f.Type == nil || f.Type.Results == nil || len(f.Type.Results.List) != 1 {
- return "", false
- }
-
- returnType, ok := f.Type.Results.List[0].Type.(*ast.Ident)
- if !ok {
- return "", false
- }
-
- var receiverType string
-
- unwrapIdentName := func(e ast.Expr) string {
- switch v := e.(type) {
- case *ast.Ident:
- return v.Name
- case *ast.IndexExpr:
- if i, ok := v.X.(*ast.Ident); ok {
- return i.Name
- }
- case *ast.IndexListExpr:
- if i, ok := v.X.(*ast.Ident); ok {
- return i.Name
- }
- }
- panic(fmt.Errorf("unsupported Error() receiver type %q", types.ExprString(e)))
- }
+ "golang.org/x/tools/go/analysis"
+)
- switch rt := f.Recv.List[0].Type; v := rt.(type) {
- case *ast.Ident, *ast.IndexExpr, *ast.IndexListExpr: // SomeError, SomeError[T], SomeError[T1, T2, ...]
- receiverType = unwrapIdentName(rt)
+var errorIface = types.Universe.Lookup("error").Type().Underlying().(*types.Interface)
- case *ast.StarExpr: // *SomeError, *SomeError[T], *SomeError[T1, T2, ...]
- receiverType = unwrapIdentName(v.X)
- }
+func exprImplementsError(pass *analysis.Pass, e ast.Expr) bool {
+ return typeImplementsError(pass.TypesInfo.TypeOf(e))
+}
- return receiverType, returnType.Name == "string"
+func typeImplementsError(t types.Type) bool {
+ return t != nil && types.Implements(t, errorIface)
}
func isValidErrorTypeName(s string) bool {
@@ -77,153 +41,12 @@ func isValidErrorArrayTypeName(s string) bool {
words := split(s)
wordsCnt := wordsCount(words)
- if wordsCnt["errors"] != 1 {
- return false
- }
- return words[len(words)-1] == "errors"
-}
-
-func isFuncReturningErr(fType *ast.FuncType, allTypes, errorTypes stringSet) bool {
- if fType == nil || fType.Results == nil || len(fType.Results.List) != 1 {
+ if wordsCnt["errors"] != 1 && wordsCnt["error"] != 1 {
return false
}
- var returnTypeName string
- switch rt := fType.Results.List[0].Type.(type) {
- case *ast.Ident:
- returnTypeName = rt.Name
- case *ast.StarExpr:
- if i, ok := rt.X.(*ast.Ident); ok {
- returnTypeName = i.Name
- }
- }
-
- return isErrorType(returnTypeName, allTypes, errorTypes)
-}
-
-func isErrorType(tName string, allTypes, errorTypes stringSet) bool {
- _, isUserType := allTypes[tName]
- _, isErrType := errorTypes[tName]
- return isErrType || (tName == "error" && !isUserType)
-}
-
-var knownErrConstructors = stringSet{
- "fmt.Errorf": {},
- "errors.Errorf": {},
- "errors.New": {},
- "errors.Newf": {},
- "errors.NewWithDepth": {},
- "errors.NewWithDepthf": {},
- "errors.NewAssertionErrorWithWrappedErrf": {},
-}
-
-func isSentinelError( //nolint:gocognit,gocyclo
- v *ast.ValueSpec,
- pkgAliases map[string]string,
- allTypes, errorTypes, errorFuncs stringSet,
-) (varName string, ok bool) {
- if len(v.Names) != 1 {
- return "", false
- }
- varName = v.Names[0].Name
-
- switch vv := v.Type.(type) {
- // var ErrEndOfFile error
- // var ErrEndOfFile SomeErrType
- case *ast.Ident:
- if isErrorType(vv.Name, allTypes, errorTypes) {
- return varName, true
- }
-
- // var ErrEndOfFile *SomeErrType
- case *ast.StarExpr:
- if i, ok := vv.X.(*ast.Ident); ok && isErrorType(i.Name, allTypes, errorTypes) {
- return varName, true
- }
- }
-
- if len(v.Values) != 1 {
- return "", false
- }
-
- switch vv := v.Values[0].(type) {
- case *ast.CallExpr:
- switch fun := vv.Fun.(type) {
- // var ErrEndOfFile = errors.New("end of file")
- case *ast.SelectorExpr:
- pkg, ok := fun.X.(*ast.Ident)
- if !ok {
- return "", false
- }
- pkgFun := fun.Sel
-
- pkgName := pkg.Name
- if a, ok := pkgAliases[pkgName]; ok {
- pkgName = a
- }
-
- _, ok = knownErrConstructors[pkgName+"."+pkgFun.Name]
- return varName, ok
-
- // var ErrEndOfFile = newErrEndOfFile()
- // var ErrEndOfFile = new(EndOfFileError)
- // const ErrEndOfFile = constError("end of file")
- // var statusCodeError = new(SomePtrError[string])
- case *ast.Ident:
- if isErrorType(fun.Name, allTypes, errorTypes) {
- return varName, true
- }
-
- if _, ok := errorFuncs[fun.Name]; ok {
- return varName, true
- }
-
- if fun.Name == "new" && len(vv.Args) == 1 {
- switch i := vv.Args[0].(type) {
- case *ast.Ident:
- return varName, isErrorType(i.Name, allTypes, errorTypes)
- case *ast.IndexExpr:
- if ii, ok := i.X.(*ast.Ident); ok {
- return varName, isErrorType(ii.Name, allTypes, errorTypes)
- }
- }
- }
-
- // var ErrEndOfFile = func() error { ... }
- case *ast.FuncLit:
- return varName, isFuncReturningErr(fun.Type, allTypes, errorTypes)
- }
-
- // var ErrEndOfFile = &EndOfFileError{}
- // var ErrOK = &SomePtrError[string]{Code: "200 OK"}
- case *ast.UnaryExpr:
- if vv.Op == token.AND { // &
- if lit, ok := vv.X.(*ast.CompositeLit); ok {
- switch i := lit.Type.(type) {
- case *ast.Ident:
- return varName, isErrorType(i.Name, allTypes, errorTypes)
- case *ast.IndexExpr:
- if ii, ok := i.X.(*ast.Ident); ok {
- return varName, isErrorType(ii.Name, allTypes, errorTypes)
- }
- }
- }
- }
-
- // var ErrEndOfFile = EndOfFileError{}
- // var ErrNotFound = SomeError[string]{Code: "Not Found"}
- case *ast.CompositeLit:
- switch i := vv.Type.(type) {
- case *ast.Ident:
- return varName, isErrorType(i.Name, allTypes, errorTypes)
- case *ast.IndexExpr:
- if ii, ok := i.X.(*ast.Ident); ok {
- return varName, isErrorType(ii.Name, allTypes, errorTypes)
- }
- }
- }
-
- return "", false
+ lastWord := words[len(words)-1]
+ return lastWord == "errors" || lastWord == "error"
}
func isValidErrorVarName(s string) bool {
diff --git a/vendor/github.com/Antonboom/nilnil/pkg/analyzer/analyzer.go b/vendor/github.com/Antonboom/nilnil/pkg/analyzer/analyzer.go
index 5646ee909..5507d9546 100644
--- a/vendor/github.com/Antonboom/nilnil/pkg/analyzer/analyzer.go
+++ b/vendor/github.com/Antonboom/nilnil/pkg/analyzer/analyzer.go
@@ -15,7 +15,8 @@ const (
name = "nilnil"
doc = "Checks that there is no simultaneous return of `nil` error and an invalid value."
- reportMsg = "return both the `nil` error and invalid value: use a sentinel error instead"
+ nilNilReportMsg = "return both a `nil` error and an invalid value: use a sentinel error instead"
+ notNilNotNilReportMsg = "return both a non-nil error and a valid value: use separate returns instead"
)
// New returns new nilnil analyzer.
@@ -28,18 +29,22 @@ func New() *analysis.Analyzer {
Run: n.run,
Requires: []*analysis.Analyzer{inspect.Analyzer},
}
- a.Flags.Var(&n.checkedTypes, "checked-types", "coma separated list")
+ a.Flags.Var(&n.checkedTypes, "checked-types", "comma separated list of return types to check")
+ a.Flags.BoolVar(&n.detectOpposite, "detect-opposite", false,
+ "in addition, detect opposite situation (simultaneous return of non-nil error and valid value)")
return a
}
type nilNil struct {
- checkedTypes checkedTypes
+ checkedTypes checkedTypes
+ detectOpposite bool
}
func newNilNil() *nilNil {
return &nilNil{
- checkedTypes: newDefaultCheckedTypes(),
+ checkedTypes: newDefaultCheckedTypes(),
+ detectOpposite: false,
}
}
@@ -87,22 +92,22 @@ func (n *nilNil) run(pass *analysis.Pass) (interface{}, error) {
}
ok, zv := n.isDangerNilType(fRes1Type)
- if !(ok && isErrorType(fRes2Type)) {
+ if !(ok && implementsError(fRes2Type)) {
return false
}
retVal, retErr := v.Results[0], v.Results[1]
- var needWarn bool
- switch zv {
- case zeroValueNil:
- needWarn = isNil(pass, retVal) && isNil(pass, retErr)
- case zeroValueZero:
- needWarn = isZero(retVal) && isNil(pass, retErr)
+ if ((zv == zeroValueNil) && isNil(pass, retVal) && isNil(pass, retErr)) ||
+ ((zv == zeroValueZero) && isZero(retVal) && isNil(pass, retErr)) {
+ pass.Reportf(v.Pos(), nilNilReportMsg)
+ return false
}
- if needWarn {
- pass.Reportf(v.Pos(), reportMsg)
+ if n.detectOpposite && (((zv == zeroValueNil) && !isNil(pass, retVal) && !isNil(pass, retErr)) ||
+ ((zv == zeroValueZero) && !isZero(retVal) && !isNil(pass, retErr))) {
+ pass.Reportf(v.Pos(), notNilNotNilReportMsg)
+ return false
}
}
@@ -152,7 +157,7 @@ func (n *nilNil) isDangerNilType(t types.Type) (bool, zeroValue) {
var errorIface = types.Universe.Lookup("error").Type().Underlying().(*types.Interface)
-func isErrorType(t types.Type) bool {
+func implementsError(t types.Type) bool {
_, ok := t.Underlying().(*types.Interface)
return ok && types.Implements(t, errorIface)
}
diff --git a/vendor/github.com/Antonboom/nilnil/pkg/analyzer/config.go b/vendor/github.com/Antonboom/nilnil/pkg/analyzer/config.go
index c9b8e3eed..90ae548f3 100644
--- a/vendor/github.com/Antonboom/nilnil/pkg/analyzer/config.go
+++ b/vendor/github.com/Antonboom/nilnil/pkg/analyzer/config.go
@@ -8,11 +8,11 @@ import (
func newDefaultCheckedTypes() checkedTypes {
return checkedTypes{
- ptrType: {},
+ chanType: {},
funcType: {},
ifaceType: {},
mapType: {},
- chanType: {},
+ ptrType: {},
uintptrType: {},
unsafeptrType: {},
}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/analysisutil/encoded.go b/vendor/github.com/Antonboom/testifylint/internal/analysisutil/encoded.go
new file mode 100644
index 000000000..cafc283e6
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/analysisutil/encoded.go
@@ -0,0 +1,46 @@
+package analysisutil
+
+import "strings"
+
+var whitespaceRemover = strings.NewReplacer("\n", "", "\\n", "", "\t", "", "\\t", "", " ", "")
+
+// IsJSONLike returns true if the string has JSON format features.
+// A positive result can be returned for invalid JSON as well.
+func IsJSONLike(s string) bool {
+ s = whitespaceRemover.Replace(unescape(s))
+
+ var startMatch bool
+ for _, prefix := range []string{
+ `{{`, `{[`, `{"`,
+ `[{{`, `[{[`, `[{"`,
+ } {
+ if strings.HasPrefix(s, prefix) {
+ startMatch = true
+ break
+ }
+ }
+ if !startMatch {
+ return false
+ }
+
+ for _, keyValue := range []string{`":{`, `":[`, `":"`} {
+ if strings.Contains(s, keyValue) {
+ return true
+ }
+ }
+ return false
+
+ // NOTE(a.telyshev): We do not check the end of the string, because this is usually a field for typos.
+ // And one of the reasons for using JSON-specific assertions is to catch typos like this.
+}
+
+func unescape(s string) string {
+ s = strings.ReplaceAll(s, `\"`, `"`)
+ s = unquote(s, `"`)
+ s = unquote(s, "`")
+ return s
+}
+
+func unquote(s string, q string) string {
+ return strings.TrimLeft(strings.TrimRight(s, q), q)
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/analysisutil/file.go b/vendor/github.com/Antonboom/testifylint/internal/analysisutil/file.go
index 3fc1f42b8..d55260918 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/analysisutil/file.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/analysisutil/file.go
@@ -2,6 +2,7 @@ package analysisutil
import (
"go/ast"
+ "slices"
"strconv"
)
@@ -17,11 +18,8 @@ func Imports(file *ast.File, pkgs ...string) bool {
if err != nil {
continue
}
- // NOTE(a.telyshev): Don't use `slices.Contains` to keep the minimum module version 1.20.
- for _, pkg := range pkgs { // Small O(n).
- if pkg == path {
- return true
- }
+ if slices.Contains(pkgs, path) { // Small O(n).
+ return true
}
}
return false
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/blank_import.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/blank_import.go
index 403691e27..56cd64e07 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/blank_import.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/blank_import.go
@@ -53,7 +53,7 @@ func (checker BlankImport) Check(pass *analysis.Pass, _ *inspector.Inspector) (d
}
msg := fmt.Sprintf("avoid blank import of %s as it does nothing", pkg)
- diagnostics = append(diagnostics, *newDiagnostic(checker.Name(), imp, msg, nil))
+ diagnostics = append(diagnostics, *newDiagnostic(checker.Name(), imp, msg))
}
}
return diagnostics
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/bool_compare.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/bool_compare.go
index d125c43f9..67959b633 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/bool_compare.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/bool_compare.go
@@ -49,13 +49,11 @@ func (checker BoolCompare) Check(pass *analysis.Pass, call *CallMeta) *analysis.
}
survivingArg = newBoolCast(survivingArg)
}
- return newUseFunctionDiagnostic(checker.Name(), call, proposed,
- newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{
- Pos: replaceStart,
- End: replaceEnd,
- NewText: analysisutil.NodeBytes(pass.Fset, survivingArg),
- }),
- )
+ return newUseFunctionDiagnostic(checker.Name(), call, proposed, analysis.TextEdit{
+ Pos: replaceStart,
+ End: replaceEnd,
+ NewText: analysisutil.NodeBytes(pass.Fset, survivingArg),
+ })
}
newUseTrueDiagnostic := func(survivingArg ast.Expr, replaceStart, replaceEnd token.Pos) *analysis.Diagnostic {
@@ -74,7 +72,7 @@ func (checker BoolCompare) Check(pass *analysis.Pass, call *CallMeta) *analysis.
survivingArg = newBoolCast(survivingArg)
}
return newDiagnostic(checker.Name(), call, "need to simplify the assertion",
- &analysis.SuggestedFix{
+ analysis.SuggestedFix{
Message: "Simplify the assertion",
TextEdits: []analysis.TextEdit{{
Pos: replaceStart,
@@ -106,7 +104,7 @@ func (checker BoolCompare) Check(pass *analysis.Pass, call *CallMeta) *analysis.
case xor(t1, t2):
survivingArg, _ := anyVal([]bool{t1, t2}, arg2, arg1)
if call.Fn.NameFTrimmed == "Exactly" && !isBuiltinBool(pass, survivingArg) {
- // NOTE(a.telyshev): `Exactly` assumes no type casting.
+ // NOTE(a.telyshev): `Exactly` assumes no type conversion.
return nil
}
return newUseTrueDiagnostic(survivingArg, arg1.Pos(), arg2.End())
@@ -114,7 +112,7 @@ func (checker BoolCompare) Check(pass *analysis.Pass, call *CallMeta) *analysis.
case xor(f1, f2):
survivingArg, _ := anyVal([]bool{f1, f2}, arg2, arg1)
if call.Fn.NameFTrimmed == "Exactly" && !isBuiltinBool(pass, survivingArg) {
- // NOTE(a.telyshev): `Exactly` assumes no type casting.
+ // NOTE(a.telyshev): `Exactly` assumes no type conversion.
return nil
}
return newUseFalseDiagnostic(survivingArg, arg1.Pos(), arg2.End())
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/checkers_registry.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/checkers_registry.go
index 17c7d14ee..f881be4f2 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/checkers_registry.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/checkers_registry.go
@@ -13,10 +13,13 @@ var registry = checkersRegistry{
{factory: asCheckerFactory(NewLen), enabledByDefault: true},
{factory: asCheckerFactory(NewNegativePositive), enabledByDefault: true},
{factory: asCheckerFactory(NewCompares), enabledByDefault: true},
+ {factory: asCheckerFactory(NewContains), enabledByDefault: true},
{factory: asCheckerFactory(NewErrorNil), enabledByDefault: true},
{factory: asCheckerFactory(NewNilCompare), enabledByDefault: true},
{factory: asCheckerFactory(NewErrorIsAs), enabledByDefault: true},
+ {factory: asCheckerFactory(NewEncodedCompare), enabledByDefault: true},
{factory: asCheckerFactory(NewExpectedActual), enabledByDefault: true},
+ {factory: asCheckerFactory(NewRegexp), enabledByDefault: true},
{factory: asCheckerFactory(NewSuiteExtraAssertCall), enabledByDefault: true},
{factory: asCheckerFactory(NewSuiteDontUsePkg), enabledByDefault: true},
{factory: asCheckerFactory(NewUselessAssert), enabledByDefault: true},
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/compares.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/compares.go
index bdde03d95..f0c4013f1 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/compares.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/compares.go
@@ -61,7 +61,9 @@ func (checker Compares) Check(pass *analysis.Pass, call *CallMeta) *analysis.Dia
return nil
}
- if isPointer(pass, be.X) && isPointer(pass, be.Y) {
+ _, xp := isPointer(pass, be.X)
+ _, yp := isPointer(pass, be.Y)
+ if xp && yp {
switch proposedFn {
case "Equal":
proposedFn = "Same"
@@ -72,12 +74,11 @@ func (checker Compares) Check(pass *analysis.Pass, call *CallMeta) *analysis.Dia
a, b := be.X, be.Y
return newUseFunctionDiagnostic(checker.Name(), call, proposedFn,
- newSuggestedFuncReplacement(call, proposedFn, analysis.TextEdit{
+ analysis.TextEdit{
Pos: be.X.Pos(),
End: be.Y.End(),
NewText: formatAsCallArgs(pass, a, b),
- }),
- )
+ })
}
var tokenToProposedFnInsteadOfTrue = map[token.Token]string{
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/contains.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/contains.go
new file mode 100644
index 000000000..07f76c6e4
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/contains.go
@@ -0,0 +1,71 @@
+package checkers
+
+import (
+ "go/ast"
+
+ "golang.org/x/tools/go/analysis"
+)
+
+// Contains detects situations like
+//
+// assert.True(t, strings.Contains(a, "abc123"))
+// assert.False(t, !strings.Contains(a, "abc123"))
+//
+// assert.False(t, strings.Contains(a, "abc123"))
+// assert.True(t, !strings.Contains(a, "abc123"))
+//
+// and requires
+//
+// assert.Contains(t, a, "abc123")
+// assert.NotContains(t, a, "abc123")
+type Contains struct{}
+
+// NewContains constructs Contains checker.
+func NewContains() Contains { return Contains{} }
+func (Contains) Name() string { return "contains" }
+
+func (checker Contains) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic {
+ if len(call.Args) < 1 {
+ return nil
+ }
+
+ expr := call.Args[0]
+ unpacked, isNeg := isNegation(expr)
+ if isNeg {
+ expr = unpacked
+ }
+
+ ce, ok := expr.(*ast.CallExpr)
+ if !ok || len(ce.Args) != 2 {
+ return nil
+ }
+
+ if !isStringsContainsCall(pass, ce) {
+ return nil
+ }
+
+ var proposed string
+ switch call.Fn.NameFTrimmed {
+ default:
+ return nil
+
+ case "True":
+ proposed = "Contains"
+ if isNeg {
+ proposed = "NotContains"
+ }
+
+ case "False":
+ proposed = "NotContains"
+ if isNeg {
+ proposed = "Contains"
+ }
+ }
+
+ return newUseFunctionDiagnostic(checker.Name(), call, proposed,
+ analysis.TextEdit{
+ Pos: call.Args[0].Pos(),
+ End: call.Args[0].End(),
+ NewText: formatAsCallArgs(pass, ce.Args[0], ce.Args[1]),
+ })
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/empty.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/empty.go
index eafecb678..71657fe11 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/empty.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/empty.go
@@ -53,25 +53,28 @@ func (checker Empty) checkEmpty(pass *analysis.Pass, call *CallMeta) *analysis.D
newUseEmptyDiagnostic := func(replaceStart, replaceEnd token.Pos, replaceWith ast.Expr) *analysis.Diagnostic {
const proposed = "Empty"
return newUseFunctionDiagnostic(checker.Name(), call, proposed,
- newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{
+ analysis.TextEdit{
Pos: replaceStart,
End: replaceEnd,
NewText: analysisutil.NodeBytes(pass.Fset, replaceWith),
- }),
- )
+ })
}
if len(call.Args) == 0 {
return nil
}
-
a := call.Args[0]
+
switch call.Fn.NameFTrimmed {
- case "Zero", "Empty":
- lenArg, ok := isBuiltinLenCall(pass, a)
- if ok {
+ case "Zero":
+ if lenArg, ok := isBuiltinLenCall(pass, a); ok {
return newUseEmptyDiagnostic(a.Pos(), a.End(), lenArg)
}
+
+ case "Empty":
+ if lenArg, ok := isBuiltinLenCall(pass, a); ok {
+ return newRemoveLenDiagnostic(pass, checker.Name(), call, a, lenArg)
+ }
}
if len(call.Args) < 2 {
@@ -120,25 +123,28 @@ func (checker Empty) checkNotEmpty(pass *analysis.Pass, call *CallMeta) *analysi
newUseNotEmptyDiagnostic := func(replaceStart, replaceEnd token.Pos, replaceWith ast.Expr) *analysis.Diagnostic {
const proposed = "NotEmpty"
return newUseFunctionDiagnostic(checker.Name(), call, proposed,
- newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{
+ analysis.TextEdit{
Pos: replaceStart,
End: replaceEnd,
NewText: analysisutil.NodeBytes(pass.Fset, replaceWith),
- }),
- )
+ })
}
if len(call.Args) == 0 {
return nil
}
-
a := call.Args[0]
+
switch call.Fn.NameFTrimmed {
- case "NotZero", "NotEmpty", "Positive":
- lenArg, ok := isBuiltinLenCall(pass, a)
- if ok {
+ case "NotZero", "Positive":
+ if lenArg, ok := isBuiltinLenCall(pass, a); ok {
return newUseNotEmptyDiagnostic(a.Pos(), a.End(), lenArg)
}
+
+ case "NotEmpty":
+ if lenArg, ok := isBuiltinLenCall(pass, a); ok {
+ return newRemoveLenDiagnostic(pass, checker.Name(), call, a, lenArg)
+ }
}
if len(call.Args) < 2 {
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/encoded_compare.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/encoded_compare.go
new file mode 100644
index 000000000..53c74ac45
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/encoded_compare.go
@@ -0,0 +1,101 @@
+package checkers
+
+import (
+ "go/ast"
+
+ "golang.org/x/tools/go/analysis"
+)
+
+// EncodedCompare detects situations like
+//
+// assert.Equal(t, `{"foo": "bar"}`, body)
+// assert.EqualValues(t, `{"foo": "bar"}`, body)
+// assert.Exactly(t, `{"foo": "bar"}`, body)
+// assert.Equal(t, expectedJSON, resultJSON)
+// assert.Equal(t, expBodyConst, w.Body.String())
+// assert.Equal(t, fmt.Sprintf(`{"value":"%s"}`, hexString), result)
+// assert.Equal(t, "{}", json.RawMessage(resp))
+// assert.Equal(t, expJSON, strings.Trim(string(resultJSONBytes), "\n")) // + Replace, ReplaceAll, TrimSpace
+//
+// assert.Equal(t, expectedYML, conf)
+//
+// and requires
+//
+// assert.JSONEq(t, `{"foo": "bar"}`, body)
+// assert.YAMLEq(t, expectedYML, conf)
+type EncodedCompare struct{}
+
+// NewEncodedCompare constructs EncodedCompare checker.
+func NewEncodedCompare() EncodedCompare { return EncodedCompare{} }
+func (EncodedCompare) Name() string { return "encoded-compare" }
+
+func (checker EncodedCompare) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic {
+ switch call.Fn.NameFTrimmed {
+ case "Equal", "EqualValues", "Exactly":
+ default:
+ return nil
+ }
+
+ if len(call.Args) < 2 {
+ return nil
+ }
+ lhs, rhs := call.Args[0], call.Args[1]
+
+ a, aIsExplicitJSON := checker.unwrap(pass, call.Args[0])
+ b, bIsExplicitJSON := checker.unwrap(pass, call.Args[1])
+
+ var proposed string
+ switch {
+ case aIsExplicitJSON, bIsExplicitJSON, isJSONStyleExpr(pass, a), isJSONStyleExpr(pass, b):
+ proposed = "JSONEq"
+ case isYAMLStyleExpr(a), isYAMLStyleExpr(b):
+ proposed = "YAMLEq"
+ }
+
+ if proposed != "" {
+ return newUseFunctionDiagnostic(checker.Name(), call, proposed,
+ analysis.TextEdit{
+ Pos: lhs.Pos(),
+ End: lhs.End(),
+ NewText: formatWithStringCastForBytes(pass, a),
+ },
+ analysis.TextEdit{
+ Pos: rhs.Pos(),
+ End: rhs.End(),
+ NewText: formatWithStringCastForBytes(pass, b),
+ },
+ )
+ }
+ return nil
+}
+
+// unwrap unwraps expression from string, []byte, strings.Replace(All), strings.Trim(Space) and json.RawMessage conversions.
+// Returns true in the second argument, if json.RawMessage was in the chain.
+func (checker EncodedCompare) unwrap(pass *analysis.Pass, e ast.Expr) (ast.Expr, bool) {
+ ce, ok := e.(*ast.CallExpr)
+ if !ok {
+ return e, false
+ }
+ if len(ce.Args) == 0 {
+ return e, false
+ }
+
+ if isJSONRawMessageCast(pass, ce) {
+ if isNil(ce.Args[0]) { // NOTE(a.telyshev): Ignore json.RawMessage(nil) case.
+ return checker.unwrap(pass, ce.Args[0])
+ }
+
+ v, _ := checker.unwrap(pass, ce.Args[0])
+ return v, true
+ }
+
+ if isIdentWithName("string", ce.Fun) ||
+ isByteArray(ce.Fun) ||
+ isStringsReplaceCall(pass, ce) ||
+ isStringsReplaceAllCall(pass, ce) ||
+ isStringsTrimCall(pass, ce) ||
+ isStringsTrimSpaceCall(pass, ce) {
+ return checker.unwrap(pass, ce.Args[0])
+ }
+ return e, false
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/error_is_as.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/error_is_as.go
index ab92c2ec0..f2812c939 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/error_is_as.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/error_is_as.go
@@ -67,12 +67,11 @@ func (checker ErrorIsAs) Check(pass *analysis.Pass, call *CallMeta) *analysis.Di
}
if proposed != "" {
return newUseFunctionDiagnostic(checker.Name(), call, proposed,
- newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{
+ analysis.TextEdit{
Pos: ce.Pos(),
End: ce.End(),
NewText: formatAsCallArgs(pass, ce.Args[0], ce.Args[1]),
- }),
- )
+ })
}
case "False":
@@ -91,12 +90,11 @@ func (checker ErrorIsAs) Check(pass *analysis.Pass, call *CallMeta) *analysis.Di
if isErrorsIsCall(pass, ce) {
const proposed = "NotErrorIs"
return newUseFunctionDiagnostic(checker.Name(), call, proposed,
- newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{
+ analysis.TextEdit{
Pos: ce.Pos(),
End: ce.End(),
NewText: formatAsCallArgs(pass, ce.Args[0], ce.Args[1]),
- }),
- )
+ })
}
case "ErrorAs":
@@ -127,15 +125,15 @@ func (checker ErrorIsAs) Check(pass *analysis.Pass, call *CallMeta) *analysis.Di
pt, ok := tv.Type.Underlying().(*types.Pointer)
if !ok {
- return newDiagnostic(checker.Name(), call, defaultReport, nil)
+ return newDiagnostic(checker.Name(), call, defaultReport)
}
if pt.Elem() == errorType {
- return newDiagnostic(checker.Name(), call, errorPtrReport, nil)
+ return newDiagnostic(checker.Name(), call, errorPtrReport)
}
_, isInterface := pt.Elem().Underlying().(*types.Interface)
if !isInterface && !types.Implements(pt.Elem(), errorIface) {
- return newDiagnostic(checker.Name(), call, defaultReport, nil)
+ return newDiagnostic(checker.Name(), call, defaultReport)
}
}
return nil
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/error_nil.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/error_nil.go
index 1e56d222a..b9f28df21 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/error_nil.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/error_nil.go
@@ -12,12 +12,16 @@ import (
// ErrorNil detects situations like
//
// assert.Nil(t, err)
-// assert.NotNil(t, err)
+// assert.Empty(t, err)
+// assert.Zero(t, err)
// assert.Equal(t, nil, err)
// assert.EqualValues(t, nil, err)
// assert.Exactly(t, nil, err)
// assert.ErrorIs(t, err, nil)
//
+// assert.NotNil(t, err)
+// assert.NotEmpty(t, err)
+// assert.NotZero(t, err)
// assert.NotEqual(t, nil, err)
// assert.NotEqualValues(t, nil, err)
// assert.NotErrorIs(t, err, nil)
@@ -40,12 +44,12 @@ func (checker ErrorNil) Check(pass *analysis.Pass, call *CallMeta) *analysis.Dia
proposedFn, survivingArg, replacementEndPos := func() (string, ast.Expr, token.Pos) {
switch call.Fn.NameFTrimmed {
- case "Nil":
+ case "Nil", "Empty", "Zero":
if len(call.Args) >= 1 && isError(pass, call.Args[0]) {
return noErrorFn, call.Args[0], call.Args[0].End()
}
- case "NotNil":
+ case "NotNil", "NotEmpty", "NotZero":
if len(call.Args) >= 1 && isError(pass, call.Args[0]) {
return errorFn, call.Args[0], call.Args[0].End()
}
@@ -81,12 +85,11 @@ func (checker ErrorNil) Check(pass *analysis.Pass, call *CallMeta) *analysis.Dia
if proposedFn != "" {
return newUseFunctionDiagnostic(checker.Name(), call, proposedFn,
- newSuggestedFuncReplacement(call, proposedFn, analysis.TextEdit{
+ analysis.TextEdit{
Pos: call.Args[0].Pos(),
End: replacementEndPos,
NewText: analysisutil.NodeBytes(pass.Fset, survivingArg),
- }),
- )
+ })
}
return nil
}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/expected_actual.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/expected_actual.go
index 77784dc7b..351d675ce 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/expected_actual.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/expected_actual.go
@@ -87,7 +87,7 @@ func (checker ExpectedActual) Check(pass *analysis.Pass, call *CallMeta) *analys
first, second := call.Args[0], call.Args[1]
if checker.isWrongExpectedActualOrder(pass, first, second) {
- return newDiagnostic(checker.Name(), call, "need to reverse actual and expected values", &analysis.SuggestedFix{
+ return newDiagnostic(checker.Name(), call, "need to reverse actual and expected values", analysis.SuggestedFix{
Message: "Reverse actual and expected values",
TextEdits: []analysis.TextEdit{
{
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/float_compare.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/float_compare.go
index 7436f9ca1..6bc22cd02 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/float_compare.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/float_compare.go
@@ -44,7 +44,7 @@ func (checker FloatCompare) Check(pass *analysis.Pass, call *CallMeta) *analysis
if call.Fn.IsFmt {
format = "use %s.InEpsilonf (or InDeltaf)"
}
- return newDiagnostic(checker.Name(), call, fmt.Sprintf(format, call.SelectorXStr), nil)
+ return newDiagnostic(checker.Name(), call, fmt.Sprintf(format, call.SelectorXStr))
}
return nil
}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/formatter.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/formatter.go
index 3401bb097..896b6bf5f 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/formatter.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/formatter.go
@@ -1,8 +1,6 @@
package checkers
import (
- "fmt"
- "go/ast"
"go/types"
"strconv"
@@ -60,7 +58,7 @@ func (checker Formatter) Check(pass *analysis.Pass, call *CallMeta) (result *ana
}
func (checker Formatter) checkNotFmtAssertion(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic {
- msgAndArgsPos, ok := isPrintfLikeCall(pass, call, call.Fn.Signature)
+ msgAndArgsPos, ok := isPrintfLikeCall(pass, call)
if !ok {
return nil
}
@@ -71,21 +69,15 @@ func (checker Formatter) checkNotFmtAssertion(pass *analysis.Pass, call *CallMet
msgAndArgs := call.ArgsRaw[msgAndArgsPos]
if args, ok := isFmtSprintfCall(pass, msgAndArgs); ok {
if checker.requireFFuncs {
- msg := fmt.Sprintf("remove unnecessary fmt.Sprintf and use %s.%s", call.SelectorXStr, fFunc)
- return newDiagnostic(checker.Name(), call, msg,
- newSuggestedFuncReplacement(call, fFunc, analysis.TextEdit{
- Pos: msgAndArgs.Pos(),
- End: msgAndArgs.End(),
- NewText: formatAsCallArgs(pass, args...),
- }),
- )
+ return newRemoveFnAndUseDiagnostic(pass, checker.Name(), call, fFunc,
+ "fmt.Sprintf", msgAndArgs, args...)
}
return newRemoveSprintfDiagnostic(pass, checker.Name(), call, msgAndArgs, args)
}
}
if checker.requireFFuncs {
- return newUseFunctionDiagnostic(checker.Name(), call, fFunc, newSuggestedFuncReplacement(call, fFunc))
+ return newUseFunctionDiagnostic(checker.Name(), call, fFunc)
}
return nil
}
@@ -109,7 +101,7 @@ func (checker Formatter) checkFmtAssertion(pass *analysis.Pass, call *CallMeta)
defer func() { pass.Report = report }()
pass.Report = func(d analysis.Diagnostic) {
- result = newDiagnostic(checker.Name(), call, d.Message, nil)
+ result = newDiagnostic(checker.Name(), call, d.Message)
}
format, err := strconv.Unquote(analysisutil.NodeString(pass.Fset, msg))
@@ -121,21 +113,51 @@ func (checker Formatter) checkFmtAssertion(pass *analysis.Pass, call *CallMeta)
return result
}
-func isPrintfLikeCall(pass *analysis.Pass, call *CallMeta, sig *types.Signature) (int, bool) {
- msgAndArgsPos := getMsgAndArgsPosition(sig)
+func isPrintfLikeCall(pass *analysis.Pass, call *CallMeta) (int, bool) {
+ msgAndArgsPos := getMsgAndArgsPosition(call.Fn.Signature)
if msgAndArgsPos < 0 {
return -1, false
}
- fmtFn := analysisutil.ObjectOf(pass.Pkg, testify.AssertPkgPath, call.Fn.Name+"f")
- if fmtFn == nil {
- // NOTE(a.telyshev): No formatted analogue of assertion.
+ if !assertHasFormattedAnalogue(pass, call) {
return -1, false
}
return msgAndArgsPos, len(call.ArgsRaw) > msgAndArgsPos
}
+func assertHasFormattedAnalogue(pass *analysis.Pass, call *CallMeta) bool {
+ if fn := analysisutil.ObjectOf(pass.Pkg, testify.AssertPkgPath, call.Fn.Name+"f"); fn != nil {
+ return true
+ }
+
+ if fn := analysisutil.ObjectOf(pass.Pkg, testify.RequirePkgPath, call.Fn.Name+"f"); fn != nil {
+ return true
+ }
+
+ recv := call.Fn.Signature.Recv()
+ if recv == nil {
+ return false
+ }
+
+ recvT := recv.Type()
+ if ptr, ok := recv.Type().(*types.Pointer); ok {
+ recvT = ptr.Elem()
+ }
+
+ suite, ok := recvT.(*types.Named)
+ if !ok {
+ return false
+ }
+ for i := 0; i < suite.NumMethods(); i++ {
+ if suite.Method(i).Name() == call.Fn.Name+"f" {
+ return true
+ }
+ }
+
+ return false
+}
+
func getMsgAndArgsPosition(sig *types.Signature) int {
params := sig.Params()
if params.Len() < 1 {
@@ -162,26 +184,3 @@ func getMsgPosition(sig *types.Signature) int {
}
return -1
}
-
-func isFmtSprintfCall(pass *analysis.Pass, expr ast.Expr) ([]ast.Expr, bool) {
- ce, ok := expr.(*ast.CallExpr)
- if !ok {
- return nil, false
- }
-
- se, ok := ce.Fun.(*ast.SelectorExpr)
- if !ok {
- return nil, false
- }
-
- sprintfObj := analysisutil.ObjectOf(pass.Pkg, "fmt", "Sprintf")
- if sprintfObj == nil {
- return nil, false
- }
-
- if !analysisutil.IsObj(pass.TypesInfo, se.Sel, sprintfObj) {
- return nil, false
- }
-
- return ce.Args, true
-}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/go_require.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/go_require.go
index 060c96033..8b0d39999 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/go_require.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/go_require.go
@@ -142,11 +142,11 @@ func (checker GoRequire) Check(pass *analysis.Pass, inspector *inspector.Inspect
if testifyCall != nil {
switch checker.checkCall(testifyCall) {
case goRequireVerdictRequire:
- d := newDiagnostic(checker.Name(), testifyCall, fmt.Sprintf(goRequireCallReportFormat, "require"), nil)
+ d := newDiagnostic(checker.Name(), testifyCall, fmt.Sprintf(goRequireCallReportFormat, "require"))
diagnostics = append(diagnostics, *d)
case goRequireVerdictAssertFailNow:
- d := newDiagnostic(checker.Name(), testifyCall, fmt.Sprintf(goRequireCallReportFormat, testifyCall), nil)
+ d := newDiagnostic(checker.Name(), testifyCall, fmt.Sprintf(goRequireCallReportFormat, testifyCall))
diagnostics = append(diagnostics, *d)
case goRequireVerdictNoExit:
@@ -163,7 +163,7 @@ func (checker GoRequire) Check(pass *analysis.Pass, inspector *inspector.Inspect
if v := checker.checkFunc(pass, calledFd, testsDecls, processedFuncs); v != goRequireVerdictNoExit {
caller := analysisutil.NodeString(pass.Fset, ce.Fun)
- d := newDiagnostic(checker.Name(), ce, fmt.Sprintf(goRequireFnReportFormat, caller), nil)
+ d := newDiagnostic(checker.Name(), ce, fmt.Sprintf(goRequireFnReportFormat, caller))
diagnostics = append(diagnostics, *d)
}
}
@@ -198,11 +198,11 @@ func (checker GoRequire) checkHTTPHandlers(pass *analysis.Pass, insp *inspector.
switch checker.checkCall(testifyCall) {
case goRequireVerdictRequire:
- d := newDiagnostic(checker.Name(), testifyCall, fmt.Sprintf(goRequireHTTPHandlerReportFormat, "require"), nil)
+ d := newDiagnostic(checker.Name(), testifyCall, fmt.Sprintf(goRequireHTTPHandlerReportFormat, "require"))
diagnostics = append(diagnostics, *d)
case goRequireVerdictAssertFailNow:
- d := newDiagnostic(checker.Name(), testifyCall, fmt.Sprintf(goRequireHTTPHandlerReportFormat, testifyCall), nil)
+ d := newDiagnostic(checker.Name(), testifyCall, fmt.Sprintf(goRequireHTTPHandlerReportFormat, testifyCall))
diagnostics = append(diagnostics, *d)
case goRequireVerdictNoExit:
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_basic_type.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_basic_type.go
index 432a3032c..9b43e914c 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_basic_type.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_basic_type.go
@@ -1,10 +1,10 @@
package checkers
import (
- "fmt"
"go/ast"
"go/token"
"go/types"
+ "strconv"
"golang.org/x/tools/go/analysis"
)
@@ -56,9 +56,29 @@ func isTypedIntNumber(e ast.Expr, v int, types ...string) bool {
return false
}
-func isIntNumber(e ast.Expr, v int) bool {
+func isIntNumber(e ast.Expr, rhs int) bool {
+ lhs, ok := isIntBasicLit(e)
+ return ok && (lhs == rhs)
+}
+
+func isNegativeIntNumber(e ast.Expr) bool {
+ v, ok := isIntBasicLit(e)
+ return ok && v < 0
+}
+
+func isPositiveIntNumber(e ast.Expr) bool {
+ v, ok := isIntBasicLit(e)
+ return ok && v > 0
+}
+
+func isEmptyStringLit(e ast.Expr) bool {
+ bl, ok := e.(*ast.BasicLit)
+ return ok && bl.Kind == token.STRING && bl.Value == `""`
+}
+
+func isNotEmptyStringLit(e ast.Expr) bool {
bl, ok := e.(*ast.BasicLit)
- return ok && bl.Kind == token.INT && bl.Value == fmt.Sprintf("%d", v)
+ return ok && bl.Kind == token.STRING && bl.Value != `""`
}
func isBasicLit(e ast.Expr) bool {
@@ -66,9 +86,27 @@ func isBasicLit(e ast.Expr) bool {
return ok
}
-func isIntBasicLit(e ast.Expr) bool {
+func isIntBasicLit(e ast.Expr) (int, bool) {
+ if un, ok := e.(*ast.UnaryExpr); ok {
+ if un.Op == token.SUB {
+ v, ok := isIntBasicLit(un.X)
+ return -1 * v, ok
+ }
+ }
+
bl, ok := e.(*ast.BasicLit)
- return ok && bl.Kind == token.INT
+ if !ok {
+ return 0, false
+ }
+ if bl.Kind != token.INT {
+ return 0, false
+ }
+
+ v, err := strconv.Atoi(bl.Value)
+ if err != nil {
+ return 0, false
+ }
+ return v, true
}
func isUntypedConst(pass *analysis.Pass, e ast.Expr) bool {
@@ -98,12 +136,37 @@ func isUnderlying(pass *analysis.Pass, e ast.Expr, flag types.BasicInfo) bool {
return ok && (bt.Info()&flag > 0)
}
-func isPointer(pass *analysis.Pass, e ast.Expr) bool {
- _, ok := pass.TypesInfo.TypeOf(e).(*types.Pointer)
- return ok
+func isPointer(pass *analysis.Pass, e ast.Expr) (types.Type, bool) {
+ ptr, ok := pass.TypesInfo.TypeOf(e).(*types.Pointer)
+ if !ok {
+ return nil, false
+ }
+ return ptr.Elem(), true
+}
+
+// isByteArray returns true if expression is `[]byte` itself.
+func isByteArray(e ast.Expr) bool {
+ at, ok := e.(*ast.ArrayType)
+ return ok && isIdentWithName("byte", at.Elt)
+}
+
+// hasBytesType returns true if the expression is of `[]byte` type.
+func hasBytesType(pass *analysis.Pass, e ast.Expr) bool {
+ t := pass.TypesInfo.TypeOf(e)
+ if t == nil {
+ return false
+ }
+
+ sl, ok := t.(*types.Slice)
+ if !ok {
+ return false
+ }
+
+ el, ok := sl.Elem().(*types.Basic)
+ return ok && el.Kind() == types.Uint8
}
-// untype returns v from type(v) expression or v itself if there is no type cast.
+// untype returns v from type(v) expression or v itself if there is no type conversion.
func untype(e ast.Expr) ast.Expr {
ce, ok := e.(*ast.CallExpr)
if !ok || len(ce.Args) != 1 {
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_diagnostic.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_diagnostic.go
index 3ae88a560..f12d87aa3 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_diagnostic.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_diagnostic.go
@@ -7,11 +7,32 @@ import (
"golang.org/x/tools/go/analysis"
)
+func newRemoveFnAndUseDiagnostic(
+ pass *analysis.Pass,
+ checker string,
+ call *CallMeta,
+ proposedFn string,
+ removedFn string,
+ removedFnPos analysis.Range,
+ removedFnArgs ...ast.Expr,
+) *analysis.Diagnostic {
+ f := proposedFn
+ if call.Fn.IsFmt {
+ f += "f"
+ }
+ msg := fmt.Sprintf("remove unnecessary %s and use %s.%s", removedFn, call.SelectorXStr, f)
+
+ return newDiagnostic(checker, call, msg,
+ newSuggestedFuncRemoving(pass, removedFn, removedFnPos, removedFnArgs...),
+ newSuggestedFuncReplacement(call, proposedFn),
+ )
+}
+
func newUseFunctionDiagnostic(
checker string,
call *CallMeta,
proposedFn string,
- fix *analysis.SuggestedFix,
+ additionalEdits ...analysis.TextEdit,
) *analysis.Diagnostic {
f := proposedFn
if call.Fn.IsFmt {
@@ -19,33 +40,57 @@ func newUseFunctionDiagnostic(
}
msg := fmt.Sprintf("use %s.%s", call.SelectorXStr, f)
- return newDiagnostic(checker, call, msg, fix)
+ return newDiagnostic(checker, call, msg,
+ newSuggestedFuncReplacement(call, proposedFn, additionalEdits...))
+}
+
+func newRemoveLenDiagnostic(
+ pass *analysis.Pass,
+ checker string,
+ call *CallMeta,
+ fnPos analysis.Range,
+ fnArg ast.Expr,
+) *analysis.Diagnostic {
+ return newRemoveFnDiagnostic(pass, checker, call, "len", fnPos, fnArg)
+}
+
+func newRemoveMustCompileDiagnostic(
+ pass *analysis.Pass,
+ checker string,
+ call *CallMeta,
+ fnPos analysis.Range,
+ fnArg ast.Expr,
+) *analysis.Diagnostic {
+ return newRemoveFnDiagnostic(pass, checker, call, "regexp.MustCompile", fnPos, fnArg)
}
func newRemoveSprintfDiagnostic(
pass *analysis.Pass,
checker string,
call *CallMeta,
- sprintfPos analysis.Range,
- sprintfArgs []ast.Expr,
+ fnPos analysis.Range,
+ fnArgs []ast.Expr,
) *analysis.Diagnostic {
- return newDiagnostic(checker, call, "remove unnecessary fmt.Sprintf", &analysis.SuggestedFix{
- Message: "Remove `fmt.Sprintf`",
- TextEdits: []analysis.TextEdit{
- {
- Pos: sprintfPos.Pos(),
- End: sprintfPos.End(),
- NewText: formatAsCallArgs(pass, sprintfArgs...),
- },
- },
- })
+ return newRemoveFnDiagnostic(pass, checker, call, "fmt.Sprintf", fnPos, fnArgs...)
+}
+
+func newRemoveFnDiagnostic(
+ pass *analysis.Pass,
+ checker string,
+ call *CallMeta,
+ fnName string,
+ fnPos analysis.Range,
+ fnArgs ...ast.Expr,
+) *analysis.Diagnostic {
+ return newDiagnostic(checker, call, "remove unnecessary "+fnName,
+ newSuggestedFuncRemoving(pass, fnName, fnPos, fnArgs...))
}
func newDiagnostic(
checker string,
rng analysis.Range,
msg string,
- fix *analysis.SuggestedFix,
+ fixes ...analysis.SuggestedFix,
) *analysis.Diagnostic {
d := analysis.Diagnostic{
Pos: rng.Pos(),
@@ -53,21 +98,39 @@ func newDiagnostic(
Category: checker,
Message: checker + ": " + msg,
}
- if fix != nil {
- d.SuggestedFixes = []analysis.SuggestedFix{*fix}
+ if len(fixes) != 0 {
+ d.SuggestedFixes = fixes
}
return &d
}
+func newSuggestedFuncRemoving(
+ pass *analysis.Pass,
+ fnName string,
+ fnPos analysis.Range,
+ fnArgs ...ast.Expr,
+) analysis.SuggestedFix {
+ return analysis.SuggestedFix{
+ Message: fmt.Sprintf("Remove `%s`", fnName),
+ TextEdits: []analysis.TextEdit{
+ {
+ Pos: fnPos.Pos(),
+ End: fnPos.End(),
+ NewText: formatAsCallArgs(pass, fnArgs...),
+ },
+ },
+ }
+}
+
func newSuggestedFuncReplacement(
call *CallMeta,
proposedFn string,
additionalEdits ...analysis.TextEdit,
-) *analysis.SuggestedFix {
+) analysis.SuggestedFix {
if call.Fn.IsFmt {
proposedFn += "f"
}
- return &analysis.SuggestedFix{
+ return analysis.SuggestedFix{
Message: fmt.Sprintf("Replace `%s` with `%s`", call.Fn.Name, proposedFn),
TextEdits: append([]analysis.TextEdit{
{
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_encoded.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_encoded.go
new file mode 100644
index 000000000..35a497a72
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_encoded.go
@@ -0,0 +1,40 @@
+package checkers
+
+import (
+ "go/ast"
+ "go/token"
+ "regexp"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/Antonboom/testifylint/internal/analysisutil"
+)
+
+var (
+ jsonIdentRe = regexp.MustCompile(`json|JSON|Json`)
+ yamlIdentRe = regexp.MustCompile(`yaml|YAML|Yaml|yml|YML|Yml`)
+)
+
+func isJSONStyleExpr(pass *analysis.Pass, e ast.Expr) bool {
+ if isIdentNamedAfterPattern(jsonIdentRe, e) {
+ return true
+ }
+
+ if t, ok := pass.TypesInfo.Types[e]; ok && t.Value != nil {
+ return analysisutil.IsJSONLike(t.Value.String())
+ }
+
+ if bl, ok := e.(*ast.BasicLit); ok {
+ return bl.Kind == token.STRING && analysisutil.IsJSONLike(bl.Value)
+ }
+
+ if args, ok := isFmtSprintfCall(pass, e); ok {
+ return isJSONStyleExpr(pass, args[0])
+ }
+
+ return false
+}
+
+func isYAMLStyleExpr(e ast.Expr) bool {
+ return isIdentNamedAfterPattern(yamlIdentRe, e)
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_error.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_error.go
index 55cd5fd05..859a39ee8 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_error.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_error.go
@@ -5,8 +5,6 @@ import (
"go/types"
"golang.org/x/tools/go/analysis"
-
- "github.com/Antonboom/testifylint/internal/analysisutil"
)
var (
@@ -20,23 +18,9 @@ func isError(pass *analysis.Pass, expr ast.Expr) bool {
}
func isErrorsIsCall(pass *analysis.Pass, ce *ast.CallExpr) bool {
- return isErrorsPkgFnCall(pass, ce, "Is")
+ return isPkgFnCall(pass, ce, "errors", "Is")
}
func isErrorsAsCall(pass *analysis.Pass, ce *ast.CallExpr) bool {
- return isErrorsPkgFnCall(pass, ce, "As")
-}
-
-func isErrorsPkgFnCall(pass *analysis.Pass, ce *ast.CallExpr, fn string) bool {
- se, ok := ce.Fun.(*ast.SelectorExpr)
- if !ok {
- return false
- }
-
- errorsIsObj := analysisutil.ObjectOf(pass.Pkg, "errors", fn)
- if errorsIsObj == nil {
- return false
- }
-
- return analysisutil.IsObj(pass.TypesInfo, se.Sel, errorsIsObj)
+ return isPkgFnCall(pass, ce, "errors", "As")
}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_format.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_format.go
index 765fce527..d69c42860 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_format.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_format.go
@@ -3,6 +3,7 @@ package checkers
import (
"bytes"
"go/ast"
+ "strings"
"golang.org/x/tools/go/analysis"
@@ -24,3 +25,37 @@ func formatAsCallArgs(pass *analysis.Pass, args ...ast.Expr) []byte {
}
return buf.Bytes()
}
+
+func formatWithStringCastForBytes(pass *analysis.Pass, e ast.Expr) []byte {
+ if !hasBytesType(pass, e) {
+ return analysisutil.NodeBytes(pass.Fset, e)
+ }
+
+ if se, ok := isBufferBytesCall(pass, e); ok {
+ return []byte(analysisutil.NodeString(pass.Fset, se) + ".String()")
+ }
+ return []byte("string(" + analysisutil.NodeString(pass.Fset, e) + ")")
+}
+
+func isBufferBytesCall(pass *analysis.Pass, e ast.Expr) (ast.Node, bool) {
+ ce, ok := e.(*ast.CallExpr)
+ if !ok {
+ return nil, false
+ }
+
+ se, ok := ce.Fun.(*ast.SelectorExpr)
+ if !ok {
+ return nil, false
+ }
+
+ if !isIdentWithName("Bytes", se.Sel) {
+ return nil, false
+ }
+ if t := pass.TypesInfo.TypeOf(se.X); t != nil {
+ // NOTE(a.telyshev): This is hack, because `bytes` package can be not imported,
+ // and we cannot do "true" comparison with `Buffer` object.
+ return se.X, strings.TrimPrefix(t.String(), "*") == "bytes.Buffer"
+ }
+
+ return nil, false
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_interface.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_interface.go
index b0c0d1302..ad39c72d7 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_interface.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_interface.go
@@ -15,8 +15,11 @@ func isEmptyInterface(pass *analysis.Pass, expr ast.Expr) bool {
if !ok {
return false
}
+ return isEmptyInterfaceType(t.Type)
+}
- iface, ok := t.Type.Underlying().(*types.Interface)
+func isEmptyInterfaceType(t types.Type) bool {
+ iface, ok := t.Underlying().(*types.Interface)
return ok && iface.NumMethods() == 0
}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_pkg_func.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_pkg_func.go
new file mode 100644
index 000000000..daf309339
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/helpers_pkg_func.go
@@ -0,0 +1,59 @@
+package checkers
+
+import (
+ "go/ast"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/Antonboom/testifylint/internal/analysisutil"
+)
+
+func isFmtSprintfCall(pass *analysis.Pass, e ast.Expr) ([]ast.Expr, bool) {
+ ce, ok := e.(*ast.CallExpr)
+ if !ok {
+ return nil, false
+ }
+ return ce.Args, isPkgFnCall(pass, ce, "fmt", "Sprintf")
+}
+
+func isJSONRawMessageCast(pass *analysis.Pass, ce *ast.CallExpr) bool {
+ return isPkgFnCall(pass, ce, "encoding/json", "RawMessage")
+}
+
+func isRegexpMustCompileCall(pass *analysis.Pass, ce *ast.CallExpr) bool {
+ return isPkgFnCall(pass, ce, "regexp", "MustCompile")
+}
+
+func isStringsContainsCall(pass *analysis.Pass, ce *ast.CallExpr) bool {
+ return isPkgFnCall(pass, ce, "strings", "Contains")
+}
+
+func isStringsReplaceCall(pass *analysis.Pass, ce *ast.CallExpr) bool {
+ return isPkgFnCall(pass, ce, "strings", "Replace")
+}
+
+func isStringsReplaceAllCall(pass *analysis.Pass, ce *ast.CallExpr) bool {
+ return isPkgFnCall(pass, ce, "strings", "ReplaceAll")
+}
+
+func isStringsTrimCall(pass *analysis.Pass, ce *ast.CallExpr) bool {
+ return isPkgFnCall(pass, ce, "strings", "Trim")
+}
+
+func isStringsTrimSpaceCall(pass *analysis.Pass, ce *ast.CallExpr) bool {
+ return isPkgFnCall(pass, ce, "strings", "TrimSpace")
+}
+
+func isPkgFnCall(pass *analysis.Pass, ce *ast.CallExpr, pkg, fn string) bool {
+ se, ok := ce.Fun.(*ast.SelectorExpr)
+ if !ok {
+ return false
+ }
+
+ fnObj := analysisutil.ObjectOf(pass.Pkg, pkg, fn)
+ if fnObj == nil {
+ return false
+ }
+
+ return analysisutil.IsObj(pass.TypesInfo, se.Sel, fnObj)
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/len.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/len.go
index 47330568c..c240a6174 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/len.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/len.go
@@ -31,17 +31,16 @@ func (checker Len) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnost
a, b := call.Args[0], call.Args[1]
if lenArg, expectedLen, ok := xorLenCall(pass, a, b); ok {
- if expectedLen == b && !isIntBasicLit(expectedLen) {
+ if _, ok := isIntBasicLit(expectedLen); (expectedLen == b) && !ok {
// https://github.com/Antonboom/testifylint/issues/9
return nil
}
return newUseFunctionDiagnostic(checker.Name(), call, proposedFn,
- newSuggestedFuncReplacement(call, proposedFn, analysis.TextEdit{
+ analysis.TextEdit{
Pos: a.Pos(),
End: b.End(),
NewText: formatAsCallArgs(pass, lenArg, expectedLen),
- }),
- )
+ })
}
case "True":
@@ -50,14 +49,16 @@ func (checker Len) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnost
}
expr := call.Args[0]
- if lenArg, expectedLen, ok := isLenEquality(pass, expr); ok && isIntBasicLit(expectedLen) {
+ if lenArg, expectedLen, ok := isLenEquality(pass, expr); ok {
+ if _, ok := isIntBasicLit(expectedLen); !ok {
+ return nil
+ }
return newUseFunctionDiagnostic(checker.Name(), call, proposedFn,
- newSuggestedFuncReplacement(call, proposedFn, analysis.TextEdit{
+ analysis.TextEdit{
Pos: expr.Pos(),
End: expr.End(),
NewText: formatAsCallArgs(pass, lenArg, expectedLen),
- }),
- )
+ })
}
}
return nil
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/negative_postive.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/negative_positive.go
index 274021f67..a61bbdfcb 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/negative_postive.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/negative_positive.go
@@ -48,12 +48,11 @@ func (checker NegativePositive) checkNegative(pass *analysis.Pass, call *CallMet
newUseNegativeDiagnostic := func(replaceStart, replaceEnd token.Pos, replaceWith ast.Expr) *analysis.Diagnostic {
const proposed = "Negative"
return newUseFunctionDiagnostic(checker.Name(), call, proposed,
- newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{
+ analysis.TextEdit{
Pos: replaceStart,
End: replaceEnd,
NewText: analysisutil.NodeBytes(pass.Fset, replaceWith),
- }),
- )
+ })
}
// NOTE(a.telyshev): We ignore uint-asserts as being no sense for assert.Negative.
@@ -114,12 +113,11 @@ func (checker NegativePositive) checkPositive(pass *analysis.Pass, call *CallMet
newUsePositiveDiagnostic := func(replaceStart, replaceEnd token.Pos, replaceWith ast.Expr) *analysis.Diagnostic {
const proposed = "Positive"
return newUseFunctionDiagnostic(checker.Name(), call, proposed,
- newSuggestedFuncReplacement(call, proposed, analysis.TextEdit{
+ analysis.TextEdit{
Pos: replaceStart,
End: replaceEnd,
NewText: analysisutil.NodeBytes(pass.Fset, replaceWith),
- }),
- )
+ })
}
switch call.Fn.NameFTrimmed {
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/nil_compare.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/nil_compare.go
index 47c4a7383..fc1adb7ea 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/nil_compare.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/nil_compare.go
@@ -47,10 +47,9 @@ func (checker NilCompare) Check(pass *analysis.Pass, call *CallMeta) *analysis.D
}
return newUseFunctionDiagnostic(checker.Name(), call, proposedFn,
- newSuggestedFuncReplacement(call, proposedFn, analysis.TextEdit{
+ analysis.TextEdit{
Pos: call.Args[0].Pos(),
End: call.Args[1].End(),
NewText: analysisutil.NodeBytes(pass.Fset, survivingArg),
- }),
- )
+ })
}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/printf/printf.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/printf/printf.go
index cfb47b542..4f6e3f9c4 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/printf/printf.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/printf/printf.go
@@ -213,9 +213,9 @@ func isFormatter(typ types.Type) bool {
types.Identical(sig.Params().At(1).Type(), types.Typ[types.Rune])
}
-// isTypeParam reports whether t is a type parameter.
+// isTypeParam reports whether t is a type parameter (or an alias of one).
func isTypeParam(t types.Type) bool {
- _, ok := t.(*types.TypeParam)
+ _, ok := types.Unalias(t).(*types.TypeParam)
return ok
}
@@ -224,7 +224,7 @@ func isTypeParam(t types.Type) bool {
// This function avoids allocating the concatenation of "pkg.Name",
// which is important for the performance of syntax matching.
func isNamedType(t types.Type, pkgPath string, names ...string) bool {
- n, ok := t.(*types.Named)
+ n, ok := types.Unalias(t).(*types.Named)
if !ok {
return false
}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/regexp.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/regexp.go
new file mode 100644
index 000000000..d634b74bd
--- /dev/null
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/regexp.go
@@ -0,0 +1,44 @@
+package checkers
+
+import (
+ "go/ast"
+
+ "golang.org/x/tools/go/analysis"
+)
+
+// Regexp detects situations like
+//
+// assert.Regexp(t, regexp.MustCompile(`\[.*\] DEBUG \(.*TestNew.*\): message`), out)
+// assert.NotRegexp(t, regexp.MustCompile(`\[.*\] TRACE message`), out)
+//
+// and requires
+//
+// assert.Regexp(t, `\[.*\] DEBUG \(.*TestNew.*\): message`, out)
+// assert.NotRegexp(t, `\[.*\] TRACE message`, out)
+type Regexp struct{}
+
+// NewRegexp constructs Regexp checker.
+func NewRegexp() Regexp { return Regexp{} }
+func (Regexp) Name() string { return "regexp" }
+
+func (checker Regexp) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic {
+ switch call.Fn.NameFTrimmed {
+ default:
+ return nil
+ case "Regexp", "NotRegexp":
+ }
+
+ if len(call.Args) < 1 {
+ return nil
+ }
+
+ ce, ok := call.Args[0].(*ast.CallExpr)
+ if !ok || len(ce.Args) != 1 {
+ return nil
+ }
+
+ if isRegexpMustCompileCall(pass, ce) {
+ return newRemoveMustCompileDiagnostic(pass, checker.Name(), call, ce, ce.Args[0])
+ }
+ return nil
+}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/require_error.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/require_error.go
index 4303828fd..e4e30aaf4 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/require_error.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/require_error.go
@@ -134,7 +134,7 @@ func (checker RequireError) Check(pass *analysis.Pass, inspector *inspector.Insp
}
diagnostics = append(diagnostics,
- *newDiagnostic(checker.Name(), c.testifyCall, requireErrorReport, nil))
+ *newDiagnostic(checker.Name(), c.testifyCall, requireErrorReport))
}
}
@@ -197,11 +197,10 @@ func findRootIf(stack []ast.Node) *ast.IfStmt {
nearestIf, i := findNearestNodeWithIdx[*ast.IfStmt](stack)
for ; i > 0; i-- {
parent, ok := stack[i-1].(*ast.IfStmt)
- if ok {
- nearestIf = parent
- } else {
+ if !ok {
break
}
+ nearestIf = parent
}
return nearestIf
}
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_broken_parallel.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_broken_parallel.go
index f830fd2a5..4374c9359 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_broken_parallel.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_broken_parallel.go
@@ -68,7 +68,7 @@ func (checker SuiteBrokenParallel) Check(pass *analysis.Pass, insp *inspector.In
}
nextLine := pass.Fset.Position(ce.Pos()).Line + 1
- d := newDiagnostic(checker.Name(), ce, report, &analysis.SuggestedFix{
+ d := newDiagnostic(checker.Name(), ce, report, analysis.SuggestedFix{
Message: fmt.Sprintf("Remove `%s` call", analysisutil.NodeString(pass.Fset, ce)),
TextEdits: []analysis.TextEdit{
{
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_dont_use_pkg.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_dont_use_pkg.go
index 6150ae78d..4fbfbe7e0 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_dont_use_pkg.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_dont_use_pkg.go
@@ -60,7 +60,7 @@ func (checker SuiteDontUsePkg) Check(pass *analysis.Pass, call *CallMeta) *analy
}
msg := fmt.Sprintf("use %s.%s", newSelector, call.Fn.Name)
- return newDiagnostic(checker.Name(), call, msg, &analysis.SuggestedFix{
+ return newDiagnostic(checker.Name(), call, msg, analysis.SuggestedFix{
Message: fmt.Sprintf("Replace `%s` with `%s`", call.SelectorXStr, newSelector),
TextEdits: []analysis.TextEdit{
// Replace package function with suite method.
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_extra_assert_call.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_extra_assert_call.go
index 9adfe5190..fdea324fd 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_extra_assert_call.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_extra_assert_call.go
@@ -61,7 +61,7 @@ func (checker SuiteExtraAssertCall) Check(pass *analysis.Pass, call *CallMeta) *
}
msg := fmt.Sprintf("use an explicit %s.Assert().%s", analysisutil.NodeString(pass.Fset, x), call.Fn.Name)
- return newDiagnostic(checker.Name(), call, msg, &analysis.SuggestedFix{
+ return newDiagnostic(checker.Name(), call, msg, analysis.SuggestedFix{
Message: "Add `Assert()` call",
TextEdits: []analysis.TextEdit{{
Pos: x.End(),
@@ -85,7 +85,7 @@ func (checker SuiteExtraAssertCall) Check(pass *analysis.Pass, call *CallMeta) *
}
msg := fmt.Sprintf("need to simplify the assertion to %s.%s", analysisutil.NodeString(pass.Fset, se.X), call.Fn.Name)
- return newDiagnostic(checker.Name(), call, msg, &analysis.SuggestedFix{
+ return newDiagnostic(checker.Name(), call, msg, analysis.SuggestedFix{
Message: "Remove `Assert()` call",
TextEdits: []analysis.TextEdit{{
Pos: se.Sel.Pos(),
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_subtest_run.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_subtest_run.go
index 67d9c252b..525d5ffd8 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_subtest_run.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_subtest_run.go
@@ -53,7 +53,7 @@ func (checker SuiteSubtestRun) Check(pass *analysis.Pass, insp *inspector.Inspec
if implementsTestifySuite(pass, tCallSel.X) && implementsTestingT(pass, tCall) {
msg := fmt.Sprintf("use %s.Run to run subtest", analysisutil.NodeString(pass.Fset, tCallSel.X))
- diagnostics = append(diagnostics, *newDiagnostic(checker.Name(), ce, msg, nil))
+ diagnostics = append(diagnostics, *newDiagnostic(checker.Name(), ce, msg))
}
})
return diagnostics
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_thelper.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_thelper.go
index 59455290d..ef8d82132 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_thelper.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/suite_thelper.go
@@ -50,8 +50,8 @@ func (checker SuiteTHelper) Check(pass *analysis.Pass, inspector *inspector.Insp
return
}
- msg := fmt.Sprintf("suite helper method must start with " + helperCallStr)
- d := newDiagnostic(checker.Name(), fd, msg, &analysis.SuggestedFix{
+ msg := "suite helper method must start with " + helperCallStr
+ d := newDiagnostic(checker.Name(), fd, msg, analysis.SuggestedFix{
Message: fmt.Sprintf("Insert `%s`", helperCallStr),
TextEdits: []analysis.TextEdit{
{
diff --git a/vendor/github.com/Antonboom/testifylint/internal/checkers/useless_assert.go b/vendor/github.com/Antonboom/testifylint/internal/checkers/useless_assert.go
index 6f206d095..045706e5d 100644
--- a/vendor/github.com/Antonboom/testifylint/internal/checkers/useless_assert.go
+++ b/vendor/github.com/Antonboom/testifylint/internal/checkers/useless_assert.go
@@ -10,15 +10,40 @@ import (
// UselessAssert detects useless asserts like
//
-// 1) Asserting of the same variable
-//
+// assert.Contains(t, tt.value, tt.value)
+// assert.ElementsMatch(t, tt.value, tt.value)
// assert.Equal(t, tt.value, tt.value)
-// assert.ElementsMatch(t, users, users)
+// assert.EqualExportedValues(t, tt.value, tt.value)
// ...
+//
// assert.True(t, num > num)
+// assert.True(t, num < num)
+// assert.True(t, num >= num)
+// assert.True(t, num <= num)
+// assert.True(t, num == num)
+// assert.True(t, num != num)
+//
+// assert.False(t, num > num)
+// assert.False(t, num < num)
+// assert.False(t, num >= num)
+// assert.False(t, num <= num)
// assert.False(t, num == num)
+// assert.False(t, num != num)
//
-// 2) Open for contribution...
+// assert.Empty(t, "")
+// assert.False(t, false)
+// assert.Implements(t, (*any)(nil), new(Conn))
+// assert.Negative(t, -42)
+// assert.Nil(t, nil)
+// assert.NoError(t, nil)
+// assert.NotEmpty(t, "value")
+// assert.NotZero(t, 42)
+// assert.NotZero(t, "value")
+// assert.Positive(t, 42)
+// assert.True(t, true)
+// assert.Zero(t, 0)
+// assert.Zero(t, "")
+// assert.Zero(t, nil)
type UselessAssert struct{}
// NewUselessAssert constructs UselessAssert checker.
@@ -26,6 +51,58 @@ func NewUselessAssert() UselessAssert { return UselessAssert{} }
func (UselessAssert) Name() string { return "useless-assert" }
func (checker UselessAssert) Check(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic {
+ if d := checker.checkSameVars(pass, call); d != nil {
+ return d
+ }
+
+ var isMeaningless bool
+ switch call.Fn.NameFTrimmed {
+ case "Empty":
+ isMeaningless = (len(call.Args) >= 1) && isEmptyStringLit(call.Args[0])
+
+ case "False":
+ isMeaningless = (len(call.Args) >= 1) && isUntypedFalse(pass, call.Args[0])
+
+ case "Implements":
+ if len(call.Args) < 2 {
+ return nil
+ }
+
+ elem, ok := isPointer(pass, call.Args[0])
+ isMeaningless = ok && isEmptyInterfaceType(elem)
+
+ case "Negative":
+ isMeaningless = (len(call.Args) >= 1) && isNegativeIntNumber(call.Args[0])
+
+ case "Nil", "NoError":
+ isMeaningless = (len(call.Args) >= 1) && isNil(call.Args[0])
+
+ case "NotEmpty":
+ isMeaningless = (len(call.Args) >= 1) && isNotEmptyStringLit(call.Args[0])
+
+ case "NotZero":
+ isMeaningless = (len(call.Args) >= 1) &&
+ (isNotEmptyStringLit(call.Args[0]) ||
+ isNegativeIntNumber(call.Args[0]) || isPositiveIntNumber(call.Args[0]))
+
+ case "Positive":
+ isMeaningless = (len(call.Args) >= 1) && isPositiveIntNumber(call.Args[0])
+
+ case "True":
+ isMeaningless = (len(call.Args) >= 1) && isUntypedTrue(pass, call.Args[0])
+
+ case "Zero":
+ isMeaningless = (len(call.Args) >= 1) &&
+ (isZero(call.Args[0]) || isEmptyStringLit(call.Args[0]) || isNil(call.Args[0]))
+ }
+
+ if isMeaningless {
+ return newDiagnostic(checker.Name(), call, "meaningless assertion")
+ }
+ return nil
+}
+
+func (checker UselessAssert) checkSameVars(pass *analysis.Pass, call *CallMeta) *analysis.Diagnostic {
var first, second ast.Node
switch call.Fn.NameFTrimmed {
@@ -82,7 +159,7 @@ func (checker UselessAssert) Check(pass *analysis.Pass, call *CallMeta) *analysi
}
if analysisutil.NodeString(pass.Fset, first) == analysisutil.NodeString(pass.Fset, second) {
- return newDiagnostic(checker.Name(), call, "asserting of the same variable", nil)
+ return newDiagnostic(checker.Name(), call, "asserting of the same variable")
}
return nil
}
diff --git a/vendor/github.com/alecthomas/go-check-sumtype/README.md b/vendor/github.com/alecthomas/go-check-sumtype/README.md
index 36614ef40..2ccec4e84 100644
--- a/vendor/github.com/alecthomas/go-check-sumtype/README.md
+++ b/vendor/github.com/alecthomas/go-check-sumtype/README.md
@@ -86,7 +86,8 @@ mysumtype.go:18:2: exhaustiveness check failed for sum type 'MySumType': missing
```
Adding either a `default` clause or a clause to handle `*VariantB` will cause
-exhaustive checks to pass.
+exhaustive checks to pass. To prevent `default` clauses from automatically
+passing checks, set the `-default-signifies-exhasutive=false` flag.
As a special case, if the type switch statement contains a `default` clause
that always panics, then exhaustiveness checks are still performed.
diff --git a/vendor/github.com/alecthomas/go-check-sumtype/check.go b/vendor/github.com/alecthomas/go-check-sumtype/check.go
index 21d751af4..1a0a32517 100644
--- a/vendor/github.com/alecthomas/go-check-sumtype/check.go
+++ b/vendor/github.com/alecthomas/go-check-sumtype/check.go
@@ -39,7 +39,7 @@ func (e inexhaustiveError) Names() []string {
// check does exhaustiveness checking for the given sum type definitions in the
// given package. Every instance of inexhaustive case analysis is returned.
-func check(pkg *packages.Package, defs []sumTypeDef) []error {
+func check(pkg *packages.Package, defs []sumTypeDef, config Config) []error {
var errs []error
for _, astfile := range pkg.Syntax {
ast.Inspect(astfile, func(n ast.Node) bool {
@@ -47,7 +47,7 @@ func check(pkg *packages.Package, defs []sumTypeDef) []error {
if !ok {
return true
}
- if err := checkSwitch(pkg, defs, swtch); err != nil {
+ if err := checkSwitch(pkg, defs, swtch, config); err != nil {
errs = append(errs, err)
}
return true
@@ -67,8 +67,9 @@ func checkSwitch(
pkg *packages.Package,
defs []sumTypeDef,
swtch *ast.TypeSwitchStmt,
+ config Config,
) error {
- def, missing := missingVariantsInSwitch(pkg, defs, swtch)
+ def, missing := missingVariantsInSwitch(pkg, defs, swtch, config)
if len(missing) > 0 {
return inexhaustiveError{
Position: pkg.Fset.Position(swtch.Pos()),
@@ -87,6 +88,7 @@ func missingVariantsInSwitch(
pkg *packages.Package,
defs []sumTypeDef,
swtch *ast.TypeSwitchStmt,
+ config Config,
) (*sumTypeDef, []types.Object) {
asserted := findTypeAssertExpr(swtch)
ty := pkg.TypesInfo.TypeOf(asserted)
@@ -97,7 +99,7 @@ func missingVariantsInSwitch(
return nil, nil
}
variantExprs, hasDefault := switchVariants(swtch)
- if hasDefault && !defaultClauseAlwaysPanics(swtch) {
+ if config.DefaultSignifiesExhaustive && hasDefault && !defaultClauseAlwaysPanics(swtch) {
// A catch-all case defeats all exhaustiveness checks.
return def, nil
}
diff --git a/vendor/github.com/alecthomas/go-check-sumtype/config.go b/vendor/github.com/alecthomas/go-check-sumtype/config.go
new file mode 100644
index 000000000..759176eb7
--- /dev/null
+++ b/vendor/github.com/alecthomas/go-check-sumtype/config.go
@@ -0,0 +1,5 @@
+package gochecksumtype
+
+type Config struct {
+ DefaultSignifiesExhaustive bool
+}
diff --git a/vendor/github.com/alecthomas/go-check-sumtype/renovate.json5 b/vendor/github.com/alecthomas/go-check-sumtype/renovate.json5
new file mode 100644
index 000000000..77c7b016c
--- /dev/null
+++ b/vendor/github.com/alecthomas/go-check-sumtype/renovate.json5
@@ -0,0 +1,18 @@
+{
+ $schema: "https://docs.renovatebot.com/renovate-schema.json",
+ extends: [
+ "config:recommended",
+ ":semanticCommits",
+ ":semanticCommitTypeAll(chore)",
+ ":semanticCommitScope(deps)",
+ "group:allNonMajor",
+ "schedule:earlyMondays", // Run once a week.
+ ],
+ packageRules: [
+ {
+ matchPackageNames: ["golangci-lint"],
+ matchManagers: ["hermit"],
+ enabled: false,
+ },
+ ],
+}
diff --git a/vendor/github.com/alecthomas/go-check-sumtype/run.go b/vendor/github.com/alecthomas/go-check-sumtype/run.go
index fdcb643c5..f32942d7a 100644
--- a/vendor/github.com/alecthomas/go-check-sumtype/run.go
+++ b/vendor/github.com/alecthomas/go-check-sumtype/run.go
@@ -3,7 +3,7 @@ package gochecksumtype
import "golang.org/x/tools/go/packages"
// Run sumtype checking on the given packages.
-func Run(pkgs []*packages.Package) []error {
+func Run(pkgs []*packages.Package, config Config) []error {
var errs []error
decls, err := findSumTypeDecls(pkgs)
@@ -18,7 +18,7 @@ func Run(pkgs []*packages.Package) []error {
}
for _, pkg := range pkgs {
- if pkgErrs := check(pkg, defs); pkgErrs != nil {
+ if pkgErrs := check(pkg, defs, config); pkgErrs != nil {
errs = append(errs, pkgErrs...)
}
}
diff --git a/vendor/github.com/alexkohler/nakedret/v2/nakedret.go b/vendor/github.com/alexkohler/nakedret/v2/nakedret.go
index f78bb8cb6..a55735928 100644
--- a/vendor/github.com/alexkohler/nakedret/v2/nakedret.go
+++ b/vendor/github.com/alexkohler/nakedret/v2/nakedret.go
@@ -22,10 +22,11 @@ import (
const pwd = "./"
-func NakedReturnAnalyzer(defaultLines uint) *analysis.Analyzer {
+func NakedReturnAnalyzer(defaultLines uint, skipTestFiles bool) *analysis.Analyzer {
nakedRet := &NakedReturnRunner{}
flags := flag.NewFlagSet("nakedret", flag.ExitOnError)
flags.UintVar(&nakedRet.MaxLength, "l", defaultLines, "maximum number of lines for a naked return function")
+ flags.BoolVar(&nakedRet.SkipTestFiles, "skip-test-files", skipTestFiles, "set to true to skip test files")
var analyzer = &analysis.Analyzer{
Name: "nakedret",
Doc: "Checks that functions with naked returns are not longer than a maximum size (can be zero).",
@@ -37,7 +38,8 @@ func NakedReturnAnalyzer(defaultLines uint) *analysis.Analyzer {
}
type NakedReturnRunner struct {
- MaxLength uint
+ MaxLength uint
+ SkipTestFiles bool
}
func (n *NakedReturnRunner) run(pass *analysis.Pass) (any, error) {
@@ -49,18 +51,20 @@ func (n *NakedReturnRunner) run(pass *analysis.Pass) (any, error) {
(*ast.ReturnStmt)(nil),
}
retVis := &returnsVisitor{
- pass: pass,
- f: pass.Fset,
- maxLength: n.MaxLength,
+ pass: pass,
+ f: pass.Fset,
+ maxLength: n.MaxLength,
+ skipTestFiles: n.SkipTestFiles,
}
inspector.Nodes(nodeFilter, retVis.NodesVisit)
return nil, nil
}
type returnsVisitor struct {
- pass *analysis.Pass
- f *token.FileSet
- maxLength uint
+ pass *analysis.Pass
+ f *token.FileSet
+ maxLength uint
+ skipTestFiles bool
// functions contains funcInfo for each nested function definition encountered while visiting the AST.
functions []funcInfo
@@ -74,7 +78,7 @@ type funcInfo struct {
reportNaked bool
}
-func checkNakedReturns(args []string, maxLength *uint, setExitStatus bool) error {
+func checkNakedReturns(args []string, maxLength *uint, skipTestFiles bool, setExitStatus bool) error {
fset := token.NewFileSet()
@@ -87,7 +91,7 @@ func checkNakedReturns(args []string, maxLength *uint, setExitStatus bool) error
return errors.New("max length nil")
}
- analyzer := NakedReturnAnalyzer(*maxLength)
+ analyzer := NakedReturnAnalyzer(*maxLength, skipTestFiles)
pass := &analysis.Pass{
Analyzer: analyzer,
Fset: fset,
@@ -292,6 +296,9 @@ func (v *returnsVisitor) NodesVisit(node ast.Node, push bool) bool {
if push && funcType != nil {
// Push function info to track returns for this function
file := v.f.File(node.Pos())
+ if v.skipTestFiles && strings.HasSuffix(file.Name(), "_test.go") {
+ return false
+ }
length := file.Position(node.End()).Line - file.Position(node.Pos()).Line
if length == 0 {
// consider functions that finish on the same line as they start as single line functions, not zero lines!
diff --git a/vendor/github.com/bkielbasa/cyclop/pkg/analyzer/analyzer.go b/vendor/github.com/bkielbasa/cyclop/pkg/analyzer/analyzer.go
index eaf408d6f..1972379df 100644
--- a/vendor/github.com/bkielbasa/cyclop/pkg/analyzer/analyzer.go
+++ b/vendor/github.com/bkielbasa/cyclop/pkg/analyzer/analyzer.go
@@ -19,9 +19,13 @@ var (
skipTests bool
)
+const (
+ defaultMaxComplexity = 10
+)
+
//nolint:gochecknoinits
func init() {
- flagSet.IntVar(&maxComplexity, "maxComplexity", 10, "max complexity the function can have")
+ flagSet.IntVar(&maxComplexity, "maxComplexity", defaultMaxComplexity, "max complexity the function can have")
flagSet.Float64Var(&packageAverage, "packageAverage", 0, "max average complexity in package")
flagSet.BoolVar(&skipTests, "skipTests", false, "should the linter execute on test files as well")
}
@@ -29,7 +33,7 @@ func init() {
func NewAnalyzer() *analysis.Analyzer {
return &analysis.Analyzer{
Name: "cyclop",
- Doc: "calculates cyclomatic complexity",
+ Doc: "checks function and package cyclomatic complexity",
Run: run,
Flags: flagSet,
}
@@ -40,9 +44,9 @@ func run(pass *analysis.Pass) (interface{}, error) {
var pkgName string
var pkgPos token.Pos
- for _, f := range pass.Files {
- ast.Inspect(f, func(node ast.Node) bool {
- f, ok := node.(*ast.FuncDecl)
+ for _, file := range pass.Files {
+ ast.Inspect(file, func(node ast.Node) bool {
+ funcDecl, ok := node.(*ast.FuncDecl)
if !ok {
if node == nil {
return true
@@ -55,15 +59,15 @@ func run(pass *analysis.Pass) (interface{}, error) {
return true
}
- if skipTests && testFunc(f) {
+ if skipTests && testFunc(funcDecl) {
return true
}
count++
- comp := complexity(f)
+ comp := complexity(funcDecl)
sum += float64(comp)
if comp > maxComplexity {
- pass.Reportf(node.Pos(), "calculated cyclomatic complexity for function %s is %d, max is %d", f.Name.Name, comp, maxComplexity)
+ pass.Reportf(node.Pos(), "calculated cyclomatic complexity for function %s is %d, max is %d", funcDecl.Name.Name, comp, maxComplexity)
}
return true
diff --git a/vendor/github.com/breml/bidichk/pkg/bidichk/bidichk.go b/vendor/github.com/breml/bidichk/pkg/bidichk/bidichk.go
index f1bf20fab..39d3cd44e 100644
--- a/vendor/github.com/breml/bidichk/pkg/bidichk/bidichk.go
+++ b/vendor/github.com/breml/bidichk/pkg/bidichk/bidichk.go
@@ -14,7 +14,7 @@ import (
)
const (
- doc = "bidichk detects dangerous unicode character sequences"
+ doc = "Checks for dangerous unicode character sequences"
disallowedDoc = `comma separated list of disallowed runes (full name or short name)
Supported runes
@@ -142,25 +142,28 @@ func NewAnalyzer() *analysis.Analyzer {
}
func (b bidichk) run(pass *analysis.Pass) (interface{}, error) {
- var err error
+ readFile := pass.ReadFile
+ if readFile == nil {
+ readFile = os.ReadFile
+ }
- pass.Fset.Iterate(func(f *token.File) bool {
- if strings.HasPrefix(f.Name(), "$GOROOT") {
- return true
+ for _, astFile := range pass.Files {
+ f := pass.Fset.File(astFile.FileStart)
+ if f == nil {
+ continue
}
- return b.check(f.Name(), f.Pos(0), pass) == nil
- })
-
- return nil, err
-}
+ body, err := readFile(f.Name())
+ if err != nil {
+ return nil, err
+ }
-func (b bidichk) check(filename string, pos token.Pos, pass *analysis.Pass) error {
- body, err := os.ReadFile(filename)
- if err != nil {
- return err
+ b.check(body, f.Pos(0), pass)
}
+ return nil, nil
+}
+func (b bidichk) check(body []byte, pos token.Pos, pass *analysis.Pass) {
for name, r := range b.disallowedRunes {
start := 0
for {
@@ -175,6 +178,4 @@ func (b bidichk) check(filename string, pos token.Pos, pass *analysis.Pass) erro
start += utf8.RuneLen(r)
}
}
-
- return nil
}
diff --git a/vendor/github.com/breml/errchkjson/.goreleaser.yml b/vendor/github.com/breml/errchkjson/.goreleaser.yml
index a05c172cb..111369053 100644
--- a/vendor/github.com/breml/errchkjson/.goreleaser.yml
+++ b/vendor/github.com/breml/errchkjson/.goreleaser.yml
@@ -1,3 +1,6 @@
+---
+version: 2
+
# This is an example .goreleaser.yml file with some sane defaults.
# Make sure to check the documentation at http://goreleaser.com
before:
@@ -23,9 +26,9 @@ archives:
{{- else }}{{ .Arch }}{{ end }}
{{- if .Arm }}v{{ .Arm }}{{ end -}}
snapshot:
- name_template: "{{ .Tag }}-next"
+ version_template: "{{ .Tag }}-next"
changelog:
- skip: true
+ disable: true
release:
github:
owner: breml
diff --git a/vendor/github.com/breml/errchkjson/README.md b/vendor/github.com/breml/errchkjson/README.md
index 197959738..a387ea23d 100644
--- a/vendor/github.com/breml/errchkjson/README.md
+++ b/vendor/github.com/breml/errchkjson/README.md
@@ -55,7 +55,7 @@ response type, the linter will warn you.
Download `errchkjson` from the [releases](https://github.com/breml/errchkjson/releases) or get the latest version from source with:
```shell
-go get github.com/breml/errchkjson/cmd/errchkjson
+go install github.com/breml/errchkjson/cmd/errchkjson@latest
```
## Usage
diff --git a/vendor/github.com/breml/errchkjson/errchkjson.go b/vendor/github.com/breml/errchkjson/errchkjson.go
index 4a23929cf..7c8cd82e9 100644
--- a/vendor/github.com/breml/errchkjson/errchkjson.go
+++ b/vendor/github.com/breml/errchkjson/errchkjson.go
@@ -25,7 +25,7 @@ func NewAnalyzer() *analysis.Analyzer {
a := &analysis.Analyzer{
Name: "errchkjson",
- Doc: "Checks types passed to the json encoding functions. Reports unsupported types and reports occations, where the check for the returned error can be omitted.",
+ Doc: "Checks types passed to the json encoding functions. Reports unsupported types and reports occurrences where the check for the returned error can be omitted.",
Run: errchkjson.run,
}
diff --git a/vendor/github.com/ckaznocha/intrange/intrange.go b/vendor/github.com/ckaznocha/intrange/intrange.go
index 33cddf303..44a15091e 100644
--- a/vendor/github.com/ckaznocha/intrange/intrange.go
+++ b/vendor/github.com/ckaznocha/intrange/intrange.go
@@ -25,8 +25,9 @@ var (
)
const (
- msg = "for loop can be changed to use an integer range (Go 1.22+)"
- msgLenRange = "for loop can be changed to `i := range %s`"
+ msg = "for loop can be changed to use an integer range (Go 1.22+)"
+ msgLenRange = "for loop can be changed to `%s := range %s`"
+ msgLenRangeNoIdent = "for loop can be changed to `range %s`"
)
func run(pass *analysis.Pass) (any, error) {
@@ -243,21 +244,26 @@ func checkForStmt(pass *analysis.Pass, forStmt *ast.ForStmt) {
}
func checkRangeStmt(pass *analysis.Pass, rangeStmt *ast.RangeStmt) {
- if rangeStmt.Key == nil {
+ if rangeStmt.Value != nil {
return
}
- ident, ok := rangeStmt.Key.(*ast.Ident)
- if !ok {
- return
- }
+ startPos := rangeStmt.Range
+ usesKey := rangeStmt.Key != nil
+ identName := ""
- if ident.Name == "_" {
- return
- }
+ if usesKey {
+ ident, ok := rangeStmt.Key.(*ast.Ident)
+ if !ok {
+ return
+ }
- if rangeStmt.Value != nil {
- return
+ if ident.Name == "_" {
+ usesKey = false
+ }
+
+ identName = ident.Name
+ startPos = ident.Pos()
}
if rangeStmt.X == nil {
@@ -295,18 +301,40 @@ func checkRangeStmt(pass *analysis.Pass, rangeStmt *ast.RangeStmt) {
return
}
+ if usesKey {
+ pass.Report(analysis.Diagnostic{
+ Pos: startPos,
+ End: x.End(),
+ Message: fmt.Sprintf(msgLenRange, identName, arg.Name),
+ SuggestedFixes: []analysis.SuggestedFix{
+ {
+ Message: fmt.Sprintf("Replace `len(%s)` with `%s`", arg.Name, arg.Name),
+ TextEdits: []analysis.TextEdit{
+ {
+ Pos: x.Pos(),
+ End: x.End(),
+ NewText: []byte(arg.Name),
+ },
+ },
+ },
+ },
+ })
+
+ return
+ }
+
pass.Report(analysis.Diagnostic{
- Pos: ident.Pos(),
+ Pos: startPos,
End: x.End(),
- Message: fmt.Sprintf(msgLenRange, arg.Name),
+ Message: fmt.Sprintf(msgLenRangeNoIdent, arg.Name),
SuggestedFixes: []analysis.SuggestedFix{
{
Message: fmt.Sprintf("Replace `len(%s)` with `%s`", arg.Name, arg.Name),
TextEdits: []analysis.TextEdit{
{
- Pos: x.Pos(),
+ Pos: startPos,
End: x.End(),
- NewText: []byte(arg.Name),
+ NewText: []byte(fmt.Sprintf("range %s", arg.Name)),
},
},
},
diff --git a/vendor/github.com/fatih/color/README.md b/vendor/github.com/fatih/color/README.md
index be82827ca..d135bfe02 100644
--- a/vendor/github.com/fatih/color/README.md
+++ b/vendor/github.com/fatih/color/README.md
@@ -9,7 +9,7 @@ suits you.
## Install
-```bash
+```
go get github.com/fatih/color
```
@@ -30,6 +30,18 @@ color.Magenta("And many others ..")
```
+### RGB colors
+
+If your terminal supports 24-bit colors, you can use RGB color codes.
+
+```go
+color.RGB(255, 128, 0).Println("foreground orange")
+color.RGB(230, 42, 42).Println("foreground red")
+
+color.BgRGB(255, 128, 0).Println("background orange")
+color.BgRGB(230, 42, 42).Println("background red")
+```
+
### Mix and reuse colors
```go
@@ -49,6 +61,11 @@ boldRed.Println("This will print text in bold red.")
whiteBackground := red.Add(color.BgWhite)
whiteBackground.Println("Red text with white background.")
+
+// Mix with RGB color codes
+color.RGB(255, 128, 0).AddBgRGB(0, 0, 0).Println("orange with black background")
+
+color.BgRGB(255, 128, 0).AddRGB(255, 255, 255).Println("orange background with white foreground")
```
### Use your own output (io.Writer)
@@ -161,10 +178,6 @@ c.Println("This prints again cyan...")
To output color in GitHub Actions (or other CI systems that support ANSI colors), make sure to set `color.NoColor = false` so that it bypasses the check for non-tty output streams.
-## Todo
-
-* Save/Return previous values
-* Evaluate fmt.Formatter interface
## Credits
diff --git a/vendor/github.com/fatih/color/color.go b/vendor/github.com/fatih/color/color.go
index 81094e87c..ee39b408e 100644
--- a/vendor/github.com/fatih/color/color.go
+++ b/vendor/github.com/fatih/color/color.go
@@ -98,6 +98,9 @@ const (
FgMagenta
FgCyan
FgWhite
+
+ // used internally for 256 and 24-bit coloring
+ foreground
)
// Foreground Hi-Intensity text colors
@@ -122,6 +125,9 @@ const (
BgMagenta
BgCyan
BgWhite
+
+ // used internally for 256 and 24-bit coloring
+ background
)
// Background Hi-Intensity text colors
@@ -150,6 +156,30 @@ func New(value ...Attribute) *Color {
return c
}
+// RGB returns a new foreground color in 24-bit RGB.
+func RGB(r, g, b int) *Color {
+ return New(foreground, 2, Attribute(r), Attribute(g), Attribute(b))
+}
+
+// BgRGB returns a new background color in 24-bit RGB.
+func BgRGB(r, g, b int) *Color {
+ return New(background, 2, Attribute(r), Attribute(g), Attribute(b))
+}
+
+// AddRGB is used to chain foreground RGB SGR parameters. Use as many as parameters to combine
+// and create custom color objects. Example: .Add(34, 0, 12).Add(255, 128, 0).
+func (c *Color) AddRGB(r, g, b int) *Color {
+ c.params = append(c.params, foreground, 2, Attribute(r), Attribute(g), Attribute(b))
+ return c
+}
+
+// AddRGB is used to chain background RGB SGR parameters. Use as many as parameters to combine
+// and create custom color objects. Example: .Add(34, 0, 12).Add(255, 128, 0).
+func (c *Color) AddBgRGB(r, g, b int) *Color {
+ c.params = append(c.params, background, 2, Attribute(r), Attribute(g), Attribute(b))
+ return c
+}
+
// Set sets the given parameters immediately. It will change the color of
// output with the given SGR parameters until color.Unset() is called.
func Set(p ...Attribute) *Color {
@@ -401,7 +431,7 @@ func (c *Color) format() string {
func (c *Color) unformat() string {
//return fmt.Sprintf("%s[%dm", escape, Reset)
- //for each element in sequence let's use the speficic reset escape, ou the generic one if not found
+ //for each element in sequence let's use the specific reset escape, or the generic one if not found
format := make([]string, len(c.params))
for i, v := range c.params {
format[i] = strconv.Itoa(int(Reset))
diff --git a/vendor/github.com/ghostiam/protogetter/processor.go b/vendor/github.com/ghostiam/protogetter/processor.go
index eca82939d..44f346e85 100644
--- a/vendor/github.com/ghostiam/protogetter/processor.go
+++ b/vendor/github.com/ghostiam/protogetter/processor.go
@@ -218,7 +218,7 @@ func (c *processor) processInner(expr ast.Expr) {
c.write("*")
c.processInner(x.X)
- case *ast.CompositeLit, *ast.TypeAssertExpr, *ast.ArrayType, *ast.FuncLit:
+ case *ast.CompositeLit, *ast.TypeAssertExpr, *ast.ArrayType, *ast.FuncLit, *ast.SliceExpr:
// Process the node as is.
c.write(formatNode(x))
diff --git a/vendor/github.com/go-critic/go-critic/checkers/caseOrder_checker.go b/vendor/github.com/go-critic/go-critic/checkers/caseOrder_checker.go
index 306756834..345274f1c 100644
--- a/vendor/github.com/go-critic/go-critic/checkers/caseOrder_checker.go
+++ b/vendor/github.com/go-critic/go-critic/checkers/caseOrder_checker.go
@@ -82,7 +82,7 @@ func (c *caseOrderChecker) warnUnknownType(cause, concrete ast.Node) {
c.ctx.Warn(cause, "type is not defined %s", concrete)
}
-func (c *caseOrderChecker) checkSwitch(s *ast.SwitchStmt) {
+func (c *caseOrderChecker) checkSwitch(_ *ast.SwitchStmt) {
// TODO(quasilyte): can handle expression cases that overlap.
// Cases that have narrower value range should go before wider ones.
}
diff --git a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walk_handler.go b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walk_handler.go
index 1f6e948d5..96d2dd0e6 100644
--- a/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walk_handler.go
+++ b/vendor/github.com/go-critic/go-critic/checkers/internal/astwalk/walk_handler.go
@@ -17,7 +17,7 @@ type WalkHandler struct {
// EnterFile is a default walkerEvents.EnterFile implementation
// that reports every file as accepted candidate for checking.
-func (w *WalkHandler) EnterFile(f *ast.File) bool {
+func (w *WalkHandler) EnterFile(_ *ast.File) bool {
return true
}
diff --git a/vendor/github.com/go-critic/go-critic/checkers/rangeAppendAll_checker.go b/vendor/github.com/go-critic/go-critic/checkers/rangeAppendAll_checker.go
new file mode 100644
index 000000000..f4851d402
--- /dev/null
+++ b/vendor/github.com/go-critic/go-critic/checkers/rangeAppendAll_checker.go
@@ -0,0 +1,100 @@
+package checkers
+
+import (
+ "go/ast"
+ "go/token"
+
+ "github.com/go-critic/go-critic/checkers/internal/astwalk"
+ "github.com/go-critic/go-critic/linter"
+ "github.com/go-toolsmith/astcast"
+ "golang.org/x/tools/go/ast/astutil"
+)
+
+func init() {
+ var info linter.CheckerInfo
+ info.Name = "rangeAppendAll"
+ info.Tags = []string{linter.DiagnosticTag, linter.ExperimentalTag}
+ info.Summary = "Detects append all its data while range it"
+ info.Before = `for _, n := range ns {
+ ...
+ rs = append(rs, ns...) // append all slice data
+ }
+}`
+ info.After = `for _, n := range ns {
+ ...
+ rs = append(rs, n)
+ }
+}`
+
+ collection.AddChecker(&info, func(ctx *linter.CheckerContext) (linter.FileWalker, error) {
+ c := &rangeAppendAllChecker{ctx: ctx}
+ return astwalk.WalkerForStmt(c), nil
+ })
+}
+
+type rangeAppendAllChecker struct {
+ astwalk.WalkHandler
+ ctx *linter.CheckerContext
+}
+
+func (c *rangeAppendAllChecker) VisitStmt(stmt ast.Stmt) {
+ rangeStmt, ok := stmt.(*ast.RangeStmt)
+ if !ok || len(rangeStmt.Body.List) == 0 {
+ return
+ }
+ rangeIdent, ok := rangeStmt.X.(*ast.Ident)
+ if !ok {
+ return
+ }
+ rangeObj := c.ctx.TypesInfo.ObjectOf(rangeIdent)
+
+ astutil.Apply(rangeStmt.Body, nil, func(cur *astutil.Cursor) bool {
+ appendFrom := c.getValidAppendFrom(cur.Node())
+ if appendFrom != nil {
+ appendFromObj := c.ctx.TypesInfo.ObjectOf(appendFrom)
+ if appendFromObj == rangeObj {
+ c.warn(appendFrom)
+ }
+ }
+ return true
+ })
+}
+
+func (c *rangeAppendAllChecker) getValidAppendFrom(expr ast.Node) *ast.Ident {
+ call := astcast.ToCallExpr(expr)
+ if len(call.Args) != 2 || call.Ellipsis == token.NoPos {
+ return nil
+ }
+ if qualifiedName(call.Fun) != "append" {
+ return nil
+ }
+ if c.isSliceLiteral(call.Args[0]) {
+ return nil
+ }
+ appendFrom, ok := call.Args[1].(*ast.Ident)
+ if !ok {
+ return nil
+ }
+ return appendFrom
+}
+
+func (c *rangeAppendAllChecker) isSliceLiteral(arg ast.Expr) bool {
+ switch v := arg.(type) {
+ // []T{}, []T{n}
+ case *ast.CompositeLit:
+ return true
+ // []T(nil)
+ case *ast.CallExpr:
+ if astcast.ToArrayType(v.Fun) != astcast.NilArrayType && len(v.Args) == 1 {
+ id := astcast.ToIdent(v.Args[0])
+ return id.Name == "nil" && id.Obj == nil
+ }
+ return false
+ default:
+ return false
+ }
+}
+
+func (c *rangeAppendAllChecker) warn(appendFrom *ast.Ident) {
+ c.ctx.Warn(appendFrom, "append all `%s` data while range it", appendFrom)
+}
diff --git a/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go b/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go
index 29723a69a..485819842 100644
--- a/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go
+++ b/vendor/github.com/go-critic/go-critic/checkers/ruleguard_checker.go
@@ -87,7 +87,7 @@ func newErrorHandler(failOnErrorFlag string) (*parseErrorHandler, error) {
failOnErrorPredicates := map[string]func(error) bool{
"dsl": func(err error) bool { var e *ruleguard.ImportError; return !errors.As(err, &e) },
"import": func(err error) bool { var e *ruleguard.ImportError; return errors.As(err, &e) },
- "all": func(err error) bool { return true },
+ "all": func(_ error) bool { return true },
}
for _, k := range strings.Split(failOnErrorFlag, ",") {
if k == "" {
diff --git a/vendor/github.com/go-critic/go-critic/linter/helpers.go b/vendor/github.com/go-critic/go-critic/linter/helpers.go
index 0a3fc0292..d5110df64 100644
--- a/vendor/github.com/go-critic/go-critic/linter/helpers.go
+++ b/vendor/github.com/go-critic/go-critic/linter/helpers.go
@@ -116,7 +116,7 @@ func validateCheckerName(info *CheckerInfo) error {
return nil
}
-func validateCheckerDocumentation(info *CheckerInfo) error {
+func validateCheckerDocumentation(_ *CheckerInfo) error {
// TODO(quasilyte): validate documentation.
return nil
}
diff --git a/vendor/github.com/go-viper/mapstructure/v2/decode_hooks.go b/vendor/github.com/go-viper/mapstructure/v2/decode_hooks.go
index 2523c6ad9..1f3c69d4b 100644
--- a/vendor/github.com/go-viper/mapstructure/v2/decode_hooks.go
+++ b/vendor/github.com/go-viper/mapstructure/v2/decode_hooks.go
@@ -6,6 +6,7 @@ import (
"fmt"
"net"
"net/netip"
+ "net/url"
"reflect"
"strconv"
"strings"
@@ -176,6 +177,26 @@ func StringToTimeDurationHookFunc() DecodeHookFunc {
}
}
+// StringToURLHookFunc returns a DecodeHookFunc that converts
+// strings to *url.URL.
+func StringToURLHookFunc() DecodeHookFunc {
+ return func(
+ f reflect.Type,
+ t reflect.Type,
+ data interface{},
+ ) (interface{}, error) {
+ if f.Kind() != reflect.String {
+ return data, nil
+ }
+ if t != reflect.TypeOf(&url.URL{}) {
+ return data, nil
+ }
+
+ // Convert it by parsing
+ return url.Parse(data.(string))
+ }
+}
+
// StringToIPHookFunc returns a DecodeHookFunc that converts
// strings to net.IP
func StringToIPHookFunc() DecodeHookFunc {
diff --git a/vendor/github.com/go-viper/mapstructure/v2/mapstructure.go b/vendor/github.com/go-viper/mapstructure/v2/mapstructure.go
index 1cd6204bb..e77e63ba3 100644
--- a/vendor/github.com/go-viper/mapstructure/v2/mapstructure.go
+++ b/vendor/github.com/go-viper/mapstructure/v2/mapstructure.go
@@ -278,6 +278,10 @@ type DecoderConfig struct {
// field name or tag. Defaults to `strings.EqualFold`. This can be used
// to implement case-sensitive tag values, support snake casing, etc.
MatchName func(mapKey, fieldName string) bool
+
+ // DecodeNil, if set to true, will cause the DecodeHook (if present) to run
+ // even if the input is nil. This can be used to provide default values.
+ DecodeNil bool
}
// A Decoder takes a raw interface value and turns it into structured
@@ -438,19 +442,26 @@ func (d *Decoder) Decode(input interface{}) error {
return err
}
+// isNil returns true if the input is nil or a typed nil pointer.
+func isNil(input interface{}) bool {
+ if input == nil {
+ return true
+ }
+ val := reflect.ValueOf(input)
+ return val.Kind() == reflect.Ptr && val.IsNil()
+}
+
// Decodes an unknown data type into a specific reflection value.
func (d *Decoder) decode(name string, input interface{}, outVal reflect.Value) error {
- var inputVal reflect.Value
- if input != nil {
- inputVal = reflect.ValueOf(input)
-
- // We need to check here if input is a typed nil. Typed nils won't
- // match the "input == nil" below so we check that here.
- if inputVal.Kind() == reflect.Ptr && inputVal.IsNil() {
- input = nil
- }
+ var (
+ inputVal = reflect.ValueOf(input)
+ outputKind = getKind(outVal)
+ decodeNil = d.config.DecodeNil && d.cachedDecodeHook != nil
+ )
+ if isNil(input) {
+ // Typed nils won't match the "input == nil" below, so reset input.
+ input = nil
}
-
if input == nil {
// If the data is nil, then we don't set anything, unless ZeroFields is set
// to true.
@@ -461,17 +472,31 @@ func (d *Decoder) decode(name string, input interface{}, outVal reflect.Value) e
d.config.Metadata.Keys = append(d.config.Metadata.Keys, name)
}
}
- return nil
+ if !decodeNil {
+ return nil
+ }
}
-
if !inputVal.IsValid() {
- // If the input value is invalid, then we just set the value
- // to be the zero value.
- outVal.Set(reflect.Zero(outVal.Type()))
- if d.config.Metadata != nil && name != "" {
- d.config.Metadata.Keys = append(d.config.Metadata.Keys, name)
+ if !decodeNil {
+ // If the input value is invalid, then we just set the value
+ // to be the zero value.
+ outVal.Set(reflect.Zero(outVal.Type()))
+ if d.config.Metadata != nil && name != "" {
+ d.config.Metadata.Keys = append(d.config.Metadata.Keys, name)
+ }
+ return nil
+ }
+ // Hooks need a valid inputVal, so reset it to zero value of outVal type.
+ switch outputKind {
+ case reflect.Struct, reflect.Map:
+ var mapVal map[string]interface{}
+ inputVal = reflect.ValueOf(mapVal) // create nil map pointer
+ case reflect.Slice, reflect.Array:
+ var sliceVal []interface{}
+ inputVal = reflect.ValueOf(sliceVal) // create nil slice pointer
+ default:
+ inputVal = reflect.Zero(outVal.Type())
}
- return nil
}
if d.cachedDecodeHook != nil {
@@ -482,9 +507,11 @@ func (d *Decoder) decode(name string, input interface{}, outVal reflect.Value) e
return fmt.Errorf("error decoding '%s': %w", name, err)
}
}
+ if isNil(input) {
+ return nil
+ }
var err error
- outputKind := getKind(outVal)
addMetaKey := true
switch outputKind {
case reflect.Bool:
@@ -765,8 +792,8 @@ func (d *Decoder) decodeBool(name string, data interface{}, val reflect.Value) e
}
default:
return fmt.Errorf(
- "'%s' expected type '%s', got unconvertible type '%s', value: '%v'",
- name, val.Type(), dataVal.Type(), data)
+ "'%s' expected type '%s', got unconvertible type '%#v', value: '%#v'",
+ name, val, dataVal, data)
}
return nil
diff --git a/vendor/github.com/jirfag/go-printf-func-name/LICENSE b/vendor/github.com/golangci/go-printf-func-name/LICENSE
index d06a809c2..4585140d1 100644
--- a/vendor/github.com/jirfag/go-printf-func-name/LICENSE
+++ b/vendor/github.com/golangci/go-printf-func-name/LICENSE
@@ -1,5 +1,6 @@
MIT License
+Copyright (c) 2024 Golangci-lint authors
Copyright (c) 2020 Isaev Denis
Permission is hereby granted, free of charge, to any person obtaining a copy
diff --git a/vendor/github.com/jirfag/go-printf-func-name/pkg/analyzer/analyzer.go b/vendor/github.com/golangci/go-printf-func-name/pkg/analyzer/analyzer.go
index 7937dd433..bce4b242e 100644
--- a/vendor/github.com/jirfag/go-printf-func-name/pkg/analyzer/analyzer.go
+++ b/vendor/github.com/golangci/go-printf-func-name/pkg/analyzer/analyzer.go
@@ -4,10 +4,9 @@ import (
"go/ast"
"strings"
+ "golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
-
- "golang.org/x/tools/go/analysis"
)
var Analyzer = &analysis.Analyzer{
@@ -18,12 +17,13 @@ var Analyzer = &analysis.Analyzer{
}
func run(pass *analysis.Pass) (interface{}, error) {
- inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+ insp := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
nodeFilter := []ast.Node{
(*ast.FuncDecl)(nil),
}
- inspector.Preorder(nodeFilter, func(node ast.Node) {
+ insp.Preorder(nodeFilter, func(node ast.Node) {
funcDecl := node.(*ast.FuncDecl)
if res := funcDecl.Type.Results; res != nil && len(res.List) != 0 {
diff --git a/vendor/github.com/golangci/golangci-lint/internal/cache/cache.go b/vendor/github.com/golangci/golangci-lint/internal/cache/cache.go
index 299fd5279..c249084e1 100644
--- a/vendor/github.com/golangci/golangci-lint/internal/cache/cache.go
+++ b/vendor/github.com/golangci/golangci-lint/internal/cache/cache.go
@@ -1,525 +1,298 @@
-// Copyright 2017 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package cache implements a build artifact cache.
-//
-// This package is a slightly modified fork of Go's
-// cmd/go/internal/cache package.
package cache
import (
"bytes"
- "crypto/sha256"
+ "encoding/gob"
"encoding/hex"
"errors"
"fmt"
- "io"
- "os"
- "path/filepath"
- "strconv"
+ "runtime"
+ "slices"
"strings"
- "time"
+ "sync"
- "github.com/golangci/golangci-lint/internal/renameio"
- "github.com/golangci/golangci-lint/internal/robustio"
+ "golang.org/x/exp/maps"
+ "golang.org/x/tools/go/packages"
+
+ "github.com/golangci/golangci-lint/internal/go/cache"
+ "github.com/golangci/golangci-lint/pkg/logutils"
+ "github.com/golangci/golangci-lint/pkg/timeutils"
+)
+
+type HashMode int
+
+const (
+ HashModeNeedOnlySelf HashMode = iota
+ HashModeNeedDirectDeps
+ HashModeNeedAllDeps
)
-// An ActionID is a cache action key, the hash of a complete description of a
-// repeatable computation (command line, environment variables,
-// input file contents, executable contents).
-type ActionID [HashSize]byte
+var ErrMissing = errors.New("missing data")
-// An OutputID is a cache output key, the hash of an output of a computation.
-type OutputID [HashSize]byte
+type hashResults map[HashMode]string
-// A Cache is a package cache, backed by a file system directory tree.
+// Cache is a per-package data cache.
+// A cached data is invalidated when package,
+// or it's dependencies change.
type Cache struct {
- dir string
- now func() time.Time
+ lowLevelCache cache.Cache
+ pkgHashes sync.Map
+ sw *timeutils.Stopwatch
+ log logutils.Log
+ ioSem chan struct{} // semaphore limiting parallel IO
+}
+
+func NewCache(sw *timeutils.Stopwatch, log logutils.Log) (*Cache, error) {
+ return &Cache{
+ lowLevelCache: cache.Default(),
+ sw: sw,
+ log: log,
+ ioSem: make(chan struct{}, runtime.GOMAXPROCS(-1)),
+ }, nil
}
-// Open opens and returns the cache in the given directory.
-//
-// It is safe for multiple processes on a single machine to use the
-// same cache directory in a local file system simultaneously.
-// They will coordinate using operating system file locks and may
-// duplicate effort but will not corrupt the cache.
-//
-// However, it is NOT safe for multiple processes on different machines
-// to share a cache directory (for example, if the directory were stored
-// in a network file system). File locking is notoriously unreliable in
-// network file systems and may not suffice to protect the cache.
-func Open(dir string) (*Cache, error) {
- info, err := os.Stat(dir)
+func (c *Cache) Close() {
+ err := c.sw.TrackStageErr("close", c.lowLevelCache.Close)
if err != nil {
- return nil, err
+ c.log.Errorf("cache close: %v", err)
}
- if !info.IsDir() {
- return nil, &os.PathError{Op: "open", Path: dir, Err: errors.New("not a directory")}
- }
- for i := 0; i < 256; i++ {
- name := filepath.Join(dir, fmt.Sprintf("%02x", i))
- if err := os.MkdirAll(name, 0744); err != nil {
- return nil, err
- }
- }
- c := &Cache{
- dir: dir,
- now: time.Now,
- }
- return c, nil
}
-// fileName returns the name of the file corresponding to the given id.
-func (c *Cache) fileName(id [HashSize]byte, key string) string {
- return filepath.Join(c.dir, fmt.Sprintf("%02x", id[0]), fmt.Sprintf("%x", id)+"-"+key)
-}
+func (c *Cache) Put(pkg *packages.Package, mode HashMode, key string, data any) error {
+ buf, err := c.encode(data)
+ if err != nil {
+ return err
+ }
-var errMissing = errors.New("cache entry not found")
+ actionID, err := c.buildKey(pkg, mode, key)
+ if err != nil {
+ return fmt.Errorf("failed to calculate package %s action id: %w", pkg.Name, err)
+ }
+
+ err = c.putBytes(actionID, buf)
+ if err != nil {
+ return fmt.Errorf("failed to save data to low-level cache by key %s for package %s: %w", key, pkg.Name, err)
+ }
-func IsErrMissing(err error) bool {
- return errors.Is(err, errMissing)
+ return nil
}
-const (
- // action entry file is "v1 <hex id> <hex out> <decimal size space-padded to 20 bytes> <unixnano space-padded to 20 bytes>\n"
- hexSize = HashSize * 2
- entrySize = 2 + 1 + hexSize + 1 + hexSize + 1 + 20 + 1 + 20 + 1
-)
+func (c *Cache) Get(pkg *packages.Package, mode HashMode, key string, data any) error {
+ actionID, err := c.buildKey(pkg, mode, key)
+ if err != nil {
+ return fmt.Errorf("failed to calculate package %s action id: %w", pkg.Name, err)
+ }
-// verify controls whether to run the cache in verify mode.
-// In verify mode, the cache always returns errMissing from Get
-// but then double-checks in Put that the data being written
-// exactly matches any existing entry. This provides an easy
-// way to detect program behavior that would have been different
-// had the cache entry been returned from Get.
-//
-// verify is enabled by setting the environment variable
-// GODEBUG=gocacheverify=1.
-var verify = false
-
-// DebugTest is set when GODEBUG=gocachetest=1 is in the environment.
-var DebugTest = false
-
-func init() { initEnv() }
-
-func initEnv() {
- verify = false
- debugHash = false
- debug := strings.Split(os.Getenv("GODEBUG"), ",")
- for _, f := range debug {
- if f == "gocacheverify=1" {
- verify = true
- }
- if f == "gocachehash=1" {
- debugHash = true
- }
- if f == "gocachetest=1" {
- DebugTest = true
+ cachedData, err := c.getBytes(actionID)
+ if err != nil {
+ if cache.IsErrMissing(err) {
+ return ErrMissing
}
+ return fmt.Errorf("failed to get data from low-level cache by key %s for package %s: %w", key, pkg.Name, err)
}
-}
-// Get looks up the action ID in the cache,
-// returning the corresponding output ID and file size, if any.
-// Note that finding an output ID does not guarantee that the
-// saved file for that output ID is still available.
-func (c *Cache) Get(id ActionID) (Entry, error) {
- if verify {
- return Entry{}, errMissing
- }
- return c.get(id)
+ return c.decode(cachedData, data)
}
-type Entry struct {
- OutputID OutputID
- Size int64
- Time time.Time
+func (c *Cache) buildKey(pkg *packages.Package, mode HashMode, key string) (cache.ActionID, error) {
+ return timeutils.TrackStage(c.sw, "key build", func() (cache.ActionID, error) {
+ actionID, err := c.pkgActionID(pkg, mode)
+ if err != nil {
+ return actionID, err
+ }
+
+ subkey, subkeyErr := cache.Subkey(actionID, key)
+ if subkeyErr != nil {
+ return actionID, fmt.Errorf("failed to build subkey: %w", subkeyErr)
+ }
+
+ return subkey, nil
+ })
}
-// get is Get but does not respect verify mode, so that Put can use it.
-func (c *Cache) get(id ActionID) (Entry, error) {
- missing := func() (Entry, error) {
- return Entry{}, errMissing
- }
- failed := func(err error) (Entry, error) {
- return Entry{}, err
- }
- fileName := c.fileName(id, "a")
- f, err := os.Open(fileName)
+func (c *Cache) pkgActionID(pkg *packages.Package, mode HashMode) (cache.ActionID, error) {
+ hash, err := c.packageHash(pkg, mode)
if err != nil {
- if os.IsNotExist(err) {
- return missing()
- }
- return failed(err)
- }
- defer f.Close()
- entry := make([]byte, entrySize+1) // +1 to detect whether f is too long
- if n, readErr := io.ReadFull(f, entry); n != entrySize || readErr != io.ErrUnexpectedEOF {
- return failed(fmt.Errorf("read %d/%d bytes from %s with error %w", n, entrySize, fileName, readErr))
- }
- if entry[0] != 'v' || entry[1] != '1' || entry[2] != ' ' || entry[3+hexSize] != ' ' || entry[3+hexSize+1+hexSize] != ' ' || entry[3+hexSize+1+hexSize+1+20] != ' ' || entry[entrySize-1] != '\n' {
- return failed(fmt.Errorf("bad data in %s", fileName))
- }
- eid, entry := entry[3:3+hexSize], entry[3+hexSize:]
- eout, entry := entry[1:1+hexSize], entry[1+hexSize:]
- esize, entry := entry[1:1+20], entry[1+20:]
- etime := entry[1 : 1+20]
- var buf [HashSize]byte
- if _, err = hex.Decode(buf[:], eid); err != nil || buf != id {
- return failed(fmt.Errorf("failed to hex decode eid data in %s: %w", fileName, err))
- }
- if _, err = hex.Decode(buf[:], eout); err != nil {
- return failed(fmt.Errorf("failed to hex decode eout data in %s: %w", fileName, err))
- }
- i := 0
- for i < len(esize) && esize[i] == ' ' {
- i++
- }
- size, err := strconv.ParseInt(string(esize[i:]), 10, 64)
- if err != nil || size < 0 {
- return failed(fmt.Errorf("failed to parse esize int from %s with error %w", fileName, err))
- }
- i = 0
- for i < len(etime) && etime[i] == ' ' {
- i++
- }
- tm, err := strconv.ParseInt(string(etime[i:]), 10, 64)
- if err != nil || tm < 0 {
- return failed(fmt.Errorf("failed to parse etime int from %s with error %w", fileName, err))
+ return cache.ActionID{}, fmt.Errorf("failed to get package hash: %w", err)
}
- if err = c.used(fileName); err != nil {
- return failed(fmt.Errorf("failed to mark %s as used: %w", fileName, err))
+ key, err := cache.NewHash("action ID")
+ if err != nil {
+ return cache.ActionID{}, fmt.Errorf("failed to make a hash: %w", err)
}
- return Entry{buf, size, time.Unix(0, tm)}, nil
+ fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath)
+ fmt.Fprintf(key, "pkghash %s\n", hash)
+
+ return key.Sum(), nil
}
-// GetBytes looks up the action ID in the cache and returns
-// the corresponding output bytes.
-// GetBytes should only be used for data that can be expected to fit in memory.
-func (c *Cache) GetBytes(id ActionID) ([]byte, Entry, error) {
- entry, err := c.Get(id)
- if err != nil {
- return nil, entry, err
- }
- outputFile, err := c.OutputFile(entry.OutputID)
- if err != nil {
- return nil, entry, err
+func (c *Cache) packageHash(pkg *packages.Package, mode HashMode) (string, error) {
+ results, found := c.pkgHashes.Load(pkg)
+ if found {
+ hashRes := results.(hashResults)
+ if result, ok := hashRes[mode]; ok {
+ return result, nil
+ }
+
+ return "", fmt.Errorf("no mode %d in hash result", mode)
}
- data, err := robustio.ReadFile(outputFile)
+ hashRes, err := c.computePkgHash(pkg)
if err != nil {
- return nil, entry, err
+ return "", err
}
- if sha256.Sum256(data) != entry.OutputID {
- return nil, entry, errMissing
+ result, found := hashRes[mode]
+ if !found {
+ return "", fmt.Errorf("invalid mode %d", mode)
}
- return data, entry, nil
+
+ c.pkgHashes.Store(pkg, hashRes)
+
+ return result, nil
}
-// OutputFile returns the name of the cache file storing output with the given OutputID.
-func (c *Cache) OutputFile(out OutputID) (string, error) {
- file := c.fileName(out, "d")
- if err := c.used(file); err != nil {
- return "", err
+// computePkgHash computes a package's hash.
+// The hash is based on all Go files that make up the package,
+// as well as the hashes of imported packages.
+func (c *Cache) computePkgHash(pkg *packages.Package) (hashResults, error) {
+ key, err := cache.NewHash("package hash")
+ if err != nil {
+ return nil, fmt.Errorf("failed to make a hash: %w", err)
}
- return file, nil
-}
-// Time constants for cache expiration.
-//
-// We set the mtime on a cache file on each use, but at most one per mtimeInterval (1 hour),
-// to avoid causing many unnecessary inode updates. The mtimes therefore
-// roughly reflect "time of last use" but may in fact be older by at most an hour.
-//
-// We scan the cache for entries to delete at most once per trimInterval (1 day).
-//
-// When we do scan the cache, we delete entries that have not been used for
-// at least trimLimit (5 days). Statistics gathered from a month of usage by
-// Go developers found that essentially all reuse of cached entries happened
-// within 5 days of the previous reuse. See golang.org/issue/22990.
-const (
- mtimeInterval = 1 * time.Hour
- trimInterval = 24 * time.Hour
- trimLimit = 5 * 24 * time.Hour
-)
+ hashRes := hashResults{}
-// used makes a best-effort attempt to update mtime on file,
-// so that mtime reflects cache access time.
-//
-// Because the reflection only needs to be approximate,
-// and to reduce the amount of disk activity caused by using
-// cache entries, used only updates the mtime if the current
-// mtime is more than an hour old. This heuristic eliminates
-// nearly all the mtime updates that would otherwise happen,
-// while still keeping the mtimes useful for cache trimming.
-func (c *Cache) used(file string) error {
- info, err := os.Stat(file)
- if err != nil {
- if os.IsNotExist(err) {
- return errMissing
+ fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath)
+
+ for _, f := range pkg.CompiledGoFiles {
+ h, fErr := c.fileHash(f)
+ if fErr != nil {
+ return nil, fmt.Errorf("failed to calculate file %s hash: %w", f, fErr)
}
- return fmt.Errorf("failed to stat file %s: %w", file, err)
- }
- if c.now().Sub(info.ModTime()) < mtimeInterval {
- return nil
+ fmt.Fprintf(key, "file %s %x\n", f, h)
}
- if err := os.Chtimes(file, c.now(), c.now()); err != nil {
- return fmt.Errorf("failed to change time of file %s: %w", file, err)
- }
+ curSum := key.Sum()
+ hashRes[HashModeNeedOnlySelf] = hex.EncodeToString(curSum[:])
- return nil
-}
+ imps := maps.Values(pkg.Imports)
-// Trim removes old cache entries that are likely not to be reused.
-func (c *Cache) Trim() {
- now := c.now()
-
- // We maintain in dir/trim.txt the time of the last completed cache trim.
- // If the cache has been trimmed recently enough, do nothing.
- // This is the common case.
- data, _ := renameio.ReadFile(filepath.Join(c.dir, "trim.txt"))
- t, err := strconv.ParseInt(strings.TrimSpace(string(data)), 10, 64)
- if err == nil && now.Sub(time.Unix(t, 0)) < trimInterval {
- return
- }
+ slices.SortFunc(imps, func(a, b *packages.Package) int {
+ return strings.Compare(a.PkgPath, b.PkgPath)
+ })
- // Trim each of the 256 subdirectories.
- // We subtract an additional mtimeInterval
- // to account for the imprecision of our "last used" mtimes.
- cutoff := now.Add(-trimLimit - mtimeInterval)
- for i := 0; i < 256; i++ {
- subdir := filepath.Join(c.dir, fmt.Sprintf("%02x", i))
- c.trimSubdir(subdir, cutoff)
+ if err := c.computeDepsHash(HashModeNeedOnlySelf, imps, key); err != nil {
+ return nil, err
}
- // Ignore errors from here: if we don't write the complete timestamp, the
- // cache will appear older than it is, and we'll trim it again next time.
- _ = renameio.WriteFile(filepath.Join(c.dir, "trim.txt"), []byte(fmt.Sprintf("%d", now.Unix())), 0666)
-}
+ curSum = key.Sum()
+ hashRes[HashModeNeedDirectDeps] = hex.EncodeToString(curSum[:])
-// trimSubdir trims a single cache subdirectory.
-func (c *Cache) trimSubdir(subdir string, cutoff time.Time) {
- // Read all directory entries from subdir before removing
- // any files, in case removing files invalidates the file offset
- // in the directory scan. Also, ignore error from f.Readdirnames,
- // because we don't care about reporting the error, and we still
- // want to process any entries found before the error.
- f, err := os.Open(subdir)
- if err != nil {
- return
+ if err := c.computeDepsHash(HashModeNeedAllDeps, imps, key); err != nil {
+ return nil, err
}
- names, _ := f.Readdirnames(-1)
- f.Close()
- for _, name := range names {
- // Remove only cache entries (xxxx-a and xxxx-d).
- if !strings.HasSuffix(name, "-a") && !strings.HasSuffix(name, "-d") {
+ curSum = key.Sum()
+ hashRes[HashModeNeedAllDeps] = hex.EncodeToString(curSum[:])
+
+ return hashRes, nil
+}
+
+func (c *Cache) computeDepsHash(depMode HashMode, imps []*packages.Package, key *cache.Hash) error {
+ for _, dep := range imps {
+ if dep.PkgPath == "unsafe" {
continue
}
- entry := filepath.Join(subdir, name)
- info, err := os.Stat(entry)
- if err == nil && info.ModTime().Before(cutoff) {
- os.Remove(entry)
+
+ depHash, err := c.packageHash(dep, depMode)
+ if err != nil {
+ return fmt.Errorf("failed to calculate hash for dependency %s with mode %d: %w", dep.Name, depMode, err)
}
+
+ fmt.Fprintf(key, "import %s %s\n", dep.PkgPath, depHash)
}
+
+ return nil
}
-// putIndexEntry adds an entry to the cache recording that executing the action
-// with the given id produces an output with the given output id (hash) and size.
-func (c *Cache) putIndexEntry(id ActionID, out OutputID, size int64, allowVerify bool) error {
- // Note: We expect that for one reason or another it may happen
- // that repeating an action produces a different output hash
- // (for example, if the output contains a time stamp or temp dir name).
- // While not ideal, this is also not a correctness problem, so we
- // don't make a big deal about it. In particular, we leave the action
- // cache entries writable specifically so that they can be overwritten.
- //
- // Setting GODEBUG=gocacheverify=1 does make a big deal:
- // in verify mode we are double-checking that the cache entries
- // are entirely reproducible. As just noted, this may be unrealistic
- // in some cases but the check is also useful for shaking out real bugs.
- entry := fmt.Sprintf("v1 %x %x %20d %20d\n", id, out, size, time.Now().UnixNano())
-
- if verify && allowVerify {
- old, err := c.get(id)
- if err == nil && (old.OutputID != out || old.Size != size) {
- // panic to show stack trace, so we can see what code is generating this cache entry.
- msg := fmt.Sprintf("go: internal cache error: cache verify failed: id=%x changed:<<<\n%s\n>>>\nold: %x %d\nnew: %x %d", id, reverseHash(id), out, size, old.OutputID, old.Size)
- panic(msg)
- }
- }
- file := c.fileName(id, "a")
+func (c *Cache) putBytes(actionID cache.ActionID, buf *bytes.Buffer) error {
+ c.ioSem <- struct{}{}
+
+ err := c.sw.TrackStageErr("cache io", func() error {
+ return cache.PutBytes(c.lowLevelCache, actionID, buf.Bytes())
+ })
+
+ <-c.ioSem
- // Copy file to cache directory.
- mode := os.O_WRONLY | os.O_CREATE
- f, err := os.OpenFile(file, mode, 0666)
- if err != nil {
- return err
- }
- _, err = f.WriteString(entry)
- if err == nil {
- // Truncate the file only *after* writing it.
- // (This should be a no-op, but truncate just in case of previous corruption.)
- //
- // This differs from os.WriteFile, which truncates to 0 *before* writing
- // via os.O_TRUNC. Truncating only after writing ensures that a second write
- // of the same content to the same file is idempotent, and does not — even
- // temporarily! — undo the effect of the first write.
- err = f.Truncate(int64(len(entry)))
- }
- if closeErr := f.Close(); err == nil {
- err = closeErr
- }
if err != nil {
- // TODO(bcmills): This Remove potentially races with another go command writing to file.
- // Can we eliminate it?
- os.Remove(file)
return err
}
- if err = os.Chtimes(file, c.now(), c.now()); err != nil { // mainly for tests
- return fmt.Errorf("failed to change time of file %s: %w", file, err)
- }
return nil
}
-// Put stores the given output in the cache as the output for the action ID.
-// It may read file twice. The content of file must not change between the two passes.
-func (c *Cache) Put(id ActionID, file io.ReadSeeker) (OutputID, int64, error) {
- return c.put(id, file, true)
-}
+func (c *Cache) getBytes(actionID cache.ActionID) ([]byte, error) {
+ c.ioSem <- struct{}{}
-// PutNoVerify is like Put but disables the verify check
-// when GODEBUG=goverifycache=1 is set.
-// It is meant for data that is OK to cache but that we expect to vary slightly from run to run,
-// like test output containing times and the like.
-func (c *Cache) PutNoVerify(id ActionID, file io.ReadSeeker) (OutputID, int64, error) {
- return c.put(id, file, false)
-}
+ cachedData, err := timeutils.TrackStage(c.sw, "cache io", func() ([]byte, error) {
+ b, _, errGB := cache.GetBytes(c.lowLevelCache, actionID)
+ return b, errGB
+ })
-func (c *Cache) put(id ActionID, file io.ReadSeeker, allowVerify bool) (OutputID, int64, error) {
- // Compute output ID.
- h := sha256.New()
- if _, err := file.Seek(0, 0); err != nil {
- return OutputID{}, 0, err
- }
- size, err := io.Copy(h, file)
- if err != nil {
- return OutputID{}, 0, err
- }
- var out OutputID
- h.Sum(out[:0])
+ <-c.ioSem
- // Copy to cached output file (if not already present).
- if err := c.copyFile(file, out, size); err != nil {
- return out, size, err
+ if err != nil {
+ return nil, err
}
- // Add to cache index.
- return out, size, c.putIndexEntry(id, out, size, allowVerify)
+ return cachedData, nil
}
-// PutBytes stores the given bytes in the cache as the output for the action ID.
-func (c *Cache) PutBytes(id ActionID, data []byte) error {
- _, _, err := c.Put(id, bytes.NewReader(data))
- return err
-}
+func (c *Cache) fileHash(f string) ([cache.HashSize]byte, error) {
+ c.ioSem <- struct{}{}
-// copyFile copies file into the cache, expecting it to have the given
-// output ID and size, if that file is not present already.
-func (c *Cache) copyFile(file io.ReadSeeker, out OutputID, size int64) error {
- name := c.fileName(out, "d")
- info, err := os.Stat(name)
- if err == nil && info.Size() == size {
- // Check hash.
- if f, openErr := os.Open(name); openErr == nil {
- h := sha256.New()
- if _, copyErr := io.Copy(h, f); copyErr != nil {
- return fmt.Errorf("failed to copy to sha256: %w", copyErr)
- }
-
- f.Close()
- var out2 OutputID
- h.Sum(out2[:0])
- if out == out2 {
- return nil
- }
- }
- // Hash did not match. Fall through and rewrite file.
- }
+ h, err := cache.FileHash(f)
+
+ <-c.ioSem
- // Copy file to cache directory.
- mode := os.O_RDWR | os.O_CREATE
- if err == nil && info.Size() > size { // shouldn't happen but fix in case
- mode |= os.O_TRUNC
- }
- f, err := os.OpenFile(name, mode, 0666)
if err != nil {
- return err
- }
- defer f.Close()
- if size == 0 {
- // File now exists with correct size.
- // Only one possible zero-length file, so contents are OK too.
- // Early return here makes sure there's a "last byte" for code below.
- return nil
+ return [cache.HashSize]byte{}, err
}
- // From here on, if any of the I/O writing the file fails,
- // we make a best-effort attempt to truncate the file f
- // before returning, to avoid leaving bad bytes in the file.
+ return h, nil
+}
- // Copy file to f, but also into h to double-check hash.
- if _, err = file.Seek(0, 0); err != nil {
- _ = f.Truncate(0)
- return err
- }
- h := sha256.New()
- w := io.MultiWriter(f, h)
- if _, err = io.CopyN(w, file, size-1); err != nil {
- _ = f.Truncate(0)
- return err
- }
- // Check last byte before writing it; writing it will make the size match
- // what other processes expect to find and might cause them to start
- // using the file.
- buf := make([]byte, 1)
- if _, err = file.Read(buf); err != nil {
- _ = f.Truncate(0)
- return err
- }
- if n, wErr := h.Write(buf); n != len(buf) {
- return fmt.Errorf("wrote to hash %d/%d bytes with error %w", n, len(buf), wErr)
+func (c *Cache) encode(data any) (*bytes.Buffer, error) {
+ buf := &bytes.Buffer{}
+ err := c.sw.TrackStageErr("gob", func() error {
+ return gob.NewEncoder(buf).Encode(data)
+ })
+ if err != nil {
+ return nil, fmt.Errorf("failed to gob encode: %w", err)
}
- sum := h.Sum(nil)
- if !bytes.Equal(sum, out[:]) {
- _ = f.Truncate(0)
- return errors.New("file content changed underfoot")
- }
+ return buf, nil
+}
- // Commit cache file entry.
- if _, err = f.Write(buf); err != nil {
- _ = f.Truncate(0)
- return err
- }
- if err = f.Close(); err != nil {
- // Data might not have been written,
- // but file may look like it is the right size.
- // To be extra careful, remove cached file.
- os.Remove(name)
- return err
- }
- if err = os.Chtimes(name, c.now(), c.now()); err != nil { // mainly for tests
- return fmt.Errorf("failed to change time of file %s: %w", name, err)
+func (c *Cache) decode(b []byte, data any) error {
+ err := c.sw.TrackStageErr("gob", func() error {
+ return gob.NewDecoder(bytes.NewReader(b)).Decode(data)
+ })
+ if err != nil {
+ return fmt.Errorf("failed to gob decode: %w", err)
}
return nil
}
+
+func SetSalt(b *bytes.Buffer) {
+ cache.SetSalt(b.Bytes())
+}
+
+func DefaultDir() string {
+ cacheDir, _ := cache.DefaultDir()
+ return cacheDir
+}
diff --git a/vendor/github.com/golangci/golangci-lint/internal/cache/readme.md b/vendor/github.com/golangci/golangci-lint/internal/cache/readme.md
deleted file mode 100644
index b469711ed..000000000
--- a/vendor/github.com/golangci/golangci-lint/internal/cache/readme.md
+++ /dev/null
@@ -1,18 +0,0 @@
-# cache
-
-Extracted from go/src/cmd/go/internal/cache/
-I don't know what version of Go this package was pulled from.
-
-Adapted for golangci-lint:
-- https://github.com/golangci/golangci-lint/pull/699
-- https://github.com/golangci/golangci-lint/pull/779
-- https://github.com/golangci/golangci-lint/pull/788
-- https://github.com/golangci/golangci-lint/pull/808
-- https://github.com/golangci/golangci-lint/pull/1063
-- https://github.com/golangci/golangci-lint/pull/1070
-- https://github.com/golangci/golangci-lint/pull/1162
-- https://github.com/golangci/golangci-lint/pull/2318
-- https://github.com/golangci/golangci-lint/pull/2352
-- https://github.com/golangci/golangci-lint/pull/3012
-- https://github.com/golangci/golangci-lint/pull/3096
-- https://github.com/golangci/golangci-lint/pull/3204
diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/LICENSE b/vendor/github.com/golangci/golangci-lint/internal/go/LICENSE
new file mode 100644
index 000000000..6a66aea5e
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2009 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/cache/cache.go b/vendor/github.com/golangci/golangci-lint/internal/go/cache/cache.go
new file mode 100644
index 000000000..7bf4f1d66
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/cache/cache.go
@@ -0,0 +1,663 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package cache implements a build artifact cache.
+//
+// This package is a slightly modified fork of Go's
+// cmd/go/internal/cache package.
+package cache
+
+import (
+ "bytes"
+ "crypto/sha256"
+ "encoding/hex"
+ "errors"
+ "fmt"
+ "io"
+ "io/fs"
+ "os"
+ "path/filepath"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/golangci/golangci-lint/internal/go/mmap"
+ "github.com/golangci/golangci-lint/internal/go/robustio"
+ "github.com/rogpeppe/go-internal/lockedfile"
+)
+
+// An ActionID is a cache action key, the hash of a complete description of a
+// repeatable computation (command line, environment variables,
+// input file contents, executable contents).
+type ActionID [HashSize]byte
+
+// An OutputID is a cache output key, the hash of an output of a computation.
+type OutputID [HashSize]byte
+
+// Cache is the interface as used by the cmd/go.
+type Cache interface {
+ // Get returns the cache entry for the provided ActionID.
+ // On miss, the error type should be of type *entryNotFoundError.
+ //
+ // After a success call to Get, OutputFile(Entry.OutputID) must
+ // exist on disk for until Close is called (at the end of the process).
+ Get(ActionID) (Entry, error)
+
+ // Put adds an item to the cache.
+ //
+ // The seeker is only used to seek to the beginning. After a call to Put,
+ // the seek position is not guaranteed to be in any particular state.
+ //
+ // As a special case, if the ReadSeeker is of type noVerifyReadSeeker,
+ // the verification from GODEBUG=goverifycache=1 is skipped.
+ //
+ // After a success call to Get, OutputFile(Entry.OutputID) must
+ // exist on disk for until Close is called (at the end of the process).
+ Put(ActionID, io.ReadSeeker) (_ OutputID, size int64, _ error)
+
+ // Close is called at the end of the go process. Implementations can do
+ // cache cleanup work at this phase, or wait for and report any errors from
+ // background cleanup work started earlier. Any cache trimming should in one
+ // process should not violate cause the invariants of this interface to be
+ // violated in another process. Namely, a cache trim from one process should
+ // not delete an ObjectID from disk that was recently Get or Put from
+ // another process. As a rule of thumb, don't trim things used in the last
+ // day.
+ Close() error
+
+ // OutputFile returns the path on disk where OutputID is stored.
+ //
+ // It's only called after a successful get or put call so it doesn't need
+ // to return an error; it's assumed that if the previous get or put succeeded,
+ // it's already on disk.
+ OutputFile(OutputID) string
+
+ // FuzzDir returns where fuzz files are stored.
+ FuzzDir() string
+}
+
+// A Cache is a package cache, backed by a file system directory tree.
+type DiskCache struct {
+ dir string
+ now func() time.Time
+}
+
+// Open opens and returns the cache in the given directory.
+//
+// It is safe for multiple processes on a single machine to use the
+// same cache directory in a local file system simultaneously.
+// They will coordinate using operating system file locks and may
+// duplicate effort but will not corrupt the cache.
+//
+// However, it is NOT safe for multiple processes on different machines
+// to share a cache directory (for example, if the directory were stored
+// in a network file system). File locking is notoriously unreliable in
+// network file systems and may not suffice to protect the cache.
+func Open(dir string) (*DiskCache, error) {
+ info, err := os.Stat(dir)
+ if err != nil {
+ return nil, err
+ }
+ if !info.IsDir() {
+ return nil, &fs.PathError{Op: "open", Path: dir, Err: fmt.Errorf("not a directory")}
+ }
+ for i := 0; i < 256; i++ {
+ name := filepath.Join(dir, fmt.Sprintf("%02x", i))
+ if err := os.MkdirAll(name, 0744); err != nil {
+ return nil, err
+ }
+ }
+ c := &DiskCache{
+ dir: dir,
+ now: time.Now,
+ }
+ return c, nil
+}
+
+// fileName returns the name of the file corresponding to the given id.
+func (c *DiskCache) fileName(id [HashSize]byte, key string) string {
+ return filepath.Join(c.dir, fmt.Sprintf("%02x", id[0]), fmt.Sprintf("%x", id)+"-"+key)
+}
+
+// An entryNotFoundError indicates that a cache entry was not found, with an
+// optional underlying reason.
+type entryNotFoundError struct {
+ Err error
+}
+
+func (e *entryNotFoundError) Error() string {
+ if e.Err == nil {
+ return "cache entry not found"
+ }
+ return fmt.Sprintf("cache entry not found: %v", e.Err)
+}
+
+func (e *entryNotFoundError) Unwrap() error {
+ return e.Err
+}
+
+const (
+ // action entry file is "v1 <hex id> <hex out> <decimal size space-padded to 20 bytes> <unixnano space-padded to 20 bytes>\n"
+ hexSize = HashSize * 2
+ entrySize = 2 + 1 + hexSize + 1 + hexSize + 1 + 20 + 1 + 20 + 1
+)
+
+// verify controls whether to run the cache in verify mode.
+// In verify mode, the cache always returns errMissing from Get
+// but then double-checks in Put that the data being written
+// exactly matches any existing entry. This provides an easy
+// way to detect program behavior that would have been different
+// had the cache entry been returned from Get.
+//
+// verify is enabled by setting the environment variable
+// GODEBUG=gocacheverify=1.
+var verify = false
+
+var errVerifyMode = errors.New("gocacheverify=1")
+
+// DebugTest is set when GODEBUG=gocachetest=1 is in the environment.
+var DebugTest = false
+
+// func init() { initEnv() }
+
+// var (
+// gocacheverify = godebug.New("gocacheverify")
+// gocachehash = godebug.New("gocachehash")
+// gocachetest = godebug.New("gocachetest")
+// )
+
+// func initEnv() {
+// if gocacheverify.Value() == "1" {
+// gocacheverify.IncNonDefault()
+// verify = true
+// }
+// if gocachehash.Value() == "1" {
+// gocachehash.IncNonDefault()
+// debugHash = true
+// }
+// if gocachetest.Value() == "1" {
+// gocachetest.IncNonDefault()
+// DebugTest = true
+// }
+// }
+
+// Get looks up the action ID in the cache,
+// returning the corresponding output ID and file size, if any.
+// Note that finding an output ID does not guarantee that the
+// saved file for that output ID is still available.
+func (c *DiskCache) Get(id ActionID) (Entry, error) {
+ if verify {
+ return Entry{}, &entryNotFoundError{Err: errVerifyMode}
+ }
+ return c.get(id)
+}
+
+type Entry struct {
+ OutputID OutputID
+ Size int64
+ Time time.Time // when added to cache
+}
+
+// get is Get but does not respect verify mode, so that Put can use it.
+func (c *DiskCache) get(id ActionID) (Entry, error) {
+ missing := func(reason error) (Entry, error) {
+ return Entry{}, &entryNotFoundError{Err: reason}
+ }
+ f, err := os.Open(c.fileName(id, "a"))
+ if err != nil {
+ return missing(err)
+ }
+ defer f.Close()
+ entry := make([]byte, entrySize+1) // +1 to detect whether f is too long
+ if n, err := io.ReadFull(f, entry); n > entrySize {
+ return missing(errors.New("too long"))
+ } else if err != io.ErrUnexpectedEOF {
+ if err == io.EOF {
+ return missing(errors.New("file is empty"))
+ }
+ return missing(err)
+ } else if n < entrySize {
+ return missing(errors.New("entry file incomplete"))
+ }
+ if entry[0] != 'v' || entry[1] != '1' || entry[2] != ' ' || entry[3+hexSize] != ' ' || entry[3+hexSize+1+hexSize] != ' ' || entry[3+hexSize+1+hexSize+1+20] != ' ' || entry[entrySize-1] != '\n' {
+ return missing(errors.New("invalid header"))
+ }
+ eid, entry := entry[3:3+hexSize], entry[3+hexSize:]
+ eout, entry := entry[1:1+hexSize], entry[1+hexSize:]
+ esize, entry := entry[1:1+20], entry[1+20:]
+ etime, entry := entry[1:1+20], entry[1+20:]
+ var buf [HashSize]byte
+ if _, err := hex.Decode(buf[:], eid); err != nil {
+ return missing(fmt.Errorf("decoding ID: %v", err))
+ } else if buf != id {
+ return missing(errors.New("mismatched ID"))
+ }
+ if _, err := hex.Decode(buf[:], eout); err != nil {
+ return missing(fmt.Errorf("decoding output ID: %v", err))
+ }
+ i := 0
+ for i < len(esize) && esize[i] == ' ' {
+ i++
+ }
+ size, err := strconv.ParseInt(string(esize[i:]), 10, 64)
+ if err != nil {
+ return missing(fmt.Errorf("parsing size: %v", err))
+ } else if size < 0 {
+ return missing(errors.New("negative size"))
+ }
+ i = 0
+ for i < len(etime) && etime[i] == ' ' {
+ i++
+ }
+ tm, err := strconv.ParseInt(string(etime[i:]), 10, 64)
+ if err != nil {
+ return missing(fmt.Errorf("parsing timestamp: %v", err))
+ } else if tm < 0 {
+ return missing(errors.New("negative timestamp"))
+ }
+
+ err = c.used(c.fileName(id, "a"))
+ if err != nil {
+ return Entry{}, fmt.Errorf("failed to mark %s as used: %w", c.fileName(id, "a"), err)
+ }
+
+ return Entry{buf, size, time.Unix(0, tm)}, nil
+}
+
+// GetFile looks up the action ID in the cache and returns
+// the name of the corresponding data file.
+func GetFile(c Cache, id ActionID) (file string, entry Entry, err error) {
+ entry, err = c.Get(id)
+ if err != nil {
+ return "", Entry{}, err
+ }
+ file = c.OutputFile(entry.OutputID)
+ info, err := os.Stat(file)
+ if err != nil {
+ return "", Entry{}, &entryNotFoundError{Err: err}
+ }
+ if info.Size() != entry.Size {
+ return "", Entry{}, &entryNotFoundError{Err: errors.New("file incomplete")}
+ }
+ return file, entry, nil
+}
+
+// GetBytes looks up the action ID in the cache and returns
+// the corresponding output bytes.
+// GetBytes should only be used for data that can be expected to fit in memory.
+func GetBytes(c Cache, id ActionID) ([]byte, Entry, error) {
+ entry, err := c.Get(id)
+ if err != nil {
+ return nil, entry, err
+ }
+ data, err := robustio.ReadFile(c.OutputFile(entry.OutputID))
+ if err != nil {
+ return nil, entry, err
+ }
+ if sha256.Sum256(data) != entry.OutputID {
+ return nil, entry, &entryNotFoundError{Err: errors.New("bad checksum")}
+ }
+ return data, entry, nil
+}
+
+// GetMmap looks up the action ID in the cache and returns
+// the corresponding output bytes.
+// GetMmap should only be used for data that can be expected to fit in memory.
+func GetMmap(c Cache, id ActionID) ([]byte, Entry, error) {
+ entry, err := c.Get(id)
+ if err != nil {
+ return nil, entry, err
+ }
+ md, err := mmap.Mmap(c.OutputFile(entry.OutputID))
+ if err != nil {
+ return nil, Entry{}, err
+ }
+ if int64(len(md.Data)) != entry.Size {
+ return nil, Entry{}, &entryNotFoundError{Err: errors.New("file incomplete")}
+ }
+ return md.Data, entry, nil
+}
+
+// OutputFile returns the name of the cache file storing output with the given OutputID.
+func (c *DiskCache) OutputFile(out OutputID) string {
+ file := c.fileName(out, "d")
+ c.used(file)
+ return file
+}
+
+// Time constants for cache expiration.
+//
+// We set the mtime on a cache file on each use, but at most one per mtimeInterval (1 hour),
+// to avoid causing many unnecessary inode updates. The mtimes therefore
+// roughly reflect "time of last use" but may in fact be older by at most an hour.
+//
+// We scan the cache for entries to delete at most once per trimInterval (1 day).
+//
+// When we do scan the cache, we delete entries that have not been used for
+// at least trimLimit (5 days). Statistics gathered from a month of usage by
+// Go developers found that essentially all reuse of cached entries happened
+// within 5 days of the previous reuse. See golang.org/issue/22990.
+const (
+ mtimeInterval = 1 * time.Hour
+ trimInterval = 24 * time.Hour
+ trimLimit = 5 * 24 * time.Hour
+)
+
+// used makes a best-effort attempt to update mtime on file,
+// so that mtime reflects cache access time.
+//
+// Because the reflection only needs to be approximate,
+// and to reduce the amount of disk activity caused by using
+// cache entries, used only updates the mtime if the current
+// mtime is more than an hour old. This heuristic eliminates
+// nearly all of the mtime updates that would otherwise happen,
+// while still keeping the mtimes useful for cache trimming.
+func (c *DiskCache) used(file string) error {
+ info, err := os.Stat(file)
+ if err == nil && c.now().Sub(info.ModTime()) < mtimeInterval {
+ return nil
+ }
+
+ if err != nil {
+ if os.IsNotExist(err) {
+ return &entryNotFoundError{Err: err}
+ }
+ return &entryNotFoundError{Err: fmt.Errorf("failed to stat file %s: %w", file, err)}
+ }
+
+ err = os.Chtimes(file, c.now(), c.now())
+ if err != nil {
+ return fmt.Errorf("failed to change time of file %s: %w", file, err)
+ }
+
+ return nil
+}
+
+func (c *DiskCache) Close() error { return c.Trim() }
+
+// Trim removes old cache entries that are likely not to be reused.
+func (c *DiskCache) Trim() error {
+ now := c.now()
+
+ // We maintain in dir/trim.txt the time of the last completed cache trim.
+ // If the cache has been trimmed recently enough, do nothing.
+ // This is the common case.
+ // If the trim file is corrupt, detected if the file can't be parsed, or the
+ // trim time is too far in the future, attempt the trim anyway. It's possible that
+ // the cache was full when the corruption happened. Attempting a trim on
+ // an empty cache is cheap, so there wouldn't be a big performance hit in that case.
+ if data, err := lockedfile.Read(filepath.Join(c.dir, "trim.txt")); err == nil {
+ if t, err := strconv.ParseInt(strings.TrimSpace(string(data)), 10, 64); err == nil {
+ lastTrim := time.Unix(t, 0)
+ if d := now.Sub(lastTrim); d < trimInterval && d > -mtimeInterval {
+ return nil
+ }
+ }
+ }
+
+ // Trim each of the 256 subdirectories.
+ // We subtract an additional mtimeInterval
+ // to account for the imprecision of our "last used" mtimes.
+ cutoff := now.Add(-trimLimit - mtimeInterval)
+ for i := 0; i < 256; i++ {
+ subdir := filepath.Join(c.dir, fmt.Sprintf("%02x", i))
+ c.trimSubdir(subdir, cutoff)
+ }
+
+ // Ignore errors from here: if we don't write the complete timestamp, the
+ // cache will appear older than it is, and we'll trim it again next time.
+ var b bytes.Buffer
+ fmt.Fprintf(&b, "%d", now.Unix())
+ if err := lockedfile.Write(filepath.Join(c.dir, "trim.txt"), &b, 0666); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+// trimSubdir trims a single cache subdirectory.
+func (c *DiskCache) trimSubdir(subdir string, cutoff time.Time) {
+ // Read all directory entries from subdir before removing
+ // any files, in case removing files invalidates the file offset
+ // in the directory scan. Also, ignore error from f.Readdirnames,
+ // because we don't care about reporting the error and we still
+ // want to process any entries found before the error.
+ f, err := os.Open(subdir)
+ if err != nil {
+ return
+ }
+ names, _ := f.Readdirnames(-1)
+ f.Close()
+
+ for _, name := range names {
+ // Remove only cache entries (xxxx-a and xxxx-d).
+ if !strings.HasSuffix(name, "-a") && !strings.HasSuffix(name, "-d") {
+ continue
+ }
+ entry := filepath.Join(subdir, name)
+ info, err := os.Stat(entry)
+ if err == nil && info.ModTime().Before(cutoff) {
+ os.Remove(entry)
+ }
+ }
+}
+
+// putIndexEntry adds an entry to the cache recording that executing the action
+// with the given id produces an output with the given output id (hash) and size.
+func (c *DiskCache) putIndexEntry(id ActionID, out OutputID, size int64, allowVerify bool) error {
+ // Note: We expect that for one reason or another it may happen
+ // that repeating an action produces a different output hash
+ // (for example, if the output contains a time stamp or temp dir name).
+ // While not ideal, this is also not a correctness problem, so we
+ // don't make a big deal about it. In particular, we leave the action
+ // cache entries writable specifically so that they can be overwritten.
+ //
+ // Setting GODEBUG=gocacheverify=1 does make a big deal:
+ // in verify mode we are double-checking that the cache entries
+ // are entirely reproducible. As just noted, this may be unrealistic
+ // in some cases but the check is also useful for shaking out real bugs.
+ entry := fmt.Sprintf("v1 %x %x %20d %20d\n", id, out, size, time.Now().UnixNano())
+ if verify && allowVerify {
+ old, err := c.get(id)
+ if err == nil && (old.OutputID != out || old.Size != size) {
+ // panic to show stack trace, so we can see what code is generating this cache entry.
+ msg := fmt.Sprintf("go: internal cache error: cache verify failed: id=%x changed:<<<\n%s\n>>>\nold: %x %d\nnew: %x %d", id, reverseHash(id), out, size, old.OutputID, old.Size)
+ panic(msg)
+ }
+ }
+ file := c.fileName(id, "a")
+
+ // Copy file to cache directory.
+ mode := os.O_WRONLY | os.O_CREATE
+ f, err := os.OpenFile(file, mode, 0666)
+ if err != nil {
+ return err
+ }
+ _, err = f.WriteString(entry)
+ if err == nil {
+ // Truncate the file only *after* writing it.
+ // (This should be a no-op, but truncate just in case of previous corruption.)
+ //
+ // This differs from os.WriteFile, which truncates to 0 *before* writing
+ // via os.O_TRUNC. Truncating only after writing ensures that a second write
+ // of the same content to the same file is idempotent, and does not — even
+ // temporarily! — undo the effect of the first write.
+ err = f.Truncate(int64(len(entry)))
+ }
+ if closeErr := f.Close(); err == nil {
+ err = closeErr
+ }
+ if err != nil {
+ // TODO(bcmills): This Remove potentially races with another go command writing to file.
+ // Can we eliminate it?
+ os.Remove(file)
+ return err
+ }
+ err = os.Chtimes(file, c.now(), c.now()) // mainly for tests
+ if err != nil {
+ return fmt.Errorf("failed to change time of file %s: %w", file, err)
+ }
+
+ return nil
+}
+
+// noVerifyReadSeeker is an io.ReadSeeker wrapper sentinel type
+// that says that Cache.Put should skip the verify check
+// (from GODEBUG=goverifycache=1).
+type noVerifyReadSeeker struct {
+ io.ReadSeeker
+}
+
+// Put stores the given output in the cache as the output for the action ID.
+// It may read file twice. The content of file must not change between the two passes.
+func (c *DiskCache) Put(id ActionID, file io.ReadSeeker) (OutputID, int64, error) {
+ wrapper, isNoVerify := file.(noVerifyReadSeeker)
+ if isNoVerify {
+ file = wrapper.ReadSeeker
+ }
+ return c.put(id, file, !isNoVerify)
+}
+
+// PutNoVerify is like Put but disables the verify check
+// when GODEBUG=goverifycache=1 is set.
+// It is meant for data that is OK to cache but that we expect to vary slightly from run to run,
+// like test output containing times and the like.
+func PutNoVerify(c Cache, id ActionID, file io.ReadSeeker) (OutputID, int64, error) {
+ return c.Put(id, noVerifyReadSeeker{file})
+}
+
+func (c *DiskCache) put(id ActionID, file io.ReadSeeker, allowVerify bool) (OutputID, int64, error) {
+ // Compute output ID.
+ h := sha256.New()
+ if _, err := file.Seek(0, 0); err != nil {
+ return OutputID{}, 0, err
+ }
+ size, err := io.Copy(h, file)
+ if err != nil {
+ return OutputID{}, 0, err
+ }
+ var out OutputID
+ h.Sum(out[:0])
+
+ // Copy to cached output file (if not already present).
+ if err := c.copyFile(file, out, size); err != nil {
+ return out, size, err
+ }
+
+ // Add to cache index.
+ return out, size, c.putIndexEntry(id, out, size, allowVerify)
+}
+
+// PutBytes stores the given bytes in the cache as the output for the action ID.
+func PutBytes(c Cache, id ActionID, data []byte) error {
+ _, _, err := c.Put(id, bytes.NewReader(data))
+ return err
+}
+
+// copyFile copies file into the cache, expecting it to have the given
+// output ID and size, if that file is not present already.
+func (c *DiskCache) copyFile(file io.ReadSeeker, out OutputID, size int64) error {
+ name := c.fileName(out, "d")
+ info, err := os.Stat(name)
+ if err == nil && info.Size() == size {
+ // Check hash.
+ if f, err := os.Open(name); err == nil {
+ h := sha256.New()
+ _, copyErr := io.Copy(h, f)
+ if copyErr != nil {
+ return fmt.Errorf("failed to copy to sha256: %w", copyErr)
+ }
+
+ f.Close()
+ var out2 OutputID
+ h.Sum(out2[:0])
+ if out == out2 {
+ return nil
+ }
+ }
+ // Hash did not match. Fall through and rewrite file.
+ }
+
+ // Copy file to cache directory.
+ mode := os.O_RDWR | os.O_CREATE
+ if err == nil && info.Size() > size { // shouldn't happen but fix in case
+ mode |= os.O_TRUNC
+ }
+ f, err := os.OpenFile(name, mode, 0666)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+ if size == 0 {
+ // File now exists with correct size.
+ // Only one possible zero-length file, so contents are OK too.
+ // Early return here makes sure there's a "last byte" for code below.
+ return nil
+ }
+
+ // From here on, if any of the I/O writing the file fails,
+ // we make a best-effort attempt to truncate the file f
+ // before returning, to avoid leaving bad bytes in the file.
+
+ // Copy file to f, but also into h to double-check hash.
+ if _, err := file.Seek(0, 0); err != nil {
+ f.Truncate(0)
+ return err
+ }
+ h := sha256.New()
+ w := io.MultiWriter(f, h)
+ if _, err := io.CopyN(w, file, size-1); err != nil {
+ f.Truncate(0)
+ return err
+ }
+ // Check last byte before writing it; writing it will make the size match
+ // what other processes expect to find and might cause them to start
+ // using the file.
+ buf := make([]byte, 1)
+ if _, err := file.Read(buf); err != nil {
+ f.Truncate(0)
+ return err
+ }
+ n, wErr := h.Write(buf)
+ if n != len(buf) {
+ return fmt.Errorf("wrote to hash %d/%d bytes with error %w", n, len(buf), wErr)
+ }
+
+ sum := h.Sum(nil)
+ if !bytes.Equal(sum, out[:]) {
+ f.Truncate(0)
+ return fmt.Errorf("file content changed underfoot")
+ }
+
+ // Commit cache file entry.
+ if _, err := f.Write(buf); err != nil {
+ f.Truncate(0)
+ return err
+ }
+ if err := f.Close(); err != nil {
+ // Data might not have been written,
+ // but file may look like it is the right size.
+ // To be extra careful, remove cached file.
+ os.Remove(name)
+ return err
+ }
+ err = os.Chtimes(name, c.now(), c.now()) // mainly for tests
+ if err != nil {
+ return fmt.Errorf("failed to change time of file %s: %w", name, err)
+ }
+
+ return nil
+}
+
+// FuzzDir returns a subdirectory within the cache for storing fuzzing data.
+// The subdirectory may not exist.
+//
+// This directory is managed by the internal/fuzz package. Files in this
+// directory aren't removed by the 'go clean -cache' command or by Trim.
+// They may be removed with 'go clean -fuzzcache'.
+//
+// TODO(#48526): make Trim remove unused files from this directory.
+func (c *DiskCache) FuzzDir() string {
+ return filepath.Join(c.dir, "fuzz")
+}
diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/cache/cache_gcil.go b/vendor/github.com/golangci/golangci-lint/internal/go/cache/cache_gcil.go
new file mode 100644
index 000000000..b4f07738e
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/cache/cache_gcil.go
@@ -0,0 +1,12 @@
+package cache
+
+import (
+ "errors"
+)
+
+// IsErrMissing allows to access to the internal error.
+// TODO(ldez) the handling of this error inside runner_action.go should be refactored.
+func IsErrMissing(err error) bool {
+ var errENF *entryNotFoundError
+ return errors.As(err, &errENF)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/internal/cache/default.go b/vendor/github.com/golangci/golangci-lint/internal/go/cache/default.go
index 399cc84cf..7232f1ef3 100644
--- a/vendor/github.com/golangci/golangci-lint/internal/cache/default.go
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/cache/default.go
@@ -6,23 +6,22 @@ package cache
import (
"fmt"
- "log"
+ base "log"
"os"
"path/filepath"
"sync"
)
-const envGolangciLintCache = "GOLANGCI_LINT_CACHE"
-
// Default returns the default cache to use.
-func Default() (*Cache, error) {
+// It never returns nil.
+func Default() Cache {
defaultOnce.Do(initDefaultCache)
- return defaultCache, defaultDirErr
+ return defaultCache
}
var (
defaultOnce sync.Once
- defaultCache *Cache
+ defaultCache Cache
)
// cacheREADME is a message stored in a README in the cache directory.
@@ -34,32 +33,46 @@ const cacheREADME = `This directory holds cached build artifacts from golangci-l
// initDefaultCache does the work of finding the default cache
// the first time Default is called.
func initDefaultCache() {
- dir := DefaultDir()
+ dir, _ := DefaultDir()
+ if dir == "off" {
+ if defaultDirErr != nil {
+ base.Fatalf("build cache is required, but could not be located: %v", defaultDirErr)
+ }
+ base.Fatalf("build cache is disabled by %s=off, but required", envGolangciLintCache)
+ }
if err := os.MkdirAll(dir, 0744); err != nil {
- log.Fatalf("failed to initialize build cache at %s: %s\n", dir, err)
+ base.Fatalf("failed to initialize build cache at %s: %s\n", dir, err)
}
if _, err := os.Stat(filepath.Join(dir, "README")); err != nil {
// Best effort.
if wErr := os.WriteFile(filepath.Join(dir, "README"), []byte(cacheREADME), 0666); wErr != nil {
- log.Fatalf("Failed to write README file to cache dir %s: %s", dir, err)
+ base.Fatalf("Failed to write README file to cache dir %s: %s", dir, err)
}
}
- c, err := Open(dir)
+ diskCache, err := Open(dir)
if err != nil {
- log.Fatalf("failed to initialize build cache at %s: %s\n", dir, err)
+ base.Fatalf("failed to initialize build cache at %s: %s\n", dir, err)
+ }
+
+ if v := os.Getenv(envGolangciLintCacheProg); v != "" {
+ defaultCache = startCacheProg(v, diskCache)
+ } else {
+ defaultCache = diskCache
}
- defaultCache = c
}
var (
- defaultDirOnce sync.Once
- defaultDir string
- defaultDirErr error
+ defaultDirOnce sync.Once
+ defaultDir string
+ defaultDirChanged bool // effective value differs from $GOLANGCI_LINT_CACHE
+ defaultDirErr error
)
// DefaultDir returns the effective GOLANGCI_LINT_CACHE setting.
-func DefaultDir() string {
+// It returns "off" if the cache is disabled,
+// and reports whether the effective value differs from GOLANGCI_LINT_CACHE.
+func DefaultDir() (string, bool) {
// Save the result of the first call to DefaultDir for later use in
// initDefaultCache. cmd/go/main.go explicitly sets GOCACHE so that
// subprocesses will inherit it, but that means initDefaultCache can't
@@ -67,10 +80,12 @@ func DefaultDir() string {
defaultDirOnce.Do(func() {
defaultDir = os.Getenv(envGolangciLintCache)
- if filepath.IsAbs(defaultDir) {
- return
- }
if defaultDir != "" {
+ defaultDirChanged = true
+ if filepath.IsAbs(defaultDir) || defaultDir == "off" {
+ return
+ }
+ defaultDir = "off"
defaultDirErr = fmt.Errorf("%s is not an absolute path", envGolangciLintCache)
return
}
@@ -78,11 +93,13 @@ func DefaultDir() string {
// Compute default location.
dir, err := os.UserCacheDir()
if err != nil {
+ defaultDir = "off"
+ defaultDirChanged = true
defaultDirErr = fmt.Errorf("%s is not defined and %w", envGolangciLintCache, err)
return
}
defaultDir = filepath.Join(dir, "golangci-lint")
})
- return defaultDir
+ return defaultDir, defaultDirChanged
}
diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/cache/default_gcil.go b/vendor/github.com/golangci/golangci-lint/internal/go/cache/default_gcil.go
new file mode 100644
index 000000000..a801f67f4
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/cache/default_gcil.go
@@ -0,0 +1,6 @@
+package cache
+
+const (
+ envGolangciLintCache = "GOLANGCI_LINT_CACHE"
+ envGolangciLintCacheProg = "GOLANGCI_LINT_CACHEPROG"
+)
diff --git a/vendor/github.com/golangci/golangci-lint/internal/cache/hash.go b/vendor/github.com/golangci/golangci-lint/internal/go/cache/hash.go
index 4ce79e325..d5169dd4c 100644
--- a/vendor/github.com/golangci/golangci-lint/internal/cache/hash.go
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/cache/hash.go
@@ -11,6 +11,7 @@ import (
"hash"
"io"
"os"
+ "strings"
"sync"
)
@@ -36,22 +37,26 @@ type Hash struct {
// which are still addressed by unsalted SHA256.
var hashSalt []byte
-func SetSalt(b []byte) {
- hashSalt = b
+// stripExperiment strips any GOEXPERIMENT configuration from the Go
+// version string.
+func stripExperiment(version string) string {
+ if i := strings.Index(version, " X:"); i >= 0 {
+ return version[:i]
+ }
+ return version
}
// Subkey returns an action ID corresponding to mixing a parent
// action ID with a string description of the subkey.
func Subkey(parent ActionID, desc string) (ActionID, error) {
h := sha256.New()
- const subkeyPrefix = "subkey:"
- if n, err := h.Write([]byte(subkeyPrefix)); n != len(subkeyPrefix) {
- return ActionID{}, fmt.Errorf("wrote %d/%d bytes of subkey prefix with error %s", n, len(subkeyPrefix), err)
- }
- if n, err := h.Write(parent[:]); n != len(parent) {
+ h.Write([]byte(("subkey:")))
+ n, err := h.Write(parent[:])
+ if n != len(parent) {
return ActionID{}, fmt.Errorf("wrote %d/%d bytes of parent with error %s", n, len(parent), err)
}
- if n, err := h.Write([]byte(desc)); n != len(desc) {
+ n, err = h.Write([]byte(desc))
+ if n != len(desc) {
return ActionID{}, fmt.Errorf("wrote %d/%d bytes of desc with error %s", n, len(desc), err)
}
@@ -75,7 +80,8 @@ func NewHash(name string) (*Hash, error) {
if debugHash {
fmt.Fprintf(os.Stderr, "HASH[%s]\n", h.name)
}
- if n, err := h.Write(hashSalt); n != len(hashSalt) {
+ n, err := h.Write(hashSalt)
+ if n != len(hashSalt) {
return nil, fmt.Errorf("wrote %d/%d bytes of hash salt with error %s", n, len(hashSalt), err)
}
if verify {
diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/cache/hash_gcil.go b/vendor/github.com/golangci/golangci-lint/internal/go/cache/hash_gcil.go
new file mode 100644
index 000000000..08749036b
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/cache/hash_gcil.go
@@ -0,0 +1,5 @@
+package cache
+
+func SetSalt(b []byte) {
+ hashSalt = b
+}
diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/cache/prog.go b/vendor/github.com/golangci/golangci-lint/internal/go/cache/prog.go
new file mode 100644
index 000000000..a93740a3c
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/cache/prog.go
@@ -0,0 +1,428 @@
+// Copyright 2023 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package cache
+
+import (
+ "bufio"
+ "context"
+ "crypto/sha256"
+ "encoding/base64"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "io"
+ "log"
+ base "log"
+ "os"
+ "os/exec"
+ "sync"
+ "sync/atomic"
+ "time"
+
+ "github.com/golangci/golangci-lint/internal/go/quoted"
+)
+
+// ProgCache implements Cache via JSON messages over stdin/stdout to a child
+// helper process which can then implement whatever caching policy/mechanism it
+// wants.
+//
+// See https://github.com/golang/go/issues/59719
+type ProgCache struct {
+ cmd *exec.Cmd
+ stdout io.ReadCloser // from the child process
+ stdin io.WriteCloser // to the child process
+ bw *bufio.Writer // to stdin
+ jenc *json.Encoder // to bw
+
+ // can are the commands that the child process declared that it supports.
+ // This is effectively the versioning mechanism.
+ can map[ProgCmd]bool
+
+ // fuzzDirCache is another Cache implementation to use for the FuzzDir
+ // method. In practice this is the default GOCACHE disk-based
+ // implementation.
+ //
+ // TODO(bradfitz): maybe this isn't ideal. But we'd need to extend the Cache
+ // interface and the fuzzing callers to be less disk-y to do more here.
+ fuzzDirCache Cache
+
+ closing atomic.Bool
+ ctx context.Context // valid until Close via ctxClose
+ ctxCancel context.CancelFunc // called on Close
+ readLoopDone chan struct{} // closed when readLoop returns
+
+ mu sync.Mutex // guards following fields
+ nextID int64
+ inFlight map[int64]chan<- *ProgResponse
+ outputFile map[OutputID]string // object => abs path on disk
+
+ // writeMu serializes writing to the child process.
+ // It must never be held at the same time as mu.
+ writeMu sync.Mutex
+}
+
+// ProgCmd is a command that can be issued to a child process.
+//
+// If the interface needs to grow, we can add new commands or new versioned
+// commands like "get2".
+type ProgCmd string
+
+const (
+ cmdGet = ProgCmd("get")
+ cmdPut = ProgCmd("put")
+ cmdClose = ProgCmd("close")
+)
+
+// ProgRequest is the JSON-encoded message that's sent from cmd/go to
+// the GOLANGCI_LINT_CACHEPROG child process over stdin. Each JSON object is on its
+// own line. A ProgRequest of Type "put" with BodySize > 0 will be followed
+// by a line containing a base64-encoded JSON string literal of the body.
+type ProgRequest struct {
+ // ID is a unique number per process across all requests.
+ // It must be echoed in the ProgResponse from the child.
+ ID int64
+
+ // Command is the type of request.
+ // The cmd/go tool will only send commands that were declared
+ // as supported by the child.
+ Command ProgCmd
+
+ // ActionID is non-nil for get and puts.
+ ActionID []byte `json:",omitempty"` // or nil if not used
+
+ // ObjectID is set for Type "put" and "output-file".
+ ObjectID []byte `json:",omitempty"` // or nil if not used
+
+ // Body is the body for "put" requests. It's sent after the JSON object
+ // as a base64-encoded JSON string when BodySize is non-zero.
+ // It's sent as a separate JSON value instead of being a struct field
+ // send in this JSON object so large values can be streamed in both directions.
+ // The base64 string body of a ProgRequest will always be written
+ // immediately after the JSON object and a newline.
+ Body io.Reader `json:"-"`
+
+ // BodySize is the number of bytes of Body. If zero, the body isn't written.
+ BodySize int64 `json:",omitempty"`
+}
+
+// ProgResponse is the JSON response from the child process to cmd/go.
+//
+// With the exception of the first protocol message that the child writes to its
+// stdout with ID==0 and KnownCommands populated, these are only sent in
+// response to a ProgRequest from cmd/go.
+//
+// ProgResponses can be sent in any order. The ID must match the request they're
+// replying to.
+type ProgResponse struct {
+ ID int64 // that corresponds to ProgRequest; they can be answered out of order
+ Err string `json:",omitempty"` // if non-empty, the error
+
+ // KnownCommands is included in the first message that cache helper program
+ // writes to stdout on startup (with ID==0). It includes the
+ // ProgRequest.Command types that are supported by the program.
+ //
+ // This lets us extend the protocol gracefully over time (adding "get2",
+ // etc), or fail gracefully when needed. It also lets us verify the program
+ // wants to be a cache helper.
+ KnownCommands []ProgCmd `json:",omitempty"`
+
+ // For Get requests.
+
+ Miss bool `json:",omitempty"` // cache miss
+ OutputID []byte `json:",omitempty"`
+ Size int64 `json:",omitempty"` // in bytes
+	Time     *time.Time `json:",omitempty"` // an Entry.Time; when the object was added to the cache
+
+ // DiskPath is the absolute path on disk of the ObjectID corresponding
+ // a "get" request's ActionID (on cache hit) or a "put" request's
+ // provided ObjectID.
+ DiskPath string `json:",omitempty"`
+}
+
+// startCacheProg starts the prog binary (with optional space-separated flags)
+// and returns a Cache implementation that talks to it.
+//
+// It blocks a few seconds to wait for the child process to successfully start
+// and advertise its capabilities.
+func startCacheProg(progAndArgs string, fuzzDirCache Cache) Cache {
+ if fuzzDirCache == nil {
+ panic("missing fuzzDirCache")
+ }
+ args, err := quoted.Split(progAndArgs)
+ if err != nil {
+ base.Fatalf("%s args: %v", envGolangciLintCacheProg, err)
+ }
+ var prog string
+ if len(args) > 0 {
+ prog = args[0]
+ args = args[1:]
+ }
+
+ ctx, ctxCancel := context.WithCancel(context.Background())
+
+ cmd := exec.CommandContext(ctx, prog, args...)
+ out, err := cmd.StdoutPipe()
+ if err != nil {
+ base.Fatalf("StdoutPipe to %s: %v", envGolangciLintCacheProg, err)
+ }
+ in, err := cmd.StdinPipe()
+ if err != nil {
+ base.Fatalf("StdinPipe to %s: %v", envGolangciLintCacheProg, err)
+ }
+ cmd.Stderr = os.Stderr
+ cmd.Cancel = in.Close
+
+ if err := cmd.Start(); err != nil {
+ base.Fatalf("error starting %s program %q: %v", envGolangciLintCacheProg, prog, err)
+ }
+
+ pc := &ProgCache{
+ ctx: ctx,
+ ctxCancel: ctxCancel,
+ fuzzDirCache: fuzzDirCache,
+ cmd: cmd,
+ stdout: out,
+ stdin: in,
+ bw: bufio.NewWriter(in),
+ inFlight: make(map[int64]chan<- *ProgResponse),
+ outputFile: make(map[OutputID]string),
+ readLoopDone: make(chan struct{}),
+ }
+
+ // Register our interest in the initial protocol message from the child to
+ // us, saying what it can do.
+ capResc := make(chan *ProgResponse, 1)
+ pc.inFlight[0] = capResc
+
+ pc.jenc = json.NewEncoder(pc.bw)
+ go pc.readLoop(pc.readLoopDone)
+
+ // Give the child process a few seconds to report its capabilities. This
+ // should be instant and not require any slow work by the program.
+ timer := time.NewTicker(5 * time.Second)
+ defer timer.Stop()
+ for {
+ select {
+ case <-timer.C:
+ log.Printf("# still waiting for %s %v ...", envGolangciLintCacheProg, prog)
+ case capRes := <-capResc:
+ can := map[ProgCmd]bool{}
+ for _, cmd := range capRes.KnownCommands {
+ can[cmd] = true
+ }
+ if len(can) == 0 {
+ base.Fatalf("%s %v declared no supported commands", envGolangciLintCacheProg, prog)
+ }
+ pc.can = can
+ return pc
+ }
+ }
+}
+
+func (c *ProgCache) readLoop(readLoopDone chan<- struct{}) {
+ defer close(readLoopDone)
+ jd := json.NewDecoder(c.stdout)
+ for {
+ res := new(ProgResponse)
+ if err := jd.Decode(res); err != nil {
+ if c.closing.Load() {
+ return // quietly
+ }
+ if err == io.EOF {
+ c.mu.Lock()
+ inFlight := len(c.inFlight)
+ c.mu.Unlock()
+ base.Fatalf("%s exited pre-Close with %v pending requests", envGolangciLintCacheProg, inFlight)
+ }
+ base.Fatalf("error reading JSON from %s: %v", envGolangciLintCacheProg, err)
+ }
+ c.mu.Lock()
+ ch, ok := c.inFlight[res.ID]
+ delete(c.inFlight, res.ID)
+ c.mu.Unlock()
+ if ok {
+ ch <- res
+ } else {
+ base.Fatalf("%s sent response for unknown request ID %v", envGolangciLintCacheProg, res.ID)
+ }
+ }
+}
+
+func (c *ProgCache) send(ctx context.Context, req *ProgRequest) (*ProgResponse, error) {
+ resc := make(chan *ProgResponse, 1)
+ if err := c.writeToChild(req, resc); err != nil {
+ return nil, err
+ }
+ select {
+ case res := <-resc:
+ if res.Err != "" {
+ return nil, errors.New(res.Err)
+ }
+ return res, nil
+ case <-ctx.Done():
+ return nil, ctx.Err()
+ }
+}
+
+func (c *ProgCache) writeToChild(req *ProgRequest, resc chan<- *ProgResponse) (err error) {
+	c.mu.Lock()
+	c.nextID++
+	req.ID = c.nextID
+	c.inFlight[req.ID] = resc
+	c.mu.Unlock()
+
+	defer func() {
+		if err != nil {
+			c.mu.Lock()
+			delete(c.inFlight, req.ID)
+			c.mu.Unlock()
+		}
+	}()
+
+	c.writeMu.Lock()
+	defer c.writeMu.Unlock()
+
+	if err := c.jenc.Encode(req); err != nil {
+		return err
+	}
+	if err := c.bw.WriteByte('\n'); err != nil {
+		return err
+	}
+	if req.Body != nil && req.BodySize > 0 {
+		if err := c.bw.WriteByte('"'); err != nil {
+			return err
+		}
+		e := base64.NewEncoder(base64.StdEncoding, c.bw)
+		wrote, err := io.Copy(e, req.Body)
+		if err != nil {
+			return err
+		}
+		if err := e.Close(); err != nil { // Close flushes the final partial base64 block; its error must propagate (was "return nil")
+			return err
+		}
+		if wrote != req.BodySize {
+			return fmt.Errorf("short write writing body to %s for action %x, object %x: wrote %v; expected %v",
+				envGolangciLintCacheProg, req.ActionID, req.ObjectID, wrote, req.BodySize)
+		}
+		if _, err := c.bw.WriteString("\"\n"); err != nil {
+			return err
+		}
+	}
+	if err := c.bw.Flush(); err != nil {
+		return err
+	}
+	return nil
+}
+
+func (c *ProgCache) Get(a ActionID) (Entry, error) {
+ if !c.can[cmdGet] {
+ // They can't do a "get". Maybe they're a write-only cache.
+ //
+ // TODO(bradfitz,bcmills): figure out the proper error type here. Maybe
+ // errors.ErrUnsupported? Is entryNotFoundError even appropriate? There
+ // might be places where we rely on the fact that a recent Put can be
+ // read through a corresponding Get. Audit callers and check, and document
+ // error types on the Cache interface.
+ return Entry{}, &entryNotFoundError{}
+ }
+ res, err := c.send(c.ctx, &ProgRequest{
+ Command: cmdGet,
+ ActionID: a[:],
+ })
+ if err != nil {
+ return Entry{}, err // TODO(bradfitz): or entryNotFoundError? Audit callers.
+ }
+ if res.Miss {
+ return Entry{}, &entryNotFoundError{}
+ }
+ e := Entry{
+ Size: res.Size,
+ }
+ if res.Time != nil {
+ e.Time = *res.Time
+ } else {
+ e.Time = time.Now()
+ }
+ if res.DiskPath == "" {
+ return Entry{}, &entryNotFoundError{fmt.Errorf("%s didn't populate DiskPath on get hit", envGolangciLintCacheProg)}
+ }
+ if copy(e.OutputID[:], res.OutputID) != len(res.OutputID) {
+ return Entry{}, &entryNotFoundError{errors.New("incomplete ProgResponse OutputID")}
+ }
+ c.noteOutputFile(e.OutputID, res.DiskPath)
+ return e, nil
+}
+
+func (c *ProgCache) noteOutputFile(o OutputID, diskPath string) {
+ c.mu.Lock()
+ defer c.mu.Unlock()
+ c.outputFile[o] = diskPath
+}
+
+func (c *ProgCache) OutputFile(o OutputID) string {
+ c.mu.Lock()
+ defer c.mu.Unlock()
+ return c.outputFile[o]
+}
+
+func (c *ProgCache) Put(a ActionID, file io.ReadSeeker) (_ OutputID, size int64, _ error) {
+ // Compute output ID.
+ h := sha256.New()
+ if _, err := file.Seek(0, 0); err != nil {
+ return OutputID{}, 0, err
+ }
+ size, err := io.Copy(h, file)
+ if err != nil {
+ return OutputID{}, 0, err
+ }
+ var out OutputID
+ h.Sum(out[:0])
+
+ if _, err := file.Seek(0, 0); err != nil {
+ return OutputID{}, 0, err
+ }
+
+ if !c.can[cmdPut] {
+ // Child is a read-only cache. Do nothing.
+ return out, size, nil
+ }
+
+ res, err := c.send(c.ctx, &ProgRequest{
+ Command: cmdPut,
+ ActionID: a[:],
+ ObjectID: out[:],
+ Body: file,
+ BodySize: size,
+ })
+ if err != nil {
+ return OutputID{}, 0, err
+ }
+ if res.DiskPath == "" {
+ return OutputID{}, 0, fmt.Errorf("%s didn't return DiskPath in put response", envGolangciLintCacheProg)
+ }
+ c.noteOutputFile(out, res.DiskPath)
+ return out, size, err
+}
+
+func (c *ProgCache) Close() error {
+ c.closing.Store(true)
+ var err error
+
+ // First write a "close" message to the child so it can exit nicely
+ // and clean up if it wants. Only after that exchange do we cancel
+ // the context that kills the process.
+ if c.can[cmdClose] {
+ _, err = c.send(c.ctx, &ProgRequest{Command: cmdClose})
+ }
+ c.ctxCancel()
+ <-c.readLoopDone
+ return err
+}
+
+func (c *ProgCache) FuzzDir() string {
+ // TODO(bradfitz): figure out what to do here. For now just use the
+ // disk-based default.
+ return c.fuzzDirCache.FuzzDir()
+}
diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/cache/readme.md b/vendor/github.com/golangci/golangci-lint/internal/go/cache/readme.md
new file mode 100644
index 000000000..5be600e42
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/cache/readme.md
@@ -0,0 +1,51 @@
+# cache
+
+Extracted from `go/src/cmd/go/internal/cache/`.
+
+The main modifications are:
+- The errors management
+ - Some methods return error.
+ - Some errors are returned instead of being ignored.
+- The name of the env vars:
+ - `GOCACHE` -> `GOLANGCI_LINT_CACHE`
+ - `GOCACHEPROG` -> `GOLANGCI_LINT_CACHEPROG`
+
+## History
+
+- https://github.com/golangci/golangci-lint/pull/5100
+ - Move package from `internal/cache` to `internal/go/cache`
+- https://github.com/golangci/golangci-lint/pull/5098
+ - sync with go1.23.2
+ - sync with go1.22.8
+ - sync with go1.21.13
+ - sync with go1.20.14
+ - sync with go1.19.13
+ - sync with go1.18.10
+ - sync with go1.17.13
+ - sync with go1.16.15
+ - sync with go1.15.15
+ - sync with go1.14.15
+
+## Previous History
+
+Based on the initial PR/commit, the base is a mix between go1.12 and go1.13:
+- cache.go (go1.13)
+- cache_test.go (go1.12?)
+- default.go (go1.12?)
+- hash.go (go1.13 and go1.12 are identical)
+- hash_test.go -> (go1.12?)
+
+Adapted for golangci-lint:
+- https://github.com/golangci/golangci-lint/pull/699: initial code (contains modifications of the files)
+- https://github.com/golangci/golangci-lint/pull/779: just a nolint (`cache.go`)
+- https://github.com/golangci/golangci-lint/pull/788: only directory permissions changes (0777 -> 0744) (`cache.go`, `cache_test.go`, `default.go`)
+- https://github.com/golangci/golangci-lint/pull/808: mainly related to logs and errors (`cache.go`, `default.go`, `hash.go`, `hash_test.go`)
+- https://github.com/golangci/golangci-lint/pull/1063: `ioutil` -> `robustio` (`cache.go`)
+- https://github.com/golangci/golangci-lint/pull/1070: add `t.Parallel()` inside `cache_test.go`
+- https://github.com/golangci/golangci-lint/pull/1162: errors inside `cache.go`
+- https://github.com/golangci/golangci-lint/pull/2318: `ioutil` -> `os` (`cache.go`, `cache_test.go`, `default.go`, `hash_test.go`)
+- https://github.com/golangci/golangci-lint/pull/2352: Go doc typos
+- https://github.com/golangci/golangci-lint/pull/3012: errors inside `cache.go` (`cache.go`, `default.go`)
+- https://github.com/golangci/golangci-lint/pull/3196: constant for `GOLANGCI_LINT_CACHE` (`cache.go`)
+- https://github.com/golangci/golangci-lint/pull/3204: add this file and `%w` in `fmt.Errorf` (`cache.go`)
+- https://github.com/golangci/golangci-lint/pull/3604: remove `github.com/pkg/errors` (`cache.go`)
diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap.go b/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap.go
new file mode 100644
index 000000000..fcbd3e08c
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap.go
@@ -0,0 +1,31 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// This package is a lightly modified version of the mmap code
+// in github.com/google/codesearch/index.
+
+// The mmap package provides an abstraction for memory mapping files
+// on different platforms.
+package mmap
+
+import (
+ "os"
+)
+
+// Data is mmap'ed read-only data from a file.
+// The backing file is never closed, so Data
+// remains valid for the lifetime of the process.
+type Data struct {
+ f *os.File
+ Data []byte
+}
+
+// Mmap maps the given file into memory.
+func Mmap(file string) (Data, error) {
+ f, err := os.Open(file)
+ if err != nil {
+ return Data{}, err
+ }
+ return mmapFile(f)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap_other.go b/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap_other.go
new file mode 100644
index 000000000..4d2844fc3
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap_other.go
@@ -0,0 +1,21 @@
+// Copyright 2022 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build (js && wasm) || wasip1 || plan9
+
+package mmap
+
+import (
+ "io"
+ "os"
+)
+
+// mmapFile on other systems doesn't mmap the file. It just reads everything.
+func mmapFile(f *os.File) (Data, error) {
+ b, err := io.ReadAll(f)
+ if err != nil {
+ return Data{}, err
+ }
+ return Data{f, b}, nil
+}
diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap_unix.go b/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap_unix.go
new file mode 100644
index 000000000..5dce87236
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap_unix.go
@@ -0,0 +1,36 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build unix
+
+package mmap
+
+import (
+ "fmt"
+ "io/fs"
+ "os"
+ "syscall"
+)
+
+func mmapFile(f *os.File) (Data, error) {
+ st, err := f.Stat()
+ if err != nil {
+ return Data{}, err
+ }
+ size := st.Size()
+ pagesize := int64(os.Getpagesize())
+ if int64(int(size+(pagesize-1))) != size+(pagesize-1) {
+ return Data{}, fmt.Errorf("%s: too large for mmap", f.Name())
+ }
+ n := int(size)
+ if n == 0 {
+ return Data{f, nil}, nil
+ }
+ mmapLength := int(((size + pagesize - 1) / pagesize) * pagesize) // round up to page size
+ data, err := syscall.Mmap(int(f.Fd()), 0, mmapLength, syscall.PROT_READ, syscall.MAP_SHARED)
+ if err != nil {
+ return Data{}, &fs.PathError{Op: "mmap", Path: f.Name(), Err: err}
+ }
+ return Data{f, data[:n]}, nil
+}
diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap_windows.go b/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap_windows.go
new file mode 100644
index 000000000..479ee3075
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/mmap/mmap_windows.go
@@ -0,0 +1,41 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package mmap
+
+import (
+ "fmt"
+ "os"
+ "syscall"
+ "unsafe"
+
+ "golang.org/x/sys/windows"
+)
+
+func mmapFile(f *os.File) (Data, error) {
+ st, err := f.Stat()
+ if err != nil {
+ return Data{}, err
+ }
+ size := st.Size()
+ if size == 0 {
+ return Data{f, nil}, nil
+ }
+ h, err := syscall.CreateFileMapping(syscall.Handle(f.Fd()), nil, syscall.PAGE_READONLY, 0, 0, nil)
+ if err != nil {
+ return Data{}, fmt.Errorf("CreateFileMapping %s: %w", f.Name(), err)
+ }
+
+ addr, err := syscall.MapViewOfFile(h, syscall.FILE_MAP_READ, 0, 0, 0)
+ if err != nil {
+ return Data{}, fmt.Errorf("MapViewOfFile %s: %w", f.Name(), err)
+ }
+ var info windows.MemoryBasicInformation
+ err = windows.VirtualQuery(addr, &info, unsafe.Sizeof(info))
+ if err != nil {
+ return Data{}, fmt.Errorf("VirtualQuery %s: %w", f.Name(), err)
+ }
+ data := unsafe.Slice((*byte)(unsafe.Pointer(addr)), int(info.RegionSize))
+ return Data{f, data}, nil
+}
diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/mmap/readme.md b/vendor/github.com/golangci/golangci-lint/internal/go/mmap/readme.md
new file mode 100644
index 000000000..f68aef097
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/mmap/readme.md
@@ -0,0 +1,15 @@
+# mmap
+
+Extracted from `go/src/cmd/go/internal/mmap/` (related to `cache`).
+This is just a copy of the Go code without any changes.
+
+## History
+
+- https://github.com/golangci/golangci-lint/pull/5100
+ - Move package from `internal/mmap` to `internal/go/mmap`
+- https://github.com/golangci/golangci-lint/pull/5098
+ - sync with go1.23.2
+ - sync with go1.22.8
+ - sync with go1.21.13
+ - sync with go1.20.14
+ - sync with go1.19.13
diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/quoted/quoted.go b/vendor/github.com/golangci/golangci-lint/internal/go/quoted/quoted.go
new file mode 100644
index 000000000..a81227507
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/quoted/quoted.go
@@ -0,0 +1,129 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package quoted provides string manipulation utilities.
+package quoted
+
+import (
+ "flag"
+ "fmt"
+ "strings"
+ "unicode"
+)
+
+func isSpaceByte(c byte) bool {
+ return c == ' ' || c == '\t' || c == '\n' || c == '\r'
+}
+
+// Split splits s into a list of fields,
+// allowing single or double quotes around elements.
+// There is no unescaping or other processing within
+// quoted fields.
+//
+// Keep in sync with cmd/dist/quoted.go
+func Split(s string) ([]string, error) {
+ // Split fields allowing '' or "" around elements.
+ // Quotes further inside the string do not count.
+ var f []string
+ for len(s) > 0 {
+ for len(s) > 0 && isSpaceByte(s[0]) {
+ s = s[1:]
+ }
+ if len(s) == 0 {
+ break
+ }
+ // Accepted quoted string. No unescaping inside.
+ if s[0] == '"' || s[0] == '\'' {
+ quote := s[0]
+ s = s[1:]
+ i := 0
+ for i < len(s) && s[i] != quote {
+ i++
+ }
+ if i >= len(s) {
+ return nil, fmt.Errorf("unterminated %c string", quote)
+ }
+ f = append(f, s[:i])
+ s = s[i+1:]
+ continue
+ }
+ i := 0
+ for i < len(s) && !isSpaceByte(s[i]) {
+ i++
+ }
+ f = append(f, s[:i])
+ s = s[i:]
+ }
+ return f, nil
+}
+
+// Join joins a list of arguments into a string that can be parsed
+// with Split. Arguments are quoted only if necessary; arguments
+// without spaces or quotes are kept as-is. No argument may contain both
+// single and double quotes.
+func Join(args []string) (string, error) {
+ var buf []byte
+ for i, arg := range args {
+ if i > 0 {
+ buf = append(buf, ' ')
+ }
+ var sawSpace, sawSingleQuote, sawDoubleQuote bool
+ for _, c := range arg {
+ switch {
+ case c > unicode.MaxASCII:
+ continue
+ case isSpaceByte(byte(c)):
+ sawSpace = true
+ case c == '\'':
+ sawSingleQuote = true
+ case c == '"':
+ sawDoubleQuote = true
+ }
+ }
+ switch {
+ case !sawSpace && !sawSingleQuote && !sawDoubleQuote:
+ buf = append(buf, arg...)
+
+ case !sawSingleQuote:
+ buf = append(buf, '\'')
+ buf = append(buf, arg...)
+ buf = append(buf, '\'')
+
+ case !sawDoubleQuote:
+ buf = append(buf, '"')
+ buf = append(buf, arg...)
+ buf = append(buf, '"')
+
+ default:
+ return "", fmt.Errorf("argument %q contains both single and double quotes and cannot be quoted", arg)
+ }
+ }
+ return string(buf), nil
+}
+
+// A Flag parses a list of string arguments encoded with Join.
+// It is useful for flags like cmd/link's -extldflags.
+type Flag []string
+
+var _ flag.Value = (*Flag)(nil)
+
+func (f *Flag) Set(v string) error {
+ fs, err := Split(v)
+ if err != nil {
+ return err
+ }
+ *f = fs[:len(fs):len(fs)]
+ return nil
+}
+
+func (f *Flag) String() string {
+ if f == nil {
+ return ""
+ }
+ s, err := Join(*f)
+ if err != nil {
+ return strings.Join(*f, " ")
+ }
+ return s
+}
diff --git a/vendor/github.com/golangci/golangci-lint/internal/go/quoted/readme.md b/vendor/github.com/golangci/golangci-lint/internal/go/quoted/readme.md
new file mode 100644
index 000000000..a5e4c4bb3
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/quoted/readme.md
@@ -0,0 +1,13 @@
+# quoted
+
+Extracted from `go/src/cmd/internal/quoted/` (related to `cache`).
+This is just a copy of the Go code without any changes.
+
+## History
+
+- https://github.com/golangci/golangci-lint/pull/5100
+ - Move package from `internal/quoted` to `internal/go/quoted`
+- https://github.com/golangci/golangci-lint/pull/5098
+ - sync go1.23.2
+ - sync go1.22.8
+ - sync go1.21.13
diff --git a/vendor/github.com/golangci/golangci-lint/internal/robustio/readme.md b/vendor/github.com/golangci/golangci-lint/internal/go/robustio/readme.md
index 7c7ba0483..f4dbc1626 100644
--- a/vendor/github.com/golangci/golangci-lint/internal/robustio/readme.md
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/robustio/readme.md
@@ -4,3 +4,8 @@ Extracted from go1.19.1/src/cmd/go/internal/robustio
There is only one modification:
- ERROR_SHARING_VIOLATION extracted from go1.19.1/src/internal/syscall/windows/syscall_windows.go to remove the dependencies to `internal/syscall/windows`
+
+## History
+
+- https://github.com/golangci/golangci-lint/pull/5100
+ - Move package from `internal/robustio` to `internal/go/robustio`
diff --git a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio.go b/vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio.go
index 15b33773c..15b33773c 100644
--- a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio.go
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio.go
diff --git a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_darwin.go b/vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio_darwin.go
index 99fd8ebc2..99fd8ebc2 100644
--- a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_darwin.go
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio_darwin.go
diff --git a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_flaky.go b/vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio_flaky.go
index c56e36ca6..c56e36ca6 100644
--- a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_flaky.go
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio_flaky.go
diff --git a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_other.go b/vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio_other.go
index da9a46e4f..da9a46e4f 100644
--- a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_other.go
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio_other.go
diff --git a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_windows.go b/vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio_windows.go
index fe1728954..fe1728954 100644
--- a/vendor/github.com/golangci/golangci-lint/internal/robustio/robustio_windows.go
+++ b/vendor/github.com/golangci/golangci-lint/internal/go/robustio/robustio_windows.go
diff --git a/vendor/github.com/golangci/golangci-lint/internal/pkgcache/pkgcache.go b/vendor/github.com/golangci/golangci-lint/internal/pkgcache/pkgcache.go
deleted file mode 100644
index 3b3422eb7..000000000
--- a/vendor/github.com/golangci/golangci-lint/internal/pkgcache/pkgcache.go
+++ /dev/null
@@ -1,229 +0,0 @@
-package pkgcache
-
-import (
- "bytes"
- "encoding/gob"
- "encoding/hex"
- "errors"
- "fmt"
- "runtime"
- "sort"
- "sync"
-
- "golang.org/x/tools/go/packages"
-
- "github.com/golangci/golangci-lint/internal/cache"
- "github.com/golangci/golangci-lint/pkg/logutils"
- "github.com/golangci/golangci-lint/pkg/timeutils"
-)
-
-type HashMode int
-
-const (
- HashModeNeedOnlySelf HashMode = iota
- HashModeNeedDirectDeps
- HashModeNeedAllDeps
-)
-
-// Cache is a per-package data cache. A cached data is invalidated when
-// package, or it's dependencies change.
-type Cache struct {
- lowLevelCache *cache.Cache
- pkgHashes sync.Map
- sw *timeutils.Stopwatch
- log logutils.Log // not used now, but may be needed for future debugging purposes
- ioSem chan struct{} // semaphore limiting parallel IO
-}
-
-func NewCache(sw *timeutils.Stopwatch, log logutils.Log) (*Cache, error) {
- c, err := cache.Default()
- if err != nil {
- return nil, err
- }
- return &Cache{
- lowLevelCache: c,
- sw: sw,
- log: log,
- ioSem: make(chan struct{}, runtime.GOMAXPROCS(-1)),
- }, nil
-}
-
-func (c *Cache) Trim() {
- c.sw.TrackStage("trim", func() {
- c.lowLevelCache.Trim()
- })
-}
-
-func (c *Cache) Put(pkg *packages.Package, mode HashMode, key string, data any) error {
- var err error
- buf := &bytes.Buffer{}
- c.sw.TrackStage("gob", func() {
- err = gob.NewEncoder(buf).Encode(data)
- })
- if err != nil {
- return fmt.Errorf("failed to gob encode: %w", err)
- }
-
- var aID cache.ActionID
-
- c.sw.TrackStage("key build", func() {
- aID, err = c.pkgActionID(pkg, mode)
- if err == nil {
- subkey, subkeyErr := cache.Subkey(aID, key)
- if subkeyErr != nil {
- err = fmt.Errorf("failed to build subkey: %w", subkeyErr)
- }
- aID = subkey
- }
- })
- if err != nil {
- return fmt.Errorf("failed to calculate package %s action id: %w", pkg.Name, err)
- }
- c.ioSem <- struct{}{}
- c.sw.TrackStage("cache io", func() {
- err = c.lowLevelCache.PutBytes(aID, buf.Bytes())
- })
- <-c.ioSem
- if err != nil {
- return fmt.Errorf("failed to save data to low-level cache by key %s for package %s: %w", key, pkg.Name, err)
- }
-
- return nil
-}
-
-var ErrMissing = errors.New("missing data")
-
-func (c *Cache) Get(pkg *packages.Package, mode HashMode, key string, data any) error {
- var aID cache.ActionID
- var err error
- c.sw.TrackStage("key build", func() {
- aID, err = c.pkgActionID(pkg, mode)
- if err == nil {
- subkey, subkeyErr := cache.Subkey(aID, key)
- if subkeyErr != nil {
- err = fmt.Errorf("failed to build subkey: %w", subkeyErr)
- }
- aID = subkey
- }
- })
- if err != nil {
- return fmt.Errorf("failed to calculate package %s action id: %w", pkg.Name, err)
- }
-
- var b []byte
- c.ioSem <- struct{}{}
- c.sw.TrackStage("cache io", func() {
- b, _, err = c.lowLevelCache.GetBytes(aID)
- })
- <-c.ioSem
- if err != nil {
- if cache.IsErrMissing(err) {
- return ErrMissing
- }
- return fmt.Errorf("failed to get data from low-level cache by key %s for package %s: %w", key, pkg.Name, err)
- }
-
- c.sw.TrackStage("gob", func() {
- err = gob.NewDecoder(bytes.NewReader(b)).Decode(data)
- })
- if err != nil {
- return fmt.Errorf("failed to gob decode: %w", err)
- }
-
- return nil
-}
-
-func (c *Cache) pkgActionID(pkg *packages.Package, mode HashMode) (cache.ActionID, error) {
- hash, err := c.packageHash(pkg, mode)
- if err != nil {
- return cache.ActionID{}, fmt.Errorf("failed to get package hash: %w", err)
- }
-
- key, err := cache.NewHash("action ID")
- if err != nil {
- return cache.ActionID{}, fmt.Errorf("failed to make a hash: %w", err)
- }
- fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath)
- fmt.Fprintf(key, "pkghash %s\n", hash)
-
- return key.Sum(), nil
-}
-
-// packageHash computes a package's hash. The hash is based on all Go
-// files that make up the package, as well as the hashes of imported
-// packages.
-func (c *Cache) packageHash(pkg *packages.Package, mode HashMode) (string, error) {
- type hashResults map[HashMode]string
- hashResI, ok := c.pkgHashes.Load(pkg)
- if ok {
- hashRes := hashResI.(hashResults)
- if _, ok := hashRes[mode]; !ok {
- return "", fmt.Errorf("no mode %d in hash result", mode)
- }
- return hashRes[mode], nil
- }
-
- hashRes := hashResults{}
-
- key, err := cache.NewHash("package hash")
- if err != nil {
- return "", fmt.Errorf("failed to make a hash: %w", err)
- }
-
- fmt.Fprintf(key, "pkgpath %s\n", pkg.PkgPath)
- for _, f := range pkg.CompiledGoFiles {
- c.ioSem <- struct{}{}
- h, fErr := cache.FileHash(f)
- <-c.ioSem
- if fErr != nil {
- return "", fmt.Errorf("failed to calculate file %s hash: %w", f, fErr)
- }
- fmt.Fprintf(key, "file %s %x\n", f, h)
- }
- curSum := key.Sum()
- hashRes[HashModeNeedOnlySelf] = hex.EncodeToString(curSum[:])
-
- imps := make([]*packages.Package, 0, len(pkg.Imports))
- for _, imp := range pkg.Imports {
- imps = append(imps, imp)
- }
- sort.Slice(imps, func(i, j int) bool {
- return imps[i].PkgPath < imps[j].PkgPath
- })
-
- calcDepsHash := func(depMode HashMode) error {
- for _, dep := range imps {
- if dep.PkgPath == "unsafe" {
- continue
- }
-
- depHash, depErr := c.packageHash(dep, depMode)
- if depErr != nil {
- return fmt.Errorf("failed to calculate hash for dependency %s with mode %d: %w", dep.Name, depMode, depErr)
- }
-
- fmt.Fprintf(key, "import %s %s\n", dep.PkgPath, depHash)
- }
- return nil
- }
-
- if err := calcDepsHash(HashModeNeedOnlySelf); err != nil {
- return "", err
- }
-
- curSum = key.Sum()
- hashRes[HashModeNeedDirectDeps] = hex.EncodeToString(curSum[:])
-
- if err := calcDepsHash(HashModeNeedAllDeps); err != nil {
- return "", err
- }
- curSum = key.Sum()
- hashRes[HashModeNeedAllDeps] = hex.EncodeToString(curSum[:])
-
- if _, ok := hashRes[mode]; !ok {
- return "", fmt.Errorf("invalid mode %d", mode)
- }
-
- c.pkgHashes.Store(pkg, hashRes)
- return hashRes[mode], nil
-}
diff --git a/vendor/github.com/golangci/golangci-lint/internal/renameio/readme.md b/vendor/github.com/golangci/golangci-lint/internal/renameio/readme.md
deleted file mode 100644
index 36ec6ed49..000000000
--- a/vendor/github.com/golangci/golangci-lint/internal/renameio/readme.md
+++ /dev/null
@@ -1,10 +0,0 @@
-# renameio
-
-Extracted from go/src/cmd/go/internal/renameio/
-I don't know what version of Go this package was pulled from.
-
-Adapted for golangci-lint:
-- https://github.com/golangci/golangci-lint/pull/699
-- https://github.com/golangci/golangci-lint/pull/808
-- https://github.com/golangci/golangci-lint/pull/1063
-- https://github.com/golangci/golangci-lint/pull/3204
diff --git a/vendor/github.com/golangci/golangci-lint/internal/renameio/renameio.go b/vendor/github.com/golangci/golangci-lint/internal/renameio/renameio.go
deleted file mode 100644
index 2f88f4f7c..000000000
--- a/vendor/github.com/golangci/golangci-lint/internal/renameio/renameio.go
+++ /dev/null
@@ -1,93 +0,0 @@
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package renameio writes files atomically by renaming temporary files.
-package renameio
-
-import (
- "bytes"
- "io"
- "math/rand"
- "os"
- "path/filepath"
- "strconv"
-
- "github.com/golangci/golangci-lint/internal/robustio"
-)
-
-const patternSuffix = ".tmp"
-
-// Pattern returns a glob pattern that matches the unrenamed temporary files
-// created when writing to filename.
-func Pattern(filename string) string {
- return filepath.Join(filepath.Dir(filename), filepath.Base(filename)+patternSuffix)
-}
-
-// WriteFile is like os.WriteFile, but first writes data to an arbitrary
-// file in the same directory as filename, then renames it atomically to the
-// final name.
-//
-// That ensures that the final location, if it exists, is always a complete file.
-func WriteFile(filename string, data []byte, perm os.FileMode) (err error) {
- return WriteToFile(filename, bytes.NewReader(data), perm)
-}
-
-// WriteToFile is a variant of WriteFile that accepts the data as an io.Reader
-// instead of a slice.
-func WriteToFile(filename string, data io.Reader, perm os.FileMode) (err error) {
- f, err := tempFile(filepath.Dir(filename), filepath.Base(filename), perm)
- if err != nil {
- return err
- }
- defer func() {
- // Only call os.Remove on f.Name() if we failed to rename it: otherwise,
- // some other process may have created a new file with the same name after
- // that.
- if err != nil {
- f.Close()
- os.Remove(f.Name())
- }
- }()
-
- if _, err := io.Copy(f, data); err != nil {
- return err
- }
- // Sync the file before renaming it: otherwise, after a crash the reader may
- // observe a 0-length file instead of the actual contents.
- // See https://golang.org/issue/22397#issuecomment-380831736.
- if err := f.Sync(); err != nil {
- return err
- }
- if err := f.Close(); err != nil {
- return err
- }
-
- return robustio.Rename(f.Name(), filename)
-}
-
-// tempFile creates a new temporary file with given permission bits.
-func tempFile(dir, prefix string, perm os.FileMode) (f *os.File, err error) {
- for i := 0; i < 10000; i++ {
- name := filepath.Join(dir, prefix+strconv.Itoa(rand.Intn(1000000000))+patternSuffix)
- f, err = os.OpenFile(name, os.O_RDWR|os.O_CREATE|os.O_EXCL, perm)
- if os.IsExist(err) {
- continue
- }
- break
- }
- return
-}
-
-// ReadFile is like os.ReadFile, but on Windows retries spurious errors that
-// may occur if the file is concurrently replaced.
-//
-// Errors are classified heuristically and retries are bounded, so even this
-// function may occasionally return a spurious error on Windows.
-// If so, the error will likely wrap one of:
-// - syscall.ERROR_ACCESS_DENIED
-// - syscall.ERROR_FILE_NOT_FOUND
-// - internal/syscall/windows.ERROR_SHARING_VIOLATION
-func ReadFile(filename string) ([]byte, error) {
- return robustio.ReadFile(filename)
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/cache.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/cache.go
index cc6c0eacd..4f2c812dc 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/commands/cache.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/cache.go
@@ -62,6 +62,7 @@ func (*cacheCommand) executeClean(_ *cobra.Command, _ []string) error {
func (*cacheCommand) executeStatus(_ *cobra.Command, _ []string) {
cacheDir := cache.DefaultDir()
+
_, _ = fmt.Fprintf(logutils.StdOut, "Dir: %s\n", cacheDir)
cacheSizeBytes, err := dirSizeBytes(cacheDir)
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go b/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go
index f289bfdd7..ff7c5e467 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/commands/run.go
@@ -28,7 +28,6 @@ import (
"gopkg.in/yaml.v3"
"github.com/golangci/golangci-lint/internal/cache"
- "github.com/golangci/golangci-lint/internal/pkgcache"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/exitcodes"
"github.com/golangci/golangci-lint/pkg/fsutils"
@@ -209,7 +208,7 @@ func (c *runCommand) preRunE(_ *cobra.Command, args []string) error {
sw := timeutils.NewStopwatch("pkgcache", c.log.Child(logutils.DebugKeyStopwatch))
- pkgCache, err := pkgcache.NewCache(sw, c.log.Child(logutils.DebugKeyPkgCache))
+ pkgCache, err := cache.NewCache(sw, c.log.Child(logutils.DebugKeyPkgCache))
if err != nil {
return fmt.Errorf("failed to build packages cache: %w", err)
}
@@ -640,7 +639,7 @@ func initHashSalt(version string, cfg *config.Config) error {
b := bytes.NewBuffer(binSalt)
b.Write(configSalt)
- cache.SetSalt(b.Bytes())
+ cache.SetSalt(b)
return nil
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/config.go b/vendor/github.com/golangci/golangci-lint/pkg/config/config.go
index 93b331bec..b863b329f 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/config/config.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/config/config.go
@@ -75,10 +75,9 @@ func IsGoGreaterThanOrEqual(current, limit string) bool {
}
func detectGoVersion() string {
- file, _ := gomoddirectives.GetModuleFile()
-
- if file != nil && file.Go != nil && file.Go.Version != "" {
- return file.Go.Version
+ goVersion := detectGoVersionFromGoMod()
+ if goVersion != "" {
+ return goVersion
}
v := os.Getenv("GOVERSION")
@@ -88,3 +87,26 @@ func detectGoVersion() string {
return "1.17"
}
+
+// detectGoVersionFromGoMod tries to get Go version from go.mod.
+// It returns `toolchain` version if present,
+// else it returns `go` version if present,
+// else it returns empty.
+func detectGoVersionFromGoMod() string {
+ file, _ := gomoddirectives.GetModuleFile()
+ if file == nil {
+ return ""
+ }
+
+ // The toolchain exists only if 'toolchain' version > 'go' version.
+ // If 'toolchain' version <= 'go' version, `go mod tidy` will remove 'toolchain' version from go.mod.
+ if file.Toolchain != nil && file.Toolchain.Name != "" {
+ return strings.TrimPrefix(file.Toolchain.Name, "go")
+ }
+
+ if file.Go != nil && file.Go.Version != "" {
+ return file.Go.Version
+ }
+
+ return ""
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go b/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go
index 109de4243..b182d1e0f 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/config/linters_settings.go
@@ -47,6 +47,9 @@ var defaultLintersSettings = LintersSettings{
Sections: []string{"standard", "default"},
SkipGenerated: true,
},
+ GoChecksumType: GoChecksumTypeSettings{
+ DefaultSignifiesExhaustive: true,
+ },
Gocognit: GocognitSettings{
MinComplexity: 30,
},
@@ -216,6 +219,7 @@ type LintersSettings struct {
Gci GciSettings
GinkgoLinter GinkgoLinterSettings
Gocognit GocognitSettings
+ GoChecksumType GoChecksumTypeSettings
Goconst GoConstSettings
Gocritic GoCriticSettings
Gocyclo GoCycloSettings
@@ -225,7 +229,6 @@ type LintersSettings struct {
Gofumpt GofumptSettings
Goheader GoHeaderSettings
Goimports GoImportsSettings
- Gomnd GoMndSettings
GoModDirectives GoModDirectivesSettings
Gomodguard GoModGuardSettings
Gosec GoSecSettings
@@ -233,6 +236,7 @@ type LintersSettings struct {
Gosmopolitan GosmopolitanSettings
Govet GovetSettings
Grouper GrouperSettings
+ Iface IfaceSettings
ImportAs ImportAsSettings
Inamedparam INamedParamSettings
InterfaceBloat InterfaceBloatSettings
@@ -483,6 +487,11 @@ type GinkgoLinterSettings struct {
ForceExpectTo bool `mapstructure:"force-expect-to"`
ValidateAsyncIntervals bool `mapstructure:"validate-async-intervals"`
ForbidSpecPollution bool `mapstructure:"forbid-spec-pollution"`
+ ForceSucceedForFuncs bool `mapstructure:"force-succeed"`
+}
+
+type GoChecksumTypeSettings struct {
+ DefaultSignifiesExhaustive bool `mapstructure:"default-signifies-exhaustive"`
}
type GocognitSettings struct {
@@ -560,14 +569,6 @@ type GoImportsSettings struct {
LocalPrefixes string `mapstructure:"local-prefixes"`
}
-// Deprecated: use MndSettings.
-type GoMndSettings struct {
- MndSettings `mapstructure:",squash"`
-
- // Deprecated: use root level settings instead.
- Settings map[string]map[string]any
-}
-
type GoModDirectivesSettings struct {
ReplaceAllowList []string `mapstructure:"replace-allow-list"`
ReplaceLocal bool `mapstructure:"replace-local"`
@@ -648,6 +649,11 @@ type GrouperSettings struct {
VarRequireGrouping bool `mapstructure:"var-require-grouping"`
}
+type IfaceSettings struct {
+ Enable []string `mapstructure:"enable"`
+ Settings map[string]map[string]any `mapstructure:"settings"`
+}
+
type ImportAsSettings struct {
Alias []ImportAsAlias
NoUnaliased bool `mapstructure:"no-unaliased"`
@@ -725,7 +731,8 @@ type NestifSettings struct {
}
type NilNilSettings struct {
- CheckedTypes []string `mapstructure:"checked-types"`
+ DetectOpposite bool `mapstructure:"detect-opposite"`
+ CheckedTypes []string `mapstructure:"checked-types"`
}
type NlreturnSettings struct {
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/config/loader.go b/vendor/github.com/golangci/golangci-lint/pkg/config/loader.go
index efdbfce1f..efeed3ca4 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/config/loader.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/config/loader.go
@@ -302,17 +302,6 @@ func (l *Loader) handleGoVersion() {
l.cfg.LintersSettings.Gocritic.Go = trimmedGoVersion
- // staticcheck related linters.
- if l.cfg.LintersSettings.Staticcheck.GoVersion == "" {
- l.cfg.LintersSettings.Staticcheck.GoVersion = trimmedGoVersion
- }
- if l.cfg.LintersSettings.Gosimple.GoVersion == "" {
- l.cfg.LintersSettings.Gosimple.GoVersion = trimmedGoVersion
- }
- if l.cfg.LintersSettings.Stylecheck.GoVersion == "" {
- l.cfg.LintersSettings.Stylecheck.GoVersion = trimmedGoVersion
- }
-
os.Setenv("GOSECGOVERSION", l.cfg.Run.Go)
}
@@ -413,12 +402,6 @@ func (l *Loader) handleLinterOptionDeprecations() {
l.log.Warnf("The configuration option `linters.godot.check-all` is deprecated, please use `linters.godot.scope: all`.")
}
- // Deprecated since v1.44.0.
- if len(l.cfg.LintersSettings.Gomnd.Settings) > 0 {
- l.log.Warnf("The configuration option `linters.gomnd.settings` is deprecated. Please use the options " +
- "`linters.gomnd.checks`,`linters.gomnd.ignored-numbers`,`linters.gomnd.ignored-files`,`linters.gomnd.ignored-functions`.")
- }
-
// Deprecated since v1.47.0
if l.cfg.LintersSettings.Gofumpt.LangVersion != "" {
l.log.Warnf("The configuration option `linters.gofumpt.lang-version` is deprecated, please use global `run.go`.")
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner.go
index c1274ec09..ac03c71ec 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner.go
@@ -1,8 +1,3 @@
-// checker is a partial copy of https://github.com/golang/tools/blob/master/go/analysis/internal/checker
-// Copyright 2018 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
// Package goanalysis defines the implementation of the checker commands.
// The same code drives the multi-analysis driver, the single-analysis
// driver that is conventionally provided for convenience along with
@@ -21,8 +16,8 @@ import (
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/packages"
+ "github.com/golangci/golangci-lint/internal/cache"
"github.com/golangci/golangci-lint/internal/errorutil"
- "github.com/golangci/golangci-lint/internal/pkgcache"
"github.com/golangci/golangci-lint/pkg/goanalysis/load"
"github.com/golangci/golangci-lint/pkg/logutils"
"github.com/golangci/golangci-lint/pkg/timeutils"
@@ -52,7 +47,7 @@ type Diagnostic struct {
type runner struct {
log logutils.Log
prefix string // ensure unique analyzer names
- pkgCache *pkgcache.Cache
+ pkgCache *cache.Cache
loadGuard *load.Guard
loadMode LoadMode
passToPkg map[*analysis.Pass]*packages.Package
@@ -60,7 +55,7 @@ type runner struct {
sw *timeutils.Stopwatch
}
-func newRunner(prefix string, logger logutils.Log, pkgCache *pkgcache.Cache, loadGuard *load.Guard,
+func newRunner(prefix string, logger logutils.Log, pkgCache *cache.Cache, loadGuard *load.Guard,
loadMode LoadMode, sw *timeutils.Stopwatch,
) *runner {
return &runner{
@@ -84,7 +79,6 @@ func (r *runner) run(analyzers []*analysis.Analyzer, initialPackages []*packages
[]error, map[*analysis.Pass]*packages.Package,
) {
debugf("Analyzing %d packages on load mode %s", len(initialPackages), r.loadMode)
- defer r.pkgCache.Trim()
roots := r.analyze(initialPackages, analyzers)
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_action.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_action.go
index 58ea297ea..152cab181 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_action.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_action.go
@@ -1,21 +1,10 @@
package goanalysis
import (
- "errors"
"fmt"
- "go/types"
- "io"
- "reflect"
"runtime/debug"
- "time"
-
- "golang.org/x/tools/go/analysis"
- "golang.org/x/tools/go/packages"
- "golang.org/x/tools/go/types/objectpath"
"github.com/golangci/golangci-lint/internal/errorutil"
- "github.com/golangci/golangci-lint/internal/pkgcache"
- "github.com/golangci/golangci-lint/pkg/goanalysis/pkgerrors"
)
type actionAllocator struct {
@@ -39,54 +28,6 @@ func (actAlloc *actionAllocator) alloc() *action {
return act
}
-// An action represents one unit of analysis work: the application of
-// one analysis to one package. Actions form a DAG, both within a
-// package (as different analyzers are applied, either in sequence or
-// parallel), and across packages (as dependencies are analyzed).
-type action struct {
- a *analysis.Analyzer
- pkg *packages.Package
- pass *analysis.Pass
- deps []*action
- objectFacts map[objectFactKey]analysis.Fact
- packageFacts map[packageFactKey]analysis.Fact
- result any
- diagnostics []analysis.Diagnostic
- err error
- r *runner
- analysisDoneCh chan struct{}
- loadCachedFactsDone bool
- loadCachedFactsOk bool
- isroot bool
- isInitialPkg bool
- needAnalyzeSource bool
-}
-
-func (act *action) String() string {
- return fmt.Sprintf("%s@%s", act.a, act.pkg)
-}
-
-func (act *action) loadCachedFacts() bool {
- if act.loadCachedFactsDone { // can't be set in parallel
- return act.loadCachedFactsOk
- }
-
- res := func() bool {
- if act.isInitialPkg {
- return true // load cached facts only for non-initial packages
- }
-
- if len(act.a.FactTypes) == 0 {
- return true // no need to load facts
- }
-
- return act.loadPersistedFacts()
- }()
- act.loadCachedFactsDone = true
- act.loadCachedFactsOk = res
- return res
-}
-
func (act *action) waitUntilDependingAnalyzersWorked() {
for _, dep := range act.deps {
if dep.pkg == act.pkg {
@@ -109,268 +50,8 @@ func (act *action) analyzeSafe() {
act.a.Name, act.pkg.Name, act.isInitialPkg, act.needAnalyzeSource, p), debug.Stack())
}
}()
- act.r.sw.TrackStage(act.a.Name, func() {
- act.analyze()
- })
-}
-
-func (act *action) analyze() {
- defer close(act.analysisDoneCh) // unblock actions depending on this action
-
- if !act.needAnalyzeSource {
- return
- }
-
- defer func(now time.Time) {
- analyzeDebugf("go/analysis: %s: %s: analyzed package %q in %s", act.r.prefix, act.a.Name, act.pkg.Name, time.Since(now))
- }(time.Now())
-
- // Report an error if any dependency failures.
- var depErrors error
- for _, dep := range act.deps {
- if dep.err == nil {
- continue
- }
-
- depErrors = errors.Join(depErrors, errors.Unwrap(dep.err))
- }
- if depErrors != nil {
- act.err = fmt.Errorf("failed prerequisites: %w", depErrors)
- return
- }
-
- // Plumb the output values of the dependencies
- // into the inputs of this action. Also facts.
- inputs := make(map[*analysis.Analyzer]any)
- startedAt := time.Now()
- for _, dep := range act.deps {
- if dep.pkg == act.pkg {
- // Same package, different analysis (horizontal edge):
- // in-memory outputs of prerequisite analyzers
- // become inputs to this analysis pass.
- inputs[dep.a] = dep.result
- } else if dep.a == act.a { // (always true)
- // Same analysis, different package (vertical edge):
- // serialized facts produced by prerequisite analysis
- // become available to this analysis pass.
- inheritFacts(act, dep)
- }
- }
- factsDebugf("%s: Inherited facts in %s", act, time.Since(startedAt))
-
- // Run the analysis.
- pass := &analysis.Pass{
- Analyzer: act.a,
- Fset: act.pkg.Fset,
- Files: act.pkg.Syntax,
- OtherFiles: act.pkg.OtherFiles,
- Pkg: act.pkg.Types,
- TypesInfo: act.pkg.TypesInfo,
- TypesSizes: act.pkg.TypesSizes,
- ResultOf: inputs,
- Report: func(d analysis.Diagnostic) { act.diagnostics = append(act.diagnostics, d) },
- ImportObjectFact: act.importObjectFact,
- ExportObjectFact: act.exportObjectFact,
- ImportPackageFact: act.importPackageFact,
- ExportPackageFact: act.exportPackageFact,
- AllObjectFacts: act.allObjectFacts,
- AllPackageFacts: act.allPackageFacts,
- }
- act.pass = pass
- act.r.passToPkgGuard.Lock()
- act.r.passToPkg[pass] = act.pkg
- act.r.passToPkgGuard.Unlock()
-
- if act.pkg.IllTyped {
- // It looks like there should be !pass.Analyzer.RunDespiteErrors
- // but govet's cgocall crashes on it. Govet itself contains !pass.Analyzer.RunDespiteErrors condition here,
- // but it exits before it if packages.Load have failed.
- act.err = fmt.Errorf("analysis skipped: %w", &pkgerrors.IllTypedError{Pkg: act.pkg})
- } else {
- startedAt = time.Now()
- act.result, act.err = pass.Analyzer.Run(pass)
- analyzedIn := time.Since(startedAt)
- if analyzedIn > time.Millisecond*10 {
- debugf("%s: run analyzer in %s", act, analyzedIn)
- }
- }
-
- // disallow calls after Run
- pass.ExportObjectFact = nil
- pass.ExportPackageFact = nil
-
- if err := act.persistFactsToCache(); err != nil {
- act.r.log.Warnf("Failed to persist facts to cache: %s", err)
- }
-}
-
-// importObjectFact implements Pass.ImportObjectFact.
-// Given a non-nil pointer ptr of type *T, where *T satisfies Fact,
-// importObjectFact copies the fact value to *ptr.
-func (act *action) importObjectFact(obj types.Object, ptr analysis.Fact) bool {
- if obj == nil {
- panic("nil object")
- }
- key := objectFactKey{obj, act.factType(ptr)}
- if v, ok := act.objectFacts[key]; ok {
- reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem())
- return true
- }
- return false
-}
-
-// exportObjectFact implements Pass.ExportObjectFact.
-func (act *action) exportObjectFact(obj types.Object, fact analysis.Fact) {
- if obj.Pkg() != act.pkg.Types {
- act.r.log.Panicf("internal error: in analysis %s of package %s: Fact.Set(%s, %T): can't set facts on objects belonging another package",
- act.a, act.pkg, obj, fact)
- }
-
- key := objectFactKey{obj, act.factType(fact)}
- act.objectFacts[key] = fact // clobber any existing entry
- if isFactsExportDebug {
- objstr := types.ObjectString(obj, (*types.Package).Name)
- factsExportDebugf("%s: object %s has fact %s\n",
- act.pkg.Fset.Position(obj.Pos()), objstr, fact)
- }
-}
-
-func (act *action) allObjectFacts() []analysis.ObjectFact {
- out := make([]analysis.ObjectFact, 0, len(act.objectFacts))
- for key, fact := range act.objectFacts {
- out = append(out, analysis.ObjectFact{
- Object: key.obj,
- Fact: fact,
- })
- }
- return out
-}
-
-// importPackageFact implements Pass.ImportPackageFact.
-// Given a non-nil pointer ptr of type *T, where *T satisfies Fact,
-// fact copies the fact value to *ptr.
-func (act *action) importPackageFact(pkg *types.Package, ptr analysis.Fact) bool {
- if pkg == nil {
- panic("nil package")
- }
- key := packageFactKey{pkg, act.factType(ptr)}
- if v, ok := act.packageFacts[key]; ok {
- reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem())
- return true
- }
- return false
-}
-
-// exportPackageFact implements Pass.ExportPackageFact.
-func (act *action) exportPackageFact(fact analysis.Fact) {
- key := packageFactKey{act.pass.Pkg, act.factType(fact)}
- act.packageFacts[key] = fact // clobber any existing entry
- factsDebugf("%s: package %s has fact %s\n",
- act.pkg.Fset.Position(act.pass.Files[0].Pos()), act.pass.Pkg.Path(), fact)
-}
-
-func (act *action) allPackageFacts() []analysis.PackageFact {
- out := make([]analysis.PackageFact, 0, len(act.packageFacts))
- for key, fact := range act.packageFacts {
- out = append(out, analysis.PackageFact{
- Package: key.pkg,
- Fact: fact,
- })
- }
- return out
-}
-
-func (act *action) factType(fact analysis.Fact) reflect.Type {
- t := reflect.TypeOf(fact)
- if t.Kind() != reflect.Ptr {
- act.r.log.Fatalf("invalid Fact type: got %T, want pointer", t)
- }
- return t
-}
-
-func (act *action) persistFactsToCache() error {
- analyzer := act.a
- if len(analyzer.FactTypes) == 0 {
- return nil
- }
-
- // Merge new facts into the package and persist them.
- var facts []Fact
- for key, fact := range act.packageFacts {
- if key.pkg != act.pkg.Types {
- // The fact is from inherited facts from another package
- continue
- }
- facts = append(facts, Fact{
- Path: "",
- Fact: fact,
- })
- }
- for key, fact := range act.objectFacts {
- obj := key.obj
- if obj.Pkg() != act.pkg.Types {
- // The fact is from inherited facts from another package
- continue
- }
-
- path, err := objectpath.For(obj)
- if err != nil {
- // The object is not globally addressable
- continue
- }
-
- facts = append(facts, Fact{
- Path: string(path),
- Fact: fact,
- })
- }
-
- factsCacheDebugf("Caching %d facts for package %q and analyzer %s", len(facts), act.pkg.Name, act.a.Name)
-
- key := fmt.Sprintf("%s/facts", analyzer.Name)
- return act.r.pkgCache.Put(act.pkg, pkgcache.HashModeNeedAllDeps, key, facts)
-}
-
-func (act *action) loadPersistedFacts() bool {
- var facts []Fact
- key := fmt.Sprintf("%s/facts", act.a.Name)
- if err := act.r.pkgCache.Get(act.pkg, pkgcache.HashModeNeedAllDeps, key, &facts); err != nil {
- if !errors.Is(err, pkgcache.ErrMissing) && !errors.Is(err, io.EOF) {
- act.r.log.Warnf("Failed to get persisted facts: %s", err)
- }
-
- factsCacheDebugf("No cached facts for package %q and analyzer %s", act.pkg.Name, act.a.Name)
- return false
- }
-
- factsCacheDebugf("Loaded %d cached facts for package %q and analyzer %s", len(facts), act.pkg.Name, act.a.Name)
-
- for _, f := range facts {
- if f.Path == "" { // this is a package fact
- key := packageFactKey{act.pkg.Types, act.factType(f.Fact)}
- act.packageFacts[key] = f.Fact
- continue
- }
- obj, err := objectpath.Object(act.pkg.Types, objectpath.Path(f.Path))
- if err != nil {
- // Be lenient about these errors. For example, when
- // analyzing io/ioutil from source, we may get a fact
- // for methods on the devNull type, and objectpath
- // will happily create a path for them. However, when
- // we later load io/ioutil from export data, the path
- // no longer resolves.
- //
- // If an exported type embeds the unexported type,
- // then (part of) the unexported type will become part
- // of the type information and our path will resolve
- // again.
- continue
- }
- factKey := objectFactKey{obj, act.factType(f.Fact)}
- act.objectFacts[factKey] = f.Fact
- }
- return true
+ act.r.sw.TrackStage(act.a.Name, act.analyze)
}
func (act *action) markDepsForAnalyzingSource() {
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_action_cache.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_action_cache.go
new file mode 100644
index 000000000..fbc2f82fa
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_action_cache.go
@@ -0,0 +1,127 @@
+package goanalysis
+
+import (
+ "errors"
+ "fmt"
+ "io"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/types/objectpath"
+
+ "github.com/golangci/golangci-lint/internal/cache"
+)
+
+type Fact struct {
+ Path string // non-empty only for object facts
+ Fact analysis.Fact
+}
+
+func (act *action) loadCachedFacts() bool {
+ if act.loadCachedFactsDone { // can't be set in parallel
+ return act.loadCachedFactsOk
+ }
+
+ res := func() bool {
+ if act.isInitialPkg {
+ return true // load cached facts only for non-initial packages
+ }
+
+ if len(act.a.FactTypes) == 0 {
+ return true // no need to load facts
+ }
+
+ return act.loadPersistedFacts()
+ }()
+ act.loadCachedFactsDone = true
+ act.loadCachedFactsOk = res
+ return res
+}
+
+func (act *action) persistFactsToCache() error {
+ analyzer := act.a
+ if len(analyzer.FactTypes) == 0 {
+ return nil
+ }
+
+ // Merge new facts into the package and persist them.
+ var facts []Fact
+ for key, fact := range act.packageFacts {
+ if key.pkg != act.pkg.Types {
+ // The fact is from inherited facts from another package
+ continue
+ }
+ facts = append(facts, Fact{
+ Path: "",
+ Fact: fact,
+ })
+ }
+ for key, fact := range act.objectFacts {
+ obj := key.obj
+ if obj.Pkg() != act.pkg.Types {
+ // The fact is from inherited facts from another package
+ continue
+ }
+
+ path, err := objectpath.For(obj)
+ if err != nil {
+ // The object is not globally addressable
+ continue
+ }
+
+ facts = append(facts, Fact{
+ Path: string(path),
+ Fact: fact,
+ })
+ }
+
+ factsCacheDebugf("Caching %d facts for package %q and analyzer %s", len(facts), act.pkg.Name, act.a.Name)
+
+ return act.r.pkgCache.Put(act.pkg, cache.HashModeNeedAllDeps, factCacheKey(analyzer), facts)
+}
+
+func (act *action) loadPersistedFacts() bool {
+ var facts []Fact
+
+ err := act.r.pkgCache.Get(act.pkg, cache.HashModeNeedAllDeps, factCacheKey(act.a), &facts)
+ if err != nil {
+ if !errors.Is(err, cache.ErrMissing) && !errors.Is(err, io.EOF) {
+ act.r.log.Warnf("Failed to get persisted facts: %s", err)
+ }
+
+ factsCacheDebugf("No cached facts for package %q and analyzer %s", act.pkg.Name, act.a.Name)
+ return false
+ }
+
+ factsCacheDebugf("Loaded %d cached facts for package %q and analyzer %s", len(facts), act.pkg.Name, act.a.Name)
+
+ for _, f := range facts {
+ if f.Path == "" { // this is a package fact
+ key := packageFactKey{act.pkg.Types, act.factType(f.Fact)}
+ act.packageFacts[key] = f.Fact
+ continue
+ }
+ obj, err := objectpath.Object(act.pkg.Types, objectpath.Path(f.Path))
+ if err != nil {
+ // Be lenient about these errors. For example, when
+ // analyzing io/ioutil from source, we may get a fact
+ // for methods on the devNull type, and objectpath
+ // will happily create a path for them. However, when
+ // we later load io/ioutil from export data, the path
+ // no longer resolves.
+ //
+ // If an exported type embeds the unexported type,
+ // then (part of) the unexported type will become part
+ // of the type information and our path will resolve
+ // again.
+ continue
+ }
+ factKey := objectFactKey{obj, act.factType(f.Fact)}
+ act.objectFacts[factKey] = f.Fact
+ }
+
+ return true
+}
+
+func factCacheKey(a *analysis.Analyzer) string {
+ return fmt.Sprintf("%s/facts", a.Name)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_base.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_base.go
new file mode 100644
index 000000000..d868f8f5d
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_base.go
@@ -0,0 +1,370 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+//
+// Partial copy of https://github.com/golang/tools/blob/dba5486c2a1d03519930812112b23ed2c45c04fc/go/analysis/internal/checker/checker.go
+
+package goanalysis
+
+import (
+ "bytes"
+ "encoding/gob"
+ "errors"
+ "fmt"
+ "go/types"
+ "reflect"
+ "time"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/packages"
+
+ "github.com/golangci/golangci-lint/pkg/goanalysis/pkgerrors"
+)
+
+// NOTE(ldez) altered: custom fields; remove 'once' and 'duration'.
+// An action represents one unit of analysis work: the application of
+// one analysis to one package. Actions form a DAG, both within a
+// package (as different analyzers are applied, either in sequence or
+// parallel), and across packages (as dependencies are analyzed).
+type action struct {
+ a *analysis.Analyzer
+ pkg *packages.Package
+ pass *analysis.Pass
+ isroot bool
+ deps []*action
+ objectFacts map[objectFactKey]analysis.Fact
+ packageFacts map[packageFactKey]analysis.Fact
+ result any
+ diagnostics []analysis.Diagnostic
+ err error
+
+ // NOTE(ldez) custom fields.
+ r *runner
+ analysisDoneCh chan struct{}
+ loadCachedFactsDone bool
+ loadCachedFactsOk bool
+ isInitialPkg bool
+ needAnalyzeSource bool
+}
+
+// NOTE(ldez) no alteration.
+type objectFactKey struct {
+ obj types.Object
+ typ reflect.Type
+}
+
+// NOTE(ldez) no alteration.
+type packageFactKey struct {
+ pkg *types.Package
+ typ reflect.Type
+}
+
+// NOTE(ldez) no alteration.
+func (act *action) String() string {
+ return fmt.Sprintf("%s@%s", act.a, act.pkg)
+}
+
+// NOTE(ldez) altered version of `func (act *action) execOnce()`.
+func (act *action) analyze() {
+ defer close(act.analysisDoneCh) // unblock actions depending on this action
+
+ if !act.needAnalyzeSource {
+ return
+ }
+
+ defer func(now time.Time) {
+ analyzeDebugf("go/analysis: %s: %s: analyzed package %q in %s", act.r.prefix, act.a.Name, act.pkg.Name, time.Since(now))
+ }(time.Now())
+
+ // Report an error if any dependency failures.
+ var depErrors error
+ for _, dep := range act.deps {
+ if dep.err != nil {
+ depErrors = errors.Join(depErrors, errors.Unwrap(dep.err))
+ }
+ }
+
+ if depErrors != nil {
+ act.err = fmt.Errorf("failed prerequisites: %w", depErrors)
+ return
+ }
+
+ // Plumb the output values of the dependencies
+ // into the inputs of this action. Also facts.
+ inputs := make(map[*analysis.Analyzer]any)
+ act.objectFacts = make(map[objectFactKey]analysis.Fact)
+ act.packageFacts = make(map[packageFactKey]analysis.Fact)
+ startedAt := time.Now()
+
+ for _, dep := range act.deps {
+ if dep.pkg == act.pkg {
+ // Same package, different analysis (horizontal edge):
+ // in-memory outputs of prerequisite analyzers
+ // become inputs to this analysis pass.
+ inputs[dep.a] = dep.result
+ } else if dep.a == act.a { // (always true)
+ // Same analysis, different package (vertical edge):
+ // serialized facts produced by prerequisite analysis
+ // become available to this analysis pass.
+ inheritFacts(act, dep)
+ }
+ }
+
+ factsDebugf("%s: Inherited facts in %s", act, time.Since(startedAt))
+
+ module := &analysis.Module{} // possibly empty (non nil) in go/analysis drivers.
+ if mod := act.pkg.Module; mod != nil {
+ module.Path = mod.Path
+ module.Version = mod.Version
+ module.GoVersion = mod.GoVersion
+ }
+
+ // Run the analysis.
+ pass := &analysis.Pass{
+ Analyzer: act.a,
+ Fset: act.pkg.Fset,
+ Files: act.pkg.Syntax,
+ OtherFiles: act.pkg.OtherFiles,
+ IgnoredFiles: act.pkg.IgnoredFiles,
+ Pkg: act.pkg.Types,
+ TypesInfo: act.pkg.TypesInfo,
+ TypesSizes: act.pkg.TypesSizes,
+ TypeErrors: act.pkg.TypeErrors,
+ Module: module,
+
+ ResultOf: inputs,
+ Report: func(d analysis.Diagnostic) { act.diagnostics = append(act.diagnostics, d) },
+ ImportObjectFact: act.importObjectFact,
+ ExportObjectFact: act.exportObjectFact,
+ ImportPackageFact: act.importPackageFact,
+ ExportPackageFact: act.exportPackageFact,
+ AllObjectFacts: act.allObjectFacts,
+ AllPackageFacts: act.allPackageFacts,
+ }
+
+ act.pass = pass
+ act.r.passToPkgGuard.Lock()
+ act.r.passToPkg[pass] = act.pkg
+ act.r.passToPkgGuard.Unlock()
+
+ if act.pkg.IllTyped {
+ // It looks like there should be !pass.Analyzer.RunDespiteErrors
+ // but govet's cgocall crashes on it. Govet itself contains !pass.Analyzer.RunDespiteErrors condition here,
+ // but it exits before it if packages.Load have failed.
+ act.err = fmt.Errorf("analysis skipped: %w", &pkgerrors.IllTypedError{Pkg: act.pkg})
+ } else {
+ startedAt = time.Now()
+
+ act.result, act.err = pass.Analyzer.Run(pass)
+
+ analyzedIn := time.Since(startedAt)
+ if analyzedIn > time.Millisecond*10 {
+ debugf("%s: run analyzer in %s", act, analyzedIn)
+ }
+ }
+
+ // disallow calls after Run
+ pass.ExportObjectFact = nil
+ pass.ExportPackageFact = nil
+
+ err := act.persistFactsToCache()
+ if err != nil {
+ act.r.log.Warnf("Failed to persist facts to cache: %s", err)
+ }
+}
+
+// NOTE(ldez) altered: logger; serialize.
+// inheritFacts populates act.facts with
+// those it obtains from its dependency, dep.
+func inheritFacts(act, dep *action) {
+ const serialize = false
+
+ for key, fact := range dep.objectFacts {
+ // Filter out facts related to objects
+ // that are irrelevant downstream
+ // (equivalently: not in the compiler export data).
+ if !exportedFrom(key.obj, dep.pkg.Types) {
+ factsInheritDebugf("%v: discarding %T fact from %s for %s: %s", act, fact, dep, key.obj, fact)
+ continue
+ }
+
+ // Optionally serialize/deserialize fact
+ // to verify that it works across address spaces.
+ if serialize {
+ encodedFact, err := codeFact(fact)
+ if err != nil {
+ act.r.log.Panicf("internal error: encoding of %T fact failed in %v: %v", fact, act, err)
+ }
+ fact = encodedFact
+ }
+
+ factsInheritDebugf("%v: inherited %T fact for %s: %s", act, fact, key.obj, fact)
+
+ act.objectFacts[key] = fact
+ }
+
+ for key, fact := range dep.packageFacts {
+ // TODO: filter out facts that belong to
+ // packages not mentioned in the export data
+ // to prevent side channels.
+
+ // Optionally serialize/deserialize fact
+ // to verify that it works across address spaces
+ // and is deterministic.
+ if serialize {
+ encodedFact, err := codeFact(fact)
+ if err != nil {
+ act.r.log.Panicf("internal error: encoding of %T fact failed in %v", fact, act)
+ }
+ fact = encodedFact
+ }
+
+ factsInheritDebugf("%v: inherited %T fact for %s: %s", act, fact, key.pkg.Path(), fact)
+
+ act.packageFacts[key] = fact
+ }
+}
+
+// NOTE(ldez) no alteration.
+// codeFact encodes then decodes a fact,
+// just to exercise that logic.
+func codeFact(fact analysis.Fact) (analysis.Fact, error) {
+ // We encode facts one at a time.
+ // A real modular driver would emit all facts
+ // into one encoder to improve gob efficiency.
+ var buf bytes.Buffer
+ if err := gob.NewEncoder(&buf).Encode(fact); err != nil {
+ return nil, err
+ }
+
+ // Encode it twice and assert that we get the same bits.
+ // This helps detect nondeterministic Gob encoding (e.g. of maps).
+ var buf2 bytes.Buffer
+ if err := gob.NewEncoder(&buf2).Encode(fact); err != nil {
+ return nil, err
+ }
+ if !bytes.Equal(buf.Bytes(), buf2.Bytes()) {
+ return nil, fmt.Errorf("encoding of %T fact is nondeterministic", fact)
+ }
+
+ newFact := reflect.New(reflect.TypeOf(fact).Elem()).Interface().(analysis.Fact)
+ if err := gob.NewDecoder(&buf).Decode(newFact); err != nil {
+ return nil, err
+ }
+ return newFact, nil
+}
+
+// NOTE(ldez) no alteration.
+// exportedFrom reports whether obj may be visible to a package that imports pkg.
+// This includes not just the exported members of pkg, but also unexported
+// constants, types, fields, and methods, perhaps belonging to other packages,
+// that find there way into the API.
+// This is an over-approximation of the more accurate approach used by
+// gc export data, which walks the type graph, but it's much simpler.
+//
+// TODO(adonovan): do more accurate filtering by walking the type graph.
+func exportedFrom(obj types.Object, pkg *types.Package) bool {
+ switch obj := obj.(type) {
+ case *types.Func:
+ return obj.Exported() && obj.Pkg() == pkg ||
+ obj.Type().(*types.Signature).Recv() != nil
+ case *types.Var:
+ if obj.IsField() {
+ return true
+ }
+ // we can't filter more aggressively than this because we need
+ // to consider function parameters exported, but have no way
+ // of telling apart function parameters from local variables.
+ return obj.Pkg() == pkg
+ case *types.TypeName, *types.Const:
+ return true
+ }
+ return false // Nil, Builtin, Label, or PkgName
+}
+
+// NOTE(ldez) altered: logger; `act.factType`
+// importObjectFact implements Pass.ImportObjectFact.
+// Given a non-nil pointer ptr of type *T, where *T satisfies Fact,
+// importObjectFact copies the fact value to *ptr.
+func (act *action) importObjectFact(obj types.Object, ptr analysis.Fact) bool {
+ if obj == nil {
+ panic("nil object")
+ }
+ key := objectFactKey{obj, act.factType(ptr)}
+ if v, ok := act.objectFacts[key]; ok {
+ reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem())
+ return true
+ }
+ return false
+}
+
+// NOTE(ldez) altered: removes code related to `act.pass.ExportPackageFact`; logger; `act.factType`.
+// exportObjectFact implements Pass.ExportObjectFact.
+func (act *action) exportObjectFact(obj types.Object, fact analysis.Fact) {
+ if obj.Pkg() != act.pkg.Types {
+ act.r.log.Panicf("internal error: in analysis %s of package %s: Fact.Set(%s, %T): can't set facts on objects belonging another package",
+ act.a, act.pkg, obj, fact)
+ }
+
+ key := objectFactKey{obj, act.factType(fact)}
+ act.objectFacts[key] = fact // clobber any existing entry
+ if isFactsExportDebug {
+ objstr := types.ObjectString(obj, (*types.Package).Name)
+
+ factsExportDebugf("%s: object %s has fact %s\n",
+ act.pkg.Fset.Position(obj.Pos()), objstr, fact)
+ }
+}
+
+// NOTE(ldez) no alteration.
+func (act *action) allObjectFacts() []analysis.ObjectFact {
+ facts := make([]analysis.ObjectFact, 0, len(act.objectFacts))
+ for k := range act.objectFacts {
+ facts = append(facts, analysis.ObjectFact{Object: k.obj, Fact: act.objectFacts[k]})
+ }
+ return facts
+}
+
+// NOTE(ldez) altered: `act.factType`
+// importPackageFact implements Pass.ImportPackageFact.
+// Given a non-nil pointer ptr of type *T, where *T satisfies Fact,
+// fact copies the fact value to *ptr.
+func (act *action) importPackageFact(pkg *types.Package, ptr analysis.Fact) bool {
+ if pkg == nil {
+ panic("nil package")
+ }
+ key := packageFactKey{pkg, act.factType(ptr)}
+ if v, ok := act.packageFacts[key]; ok {
+ reflect.ValueOf(ptr).Elem().Set(reflect.ValueOf(v).Elem())
+ return true
+ }
+ return false
+}
+
+// NOTE(ldez) altered: removes code related to `act.pass.ExportPackageFact`; logger; `act.factType`.
+// exportPackageFact implements Pass.ExportPackageFact.
+func (act *action) exportPackageFact(fact analysis.Fact) {
+ key := packageFactKey{act.pass.Pkg, act.factType(fact)}
+ act.packageFacts[key] = fact // clobber any existing entry
+
+ factsDebugf("%s: package %s has fact %s\n",
+ act.pkg.Fset.Position(act.pass.Files[0].Pos()), act.pass.Pkg.Path(), fact)
+}
+
+// NOTE(ldez) altered: add receiver to handle logs.
+func (act *action) factType(fact analysis.Fact) reflect.Type {
+ t := reflect.TypeOf(fact)
+ if t.Kind() != reflect.Ptr {
+ act.r.log.Fatalf("invalid Fact type: got %T, want pointer", fact)
+ }
+ return t
+}
+
+// NOTE(ldez) no alteration.
+func (act *action) allPackageFacts() []analysis.PackageFact {
+ facts := make([]analysis.PackageFact, 0, len(act.packageFacts))
+ for k := range act.packageFacts {
+ facts = append(facts, analysis.PackageFact{Package: k.pkg, Fact: act.packageFacts[k]})
+ }
+ return facts
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_facts.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_facts.go
deleted file mode 100644
index 1d0fb974e..000000000
--- a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_facts.go
+++ /dev/null
@@ -1,125 +0,0 @@
-package goanalysis
-
-import (
- "bytes"
- "encoding/gob"
- "fmt"
- "go/types"
- "reflect"
-
- "golang.org/x/tools/go/analysis"
-)
-
-type objectFactKey struct {
- obj types.Object
- typ reflect.Type
-}
-
-type packageFactKey struct {
- pkg *types.Package
- typ reflect.Type
-}
-
-type Fact struct {
- Path string // non-empty only for object facts
- Fact analysis.Fact
-}
-
-// inheritFacts populates act.facts with
-// those it obtains from its dependency, dep.
-func inheritFacts(act, dep *action) {
- serialize := false
-
- for key, fact := range dep.objectFacts {
- // Filter out facts related to objects
- // that are irrelevant downstream
- // (equivalently: not in the compiler export data).
- if !exportedFrom(key.obj, dep.pkg.Types) {
- factsInheritDebugf("%v: discarding %T fact from %s for %s: %s", act, fact, dep, key.obj, fact)
- continue
- }
-
- // Optionally serialize/deserialize fact
- // to verify that it works across address spaces.
- if serialize {
- var err error
- fact, err = codeFact(fact)
- if err != nil {
- act.r.log.Panicf("internal error: encoding of %T fact failed in %v", fact, act)
- }
- }
-
- factsInheritDebugf("%v: inherited %T fact for %s: %s", act, fact, key.obj, fact)
- act.objectFacts[key] = fact
- }
-
- for key, fact := range dep.packageFacts {
- // TODO: filter out facts that belong to
- // packages not mentioned in the export data
- // to prevent side channels.
-
- // Optionally serialize/deserialize fact
- // to verify that it works across address spaces
- // and is deterministic.
- if serialize {
- var err error
- fact, err = codeFact(fact)
- if err != nil {
- act.r.log.Panicf("internal error: encoding of %T fact failed in %v", fact, act)
- }
- }
-
- factsInheritDebugf("%v: inherited %T fact for %s: %s", act, fact, key.pkg.Path(), fact)
- act.packageFacts[key] = fact
- }
-}
-
-// codeFact encodes then decodes a fact,
-// just to exercise that logic.
-func codeFact(fact analysis.Fact) (analysis.Fact, error) {
- // We encode facts one at a time.
- // A real modular driver would emit all facts
- // into one encoder to improve gob efficiency.
- var buf bytes.Buffer
- if err := gob.NewEncoder(&buf).Encode(fact); err != nil {
- return nil, err
- }
-
- // Encode it twice and assert that we get the same bits.
- // This helps detect nondeterministic Gob encoding (e.g. of maps).
- var buf2 bytes.Buffer
- if err := gob.NewEncoder(&buf2).Encode(fact); err != nil {
- return nil, err
- }
- if !bytes.Equal(buf.Bytes(), buf2.Bytes()) {
- return nil, fmt.Errorf("encoding of %T fact is nondeterministic", fact)
- }
-
- newFact := reflect.New(reflect.TypeOf(fact).Elem()).Interface().(analysis.Fact)
- if err := gob.NewDecoder(&buf).Decode(newFact); err != nil {
- return nil, err
- }
- return newFact, nil
-}
-
-// exportedFrom reports whether obj may be visible to a package that imports pkg.
-// This includes not just the exported members of pkg, but also unexported
-// constants, types, fields, and methods, perhaps belonging to other packages,
-// that find there way into the API.
-// This is an over-approximation of the more accurate approach used by
-// gc export data, which walks the type graph, but it's much simpler.
-//
-// TODO(adonovan): do more accurate filtering by walking the type graph.
-func exportedFrom(obj types.Object, pkg *types.Package) bool {
- switch obj := obj.(type) {
- case *types.Func:
- return obj.Exported() && obj.Pkg() == pkg ||
- obj.Type().(*types.Signature).Recv() != nil
- case *types.Var:
- return obj.Exported() && obj.Pkg() == pkg ||
- obj.IsField()
- case *types.TypeName, *types.Const:
- return true
- }
- return false // Nil, Builtin, Label, or PkgName
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_loadingpackage.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_loadingpackage.go
index 8abe2b6c1..44d676958 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_loadingpackage.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runner_loadingpackage.go
@@ -4,11 +4,13 @@ import (
"errors"
"fmt"
"go/ast"
+ "go/build"
"go/parser"
"go/scanner"
"go/types"
"os"
"reflect"
+ "strings"
"sync"
"sync/atomic"
@@ -152,10 +154,15 @@ func (lp *loadingPackage) loadFromSource(loadMode LoadMode) error {
return imp.Types, nil
}
- // TODO(ldez) temporary workaround
- rv, err := goutil.CleanRuntimeVersion()
- if err != nil {
- return err
+ var goVersion string
+ if pkg.Module != nil && pkg.Module.GoVersion != "" {
+ goVersion = "go" + strings.TrimPrefix(pkg.Module.GoVersion, "go")
+ } else {
+ var err error
+ goVersion, err = goutil.CleanRuntimeVersion()
+ if err != nil {
+ return err
+ }
}
tc := &types.Config{
@@ -163,7 +170,8 @@ func (lp *loadingPackage) loadFromSource(loadMode LoadMode) error {
Error: func(err error) {
pkg.Errors = append(pkg.Errors, lp.convertError(err)...)
},
- GoVersion: rv, // TODO(ldez) temporary workaround
+ GoVersion: goVersion,
+ Sizes: types.SizesFor(build.Default.Compiler, build.Default.GOARCH),
}
_ = types.NewChecker(tc, pkg.Fset, pkg.Types, pkg.TypesInfo).Files(pkg.Syntax)
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runners.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runners.go
index 79e52f52a..a9aee03a2 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runners.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runners.go
@@ -2,17 +2,10 @@ package goanalysis
import (
"fmt"
- "runtime"
- "sort"
- "strings"
- "sync"
- "sync/atomic"
- "time"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/packages"
- "github.com/golangci/golangci-lint/internal/pkgcache"
"github.com/golangci/golangci-lint/pkg/goanalysis/pkgerrors"
"github.com/golangci/golangci-lint/pkg/lint/linter"
"github.com/golangci/golangci-lint/pkg/logutils"
@@ -119,156 +112,3 @@ func buildIssues(diags []Diagnostic, linterNameBuilder func(diag *Diagnostic) st
}
return issues
}
-
-func getIssuesCacheKey(analyzers []*analysis.Analyzer) string {
- return "lint/result:" + analyzersHashID(analyzers)
-}
-
-func saveIssuesToCache(allPkgs []*packages.Package, pkgsFromCache map[*packages.Package]bool,
- issues []result.Issue, lintCtx *linter.Context, analyzers []*analysis.Analyzer,
-) {
- startedAt := time.Now()
- perPkgIssues := map[*packages.Package][]result.Issue{}
- for ind := range issues {
- i := &issues[ind]
- perPkgIssues[i.Pkg] = append(perPkgIssues[i.Pkg], *i)
- }
-
- var savedIssuesCount int64 = 0
- lintResKey := getIssuesCacheKey(analyzers)
-
- workerCount := runtime.GOMAXPROCS(-1)
- var wg sync.WaitGroup
- wg.Add(workerCount)
-
- pkgCh := make(chan *packages.Package, len(allPkgs))
- for i := 0; i < workerCount; i++ {
- go func() {
- defer wg.Done()
- for pkg := range pkgCh {
- pkgIssues := perPkgIssues[pkg]
- encodedIssues := make([]EncodingIssue, 0, len(pkgIssues))
- for ind := range pkgIssues {
- i := &pkgIssues[ind]
- encodedIssues = append(encodedIssues, EncodingIssue{
- FromLinter: i.FromLinter,
- Text: i.Text,
- Severity: i.Severity,
- Pos: i.Pos,
- LineRange: i.LineRange,
- Replacement: i.Replacement,
- ExpectNoLint: i.ExpectNoLint,
- ExpectedNoLintLinter: i.ExpectedNoLintLinter,
- })
- }
-
- atomic.AddInt64(&savedIssuesCount, int64(len(encodedIssues)))
- if err := lintCtx.PkgCache.Put(pkg, pkgcache.HashModeNeedAllDeps, lintResKey, encodedIssues); err != nil {
- lintCtx.Log.Infof("Failed to save package %s issues (%d) to cache: %s", pkg, len(pkgIssues), err)
- } else {
- issuesCacheDebugf("Saved package %s issues (%d) to cache", pkg, len(pkgIssues))
- }
- }
- }()
- }
-
- for _, pkg := range allPkgs {
- if pkgsFromCache[pkg] {
- continue
- }
-
- pkgCh <- pkg
- }
- close(pkgCh)
- wg.Wait()
-
- issuesCacheDebugf("Saved %d issues from %d packages to cache in %s", savedIssuesCount, len(allPkgs), time.Since(startedAt))
-}
-
-func loadIssuesFromCache(pkgs []*packages.Package, lintCtx *linter.Context,
- analyzers []*analysis.Analyzer,
-) (issuesFromCache []result.Issue, pkgsFromCache map[*packages.Package]bool) {
- startedAt := time.Now()
-
- lintResKey := getIssuesCacheKey(analyzers)
- type cacheRes struct {
- issues []result.Issue
- loadErr error
- }
- pkgToCacheRes := make(map[*packages.Package]*cacheRes, len(pkgs))
- for _, pkg := range pkgs {
- pkgToCacheRes[pkg] = &cacheRes{}
- }
-
- workerCount := runtime.GOMAXPROCS(-1)
- var wg sync.WaitGroup
- wg.Add(workerCount)
-
- pkgCh := make(chan *packages.Package, len(pkgs))
- for range workerCount {
- go func() {
- defer wg.Done()
- for pkg := range pkgCh {
- var pkgIssues []EncodingIssue
- err := lintCtx.PkgCache.Get(pkg, pkgcache.HashModeNeedAllDeps, lintResKey, &pkgIssues)
- cacheRes := pkgToCacheRes[pkg]
- cacheRes.loadErr = err
- if err != nil {
- continue
- }
- if len(pkgIssues) == 0 {
- continue
- }
-
- issues := make([]result.Issue, 0, len(pkgIssues))
- for i := range pkgIssues {
- issue := &pkgIssues[i]
- issues = append(issues, result.Issue{
- FromLinter: issue.FromLinter,
- Text: issue.Text,
- Severity: issue.Severity,
- Pos: issue.Pos,
- LineRange: issue.LineRange,
- Replacement: issue.Replacement,
- Pkg: pkg,
- ExpectNoLint: issue.ExpectNoLint,
- ExpectedNoLintLinter: issue.ExpectedNoLintLinter,
- })
- }
- cacheRes.issues = issues
- }
- }()
- }
-
- for _, pkg := range pkgs {
- pkgCh <- pkg
- }
- close(pkgCh)
- wg.Wait()
-
- loadedIssuesCount := 0
- pkgsFromCache = map[*packages.Package]bool{}
- for pkg, cacheRes := range pkgToCacheRes {
- if cacheRes.loadErr == nil {
- loadedIssuesCount += len(cacheRes.issues)
- pkgsFromCache[pkg] = true
- issuesFromCache = append(issuesFromCache, cacheRes.issues...)
- issuesCacheDebugf("Loaded package %s issues (%d) from cache", pkg, len(cacheRes.issues))
- } else {
- issuesCacheDebugf("Didn't load package %s issues from cache: %s", pkg, cacheRes.loadErr)
- }
- }
- issuesCacheDebugf("Loaded %d issues from cache in %s, analyzing %d/%d packages",
- loadedIssuesCount, time.Since(startedAt), len(pkgs)-len(pkgsFromCache), len(pkgs))
- return issuesFromCache, pkgsFromCache
-}
-
-func analyzersHashID(analyzers []*analysis.Analyzer) string {
- names := make([]string, 0, len(analyzers))
- for _, a := range analyzers {
- names = append(names, a.Name)
- }
-
- sort.Strings(names)
- return strings.Join(names, ",")
-}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runners_cache.go b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runners_cache.go
new file mode 100644
index 000000000..8c244688b
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/goanalysis/runners_cache.go
@@ -0,0 +1,172 @@
+package goanalysis
+
+import (
+ "runtime"
+ "sort"
+ "strings"
+ "sync"
+ "sync/atomic"
+ "time"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/packages"
+
+ "github.com/golangci/golangci-lint/internal/cache"
+ "github.com/golangci/golangci-lint/pkg/lint/linter"
+ "github.com/golangci/golangci-lint/pkg/result"
+)
+
+func saveIssuesToCache(allPkgs []*packages.Package, pkgsFromCache map[*packages.Package]bool,
+ issues []result.Issue, lintCtx *linter.Context, analyzers []*analysis.Analyzer,
+) {
+ startedAt := time.Now()
+ perPkgIssues := map[*packages.Package][]result.Issue{}
+ for ind := range issues {
+ i := &issues[ind]
+ perPkgIssues[i.Pkg] = append(perPkgIssues[i.Pkg], *i)
+ }
+
+ var savedIssuesCount int64 = 0
+ lintResKey := getIssuesCacheKey(analyzers)
+
+ workerCount := runtime.GOMAXPROCS(-1)
+ var wg sync.WaitGroup
+ wg.Add(workerCount)
+
+ pkgCh := make(chan *packages.Package, len(allPkgs))
+ for i := 0; i < workerCount; i++ {
+ go func() {
+ defer wg.Done()
+ for pkg := range pkgCh {
+ pkgIssues := perPkgIssues[pkg]
+ encodedIssues := make([]EncodingIssue, 0, len(pkgIssues))
+ for ind := range pkgIssues {
+ i := &pkgIssues[ind]
+ encodedIssues = append(encodedIssues, EncodingIssue{
+ FromLinter: i.FromLinter,
+ Text: i.Text,
+ Severity: i.Severity,
+ Pos: i.Pos,
+ LineRange: i.LineRange,
+ Replacement: i.Replacement,
+ ExpectNoLint: i.ExpectNoLint,
+ ExpectedNoLintLinter: i.ExpectedNoLintLinter,
+ })
+ }
+
+ atomic.AddInt64(&savedIssuesCount, int64(len(encodedIssues)))
+ if err := lintCtx.PkgCache.Put(pkg, cache.HashModeNeedAllDeps, lintResKey, encodedIssues); err != nil {
+ lintCtx.Log.Infof("Failed to save package %s issues (%d) to cache: %s", pkg, len(pkgIssues), err)
+ } else {
+ issuesCacheDebugf("Saved package %s issues (%d) to cache", pkg, len(pkgIssues))
+ }
+ }
+ }()
+ }
+
+ for _, pkg := range allPkgs {
+ if pkgsFromCache[pkg] {
+ continue
+ }
+
+ pkgCh <- pkg
+ }
+ close(pkgCh)
+ wg.Wait()
+
+ lintCtx.PkgCache.Close()
+
+ issuesCacheDebugf("Saved %d issues from %d packages to cache in %s", savedIssuesCount, len(allPkgs), time.Since(startedAt))
+}
+
+func loadIssuesFromCache(pkgs []*packages.Package, lintCtx *linter.Context,
+ analyzers []*analysis.Analyzer,
+) (issuesFromCache []result.Issue, pkgsFromCache map[*packages.Package]bool) {
+ startedAt := time.Now()
+
+ lintResKey := getIssuesCacheKey(analyzers)
+ type cacheRes struct {
+ issues []result.Issue
+ loadErr error
+ }
+ pkgToCacheRes := make(map[*packages.Package]*cacheRes, len(pkgs))
+ for _, pkg := range pkgs {
+ pkgToCacheRes[pkg] = &cacheRes{}
+ }
+
+ workerCount := runtime.GOMAXPROCS(-1)
+ var wg sync.WaitGroup
+ wg.Add(workerCount)
+
+ pkgCh := make(chan *packages.Package, len(pkgs))
+ for range workerCount {
+ go func() {
+ defer wg.Done()
+ for pkg := range pkgCh {
+ var pkgIssues []EncodingIssue
+ err := lintCtx.PkgCache.Get(pkg, cache.HashModeNeedAllDeps, lintResKey, &pkgIssues)
+ cacheRes := pkgToCacheRes[pkg]
+ cacheRes.loadErr = err
+ if err != nil {
+ continue
+ }
+ if len(pkgIssues) == 0 {
+ continue
+ }
+
+ issues := make([]result.Issue, 0, len(pkgIssues))
+ for i := range pkgIssues {
+ issue := &pkgIssues[i]
+ issues = append(issues, result.Issue{
+ FromLinter: issue.FromLinter,
+ Text: issue.Text,
+ Severity: issue.Severity,
+ Pos: issue.Pos,
+ LineRange: issue.LineRange,
+ Replacement: issue.Replacement,
+ Pkg: pkg,
+ ExpectNoLint: issue.ExpectNoLint,
+ ExpectedNoLintLinter: issue.ExpectedNoLintLinter,
+ })
+ }
+ cacheRes.issues = issues
+ }
+ }()
+ }
+
+ for _, pkg := range pkgs {
+ pkgCh <- pkg
+ }
+ close(pkgCh)
+ wg.Wait()
+
+ loadedIssuesCount := 0
+ pkgsFromCache = map[*packages.Package]bool{}
+ for pkg, cacheRes := range pkgToCacheRes {
+ if cacheRes.loadErr == nil {
+ loadedIssuesCount += len(cacheRes.issues)
+ pkgsFromCache[pkg] = true
+ issuesFromCache = append(issuesFromCache, cacheRes.issues...)
+ issuesCacheDebugf("Loaded package %s issues (%d) from cache", pkg, len(cacheRes.issues))
+ } else {
+ issuesCacheDebugf("Didn't load package %s issues from cache: %s", pkg, cacheRes.loadErr)
+ }
+ }
+ issuesCacheDebugf("Loaded %d issues from cache in %s, analyzing %d/%d packages",
+ loadedIssuesCount, time.Since(startedAt), len(pkgs)-len(pkgsFromCache), len(pkgs))
+ return issuesFromCache, pkgsFromCache
+}
+
+func getIssuesCacheKey(analyzers []*analysis.Analyzer) string {
+ return "lint/result:" + analyzersHashID(analyzers)
+}
+
+func analyzersHashID(analyzers []*analysis.Analyzer) string {
+ names := make([]string, 0, len(analyzers))
+ for _, a := range analyzers {
+ names = append(names, a.Name)
+ }
+
+ sort.Strings(names)
+ return strings.Join(names, ",")
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/cyclop/cyclop.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/cyclop/cyclop.go
index eb8c0577a..13baba5a6 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/cyclop/cyclop.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/cyclop/cyclop.go
@@ -30,7 +30,7 @@ func New(settings *config.Cyclop) *goanalysis.Linter {
return goanalysis.NewLinter(
a.Name,
- "checks function and package cyclomatic complexity",
+ a.Doc,
[]*analysis.Analyzer{a},
cfg,
).WithLoadMode(goanalysis.LoadModeTypesInfo)
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/ginkgolinter/ginkgolinter.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/ginkgolinter/ginkgolinter.go
index 54d207257..9873c9ba4 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/ginkgolinter/ginkgolinter.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/ginkgolinter/ginkgolinter.go
@@ -23,8 +23,9 @@ func New(settings *config.GinkgoLinterSettings) *goanalysis.Linter {
SuppressTypeCompare: types.Boolean(settings.SuppressTypeCompareWarning),
AllowHaveLen0: types.Boolean(settings.AllowHaveLenZero),
ForceExpectTo: types.Boolean(settings.ForceExpectTo),
- ValidateAsyncIntervals: types.Boolean(settings.ForbidSpecPollution),
- ForbidSpecPollution: types.Boolean(settings.ValidateAsyncIntervals),
+ ValidateAsyncIntervals: types.Boolean(settings.ValidateAsyncIntervals),
+ ForbidSpecPollution: types.Boolean(settings.ForbidSpecPollution),
+ ForceSucceedForFuncs: types.Boolean(settings.ForceSucceedForFuncs),
}
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecksumtype/gochecksumtype.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecksumtype/gochecksumtype.go
index 446f0e564..7aab0efeb 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecksumtype/gochecksumtype.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gochecksumtype/gochecksumtype.go
@@ -8,6 +8,7 @@ import (
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/packages"
+ "github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/goanalysis"
"github.com/golangci/golangci-lint/pkg/lint/linter"
"github.com/golangci/golangci-lint/pkg/result"
@@ -15,7 +16,7 @@ import (
const linterName = "gochecksumtype"
-func New() *goanalysis.Linter {
+func New(settings *config.GoChecksumTypeSettings) *goanalysis.Linter {
var mu sync.Mutex
var resIssues []goanalysis.Issue
@@ -23,7 +24,7 @@ func New() *goanalysis.Linter {
Name: linterName,
Doc: goanalysis.TheOnlyanalyzerDoc,
Run: func(pass *analysis.Pass) (any, error) {
- issues, err := runGoCheckSumType(pass)
+ issues, err := runGoCheckSumType(pass, settings)
if err != nil {
return nil, err
}
@@ -50,7 +51,7 @@ func New() *goanalysis.Linter {
}).WithLoadMode(goanalysis.LoadModeTypesInfo)
}
-func runGoCheckSumType(pass *analysis.Pass) ([]goanalysis.Issue, error) {
+func runGoCheckSumType(pass *analysis.Pass, settings *config.GoChecksumTypeSettings) ([]goanalysis.Issue, error) {
var resIssues []goanalysis.Issue
pkg := &packages.Package{
@@ -61,7 +62,8 @@ func runGoCheckSumType(pass *analysis.Pass) ([]goanalysis.Issue, error) {
}
var unknownError error
- errors := gochecksumtype.Run([]*packages.Package{pkg})
+ errors := gochecksumtype.Run([]*packages.Package{pkg},
+ gochecksumtype.Config{DefaultSignifiesExhaustive: settings.DefaultSignifiesExhaustive})
for _, err := range errors {
err, ok := err.(gochecksumtype.Error)
if !ok {
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic/gocritic.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic/gocritic.go
index 68cc338e4..194ea3535 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic/gocritic.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/gocritic/gocritic.go
@@ -8,6 +8,7 @@ import (
"path/filepath"
"reflect"
"runtime"
+ "slices"
"sort"
"strings"
"sync"
@@ -16,7 +17,6 @@ import (
gocriticlinter "github.com/go-critic/go-critic/linter"
_ "github.com/quasilyte/go-ruleguard/dsl"
"golang.org/x/exp/maps"
- "golang.org/x/exp/slices"
"golang.org/x/tools/go/analysis"
"github.com/golangci/golangci-lint/pkg/config"
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader/goheader.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader/goheader.go
index 14d517fb3..c6b1aae6b 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader/goheader.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goheader/goheader.go
@@ -51,7 +51,7 @@ func New(settings *config.GoHeaderSettings) *goanalysis.Linter {
return goanalysis.NewLinter(
linterName,
- "Checks is file header matches to pattern",
+ "Checks if file header matches to pattern",
[]*analysis.Analyzer{analyzer},
nil,
).WithIssuesReporter(func(*linter.Context) []goanalysis.Issue {
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goprintffuncname/goprintffuncname.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goprintffuncname/goprintffuncname.go
index 85154a9b3..c206ffaa3 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/goprintffuncname/goprintffuncname.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/goprintffuncname/goprintffuncname.go
@@ -1,7 +1,7 @@
package goprintffuncname
import (
- "github.com/jirfag/go-printf-func-name/pkg/analyzer"
+ "github.com/golangci/go-printf-func-name/pkg/analyzer"
"golang.org/x/tools/go/analysis"
"github.com/golangci/golangci-lint/pkg/goanalysis"
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/iface/iface.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/iface/iface.go
new file mode 100644
index 000000000..31f88160e
--- /dev/null
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/iface/iface.go
@@ -0,0 +1,57 @@
+package iface
+
+import (
+ "slices"
+
+ "github.com/uudashr/iface/identical"
+ "github.com/uudashr/iface/opaque"
+ "github.com/uudashr/iface/unused"
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/golangci/golangci-lint/pkg/config"
+ "github.com/golangci/golangci-lint/pkg/goanalysis"
+)
+
+func New(settings *config.IfaceSettings) *goanalysis.Linter {
+ var conf map[string]map[string]any
+ if settings != nil {
+ conf = settings.Settings
+ }
+
+ return goanalysis.NewLinter(
+ "iface",
+ "Detect the incorrect use of interfaces, helping developers avoid interface pollution.",
+ analyzersFromSettings(settings),
+ conf,
+ ).WithLoadMode(goanalysis.LoadModeTypesInfo)
+}
+
+func analyzersFromSettings(settings *config.IfaceSettings) []*analysis.Analyzer {
+ allAnalyzers := map[string]*analysis.Analyzer{
+ "identical": identical.Analyzer,
+ "unused": unused.Analyzer,
+ "opaque": opaque.Analyzer,
+ }
+
+ if settings == nil || len(settings.Enable) == 0 {
+ // Default enable `identical` analyzer only
+ return []*analysis.Analyzer{identical.Analyzer}
+ }
+
+ var analyzers []*analysis.Analyzer
+ for _, name := range uniqueNames(settings.Enable) {
+ if _, ok := allAnalyzers[name]; !ok {
+ // skip unknown analyzer
+ continue
+ }
+
+ analyzers = append(analyzers, allAnalyzers[name])
+ }
+
+ return analyzers
+}
+
+func uniqueNames(names []string) []string {
+ slices.Sort(names)
+ return slices.Compact(names)
+}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/staticcheck_common.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/staticcheck_common.go
index 958013d0d..e5a0e33b7 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/staticcheck_common.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/internal/staticcheck_common.go
@@ -9,11 +9,8 @@ import (
scconfig "honnef.co/go/tools/config"
"github.com/golangci/golangci-lint/pkg/config"
- "github.com/golangci/golangci-lint/pkg/logutils"
)
-var debugf = logutils.Debug(logutils.DebugKeyMegacheck)
-
func SetupStaticCheckAnalyzers(src []*lint.Analyzer, checks []string) []*analysis.Analyzer {
var names []string
for _, a := range src {
@@ -32,14 +29,6 @@ func SetupStaticCheckAnalyzers(src []*lint.Analyzer, checks []string) []*analysi
return ret
}
-func SetAnalyzerGoVersion(a *analysis.Analyzer, goVersion string) {
- if v := a.Flags.Lookup("go"); v != nil {
- if err := v.Value.Set(goVersion); err != nil {
- debugf("Failed to set go version: %s", err)
- }
- }
-}
-
func StaticCheckConfig(settings *config.StaticCheckSettings) *scconfig.Config {
var cfg *scconfig.Config
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/mnd/mnd.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/mnd/mnd.go
index 9aa8692ff..fe64653b9 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/mnd/mnd.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/mnd/mnd.go
@@ -12,27 +12,6 @@ func New(settings *config.MndSettings) *goanalysis.Linter {
return newMND(mnd.Analyzer, settings, nil)
}
-func NewGoMND(settings *config.GoMndSettings) *goanalysis.Linter {
- // shallow copy because mnd.Analyzer is a global variable.
- a := new(analysis.Analyzer)
- *a = *mnd.Analyzer
-
- // Used to force the analyzer name to use the same name as the linter.
- // This is required to avoid displaying the analyzer name inside the issue text.
- a.Name = "gomnd"
-
- var linterCfg map[string]map[string]any
-
- if settings != nil && len(settings.Settings) > 0 {
- // Convert deprecated setting.
- linterCfg = map[string]map[string]any{
- a.Name: settings.Settings["mnd"],
- }
- }
-
- return newMND(a, &settings.MndSettings, linterCfg)
-}
-
func newMND(a *analysis.Analyzer, settings *config.MndSettings, linterCfg map[string]map[string]any) *goanalysis.Linter {
if len(linterCfg) == 0 && settings != nil {
cfg := make(map[string]any)
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nakedret/nakedret.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nakedret/nakedret.go
index beabf2cd8..e69fa5e9f 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nakedret/nakedret.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nakedret/nakedret.go
@@ -14,7 +14,7 @@ func New(settings *config.NakedretSettings) *goanalysis.Linter {
maxLines = settings.MaxFuncLines
}
- a := nakedret.NakedReturnAnalyzer(maxLines)
+ a := nakedret.NakedReturnAnalyzer(maxLines, false)
return goanalysis.NewLinter(
a.Name,
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilnil/nilnil.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilnil/nilnil.go
index c9237035d..d8d677d99 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilnil/nilnil.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/nilnil/nilnil.go
@@ -1,8 +1,6 @@
package nilnil
import (
- "strings"
-
"github.com/Antonboom/nilnil/pkg/analyzer"
"golang.org/x/tools/go/analysis"
@@ -10,13 +8,16 @@ import (
"github.com/golangci/golangci-lint/pkg/goanalysis"
)
-func New(cfg *config.NilNilSettings) *goanalysis.Linter {
+func New(settings *config.NilNilSettings) *goanalysis.Linter {
a := analyzer.New()
cfgMap := make(map[string]map[string]any)
- if cfg != nil && len(cfg.CheckedTypes) != 0 {
+ if settings != nil {
cfgMap[a.Name] = map[string]any{
- "checked-types": strings.Join(cfg.CheckedTypes, ","),
+ "detect-opposite": settings.DetectOpposite,
+ }
+ if len(settings.CheckedTypes) != 0 {
+ cfgMap[a.Name]["checked-types"] = settings.CheckedTypes
}
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/execinquery/execinquery.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/recvcheck/recvcheck.go
index 3832873c6..8b030f15d 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/execinquery/execinquery.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/recvcheck/recvcheck.go
@@ -1,14 +1,14 @@
-package execinquery
+package recvcheck
import (
- "github.com/lufeee/execinquery"
+ "github.com/raeperd/recvcheck"
"golang.org/x/tools/go/analysis"
"github.com/golangci/golangci-lint/pkg/goanalysis"
)
func New() *goanalysis.Linter {
- a := execinquery.Analyzer
+ a := recvcheck.Analyzer
return goanalysis.NewLinter(
a.Name,
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive/revive.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive/revive.go
index 90ce15db6..056a258e0 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive/revive.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/revive/revive.go
@@ -184,8 +184,8 @@ func toIssue(pass *analysis.Pass, object *jsonObject) goanalysis.Issue {
// This function mimics the GetConfig function of revive.
// This allows to get default values and right types.
// https://github.com/golangci/golangci-lint/issues/1745
-// https://github.com/mgechev/revive/blob/v1.3.7/config/config.go#L217
-// https://github.com/mgechev/revive/blob/v1.3.7/config/config.go#L169-L174
+// https://github.com/mgechev/revive/blob/v1.5.0/config/config.go#L220
+// https://github.com/mgechev/revive/blob/v1.5.0/config/config.go#L172-L178
func getConfig(cfg *config.ReviveSettings) (*lint.Config, error) {
conf := defaultConfig()
@@ -284,7 +284,7 @@ func safeTomlSlice(r []any) []any {
}
// This element is not exported by revive, so we need copy the code.
-// Extracted from https://github.com/mgechev/revive/blob/v1.3.9/config/config.go#L15
+// Extracted from https://github.com/mgechev/revive/blob/v1.5.0/config/config.go#L16
var defaultRules = []lint.Rule{
&rule.VarDeclarationsRule{},
&rule.PackageCommentsRule{},
@@ -368,12 +368,14 @@ var allRules = append([]lint.Rule{
&rule.EnforceSliceStyleRule{},
&rule.MaxControlNestingRule{},
&rule.CommentsDensityRule{},
+ &rule.FileLengthLimitRule{},
+ &rule.FilenameFormatRule{},
}, defaultRules...)
const defaultConfidence = 0.8
// This element is not exported by revive, so we need copy the code.
-// Extracted from https://github.com/mgechev/revive/blob/v1.1.4/config/config.go#L145
+// Extracted from https://github.com/mgechev/revive/blob/v1.5.0/config/config.go#L183
func normalizeConfig(cfg *lint.Config) {
// NOTE(ldez): this custom section for golangci-lint should be kept.
// ---
@@ -419,7 +421,7 @@ func normalizeConfig(cfg *lint.Config) {
}
// This element is not exported by revive, so we need copy the code.
-// Extracted from https://github.com/mgechev/revive/blob/v1.1.4/config/config.go#L214
+// Extracted from https://github.com/mgechev/revive/blob/v1.5.0/config/config.go#L252
func defaultConfig() *lint.Config {
defaultConfig := lint.Config{
Confidence: defaultConfidence,
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tenv/tenv.go b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tenv/tenv.go
index b80a783b6..2fc247fab 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/golinters/tenv/tenv.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/golinters/tenv/tenv.go
@@ -25,5 +25,5 @@ func New(settings *config.TenvSettings) *goanalysis.Linter {
a.Doc,
[]*analysis.Analyzer{a},
cfg,
- ).WithLoadMode(goanalysis.LoadModeSyntax)
+ ).WithLoadMode(goanalysis.LoadModeTypesInfo)
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/context.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/context.go
index 160620338..d04a11b81 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/lint/context.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/context.go
@@ -4,7 +4,7 @@ import (
"context"
"fmt"
- "github.com/golangci/golangci-lint/internal/pkgcache"
+ "github.com/golangci/golangci-lint/internal/cache"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/exitcodes"
"github.com/golangci/golangci-lint/pkg/fsutils"
@@ -19,13 +19,13 @@ type ContextBuilder struct {
pkgLoader *PackageLoader
fileCache *fsutils.FileCache
- pkgCache *pkgcache.Cache
+ pkgCache *cache.Cache
loadGuard *load.Guard
}
func NewContextBuilder(cfg *config.Config, pkgLoader *PackageLoader,
- fileCache *fsutils.FileCache, pkgCache *pkgcache.Cache, loadGuard *load.Guard,
+ fileCache *fsutils.FileCache, pkgCache *cache.Cache, loadGuard *load.Guard,
) *ContextBuilder {
return &ContextBuilder{
cfg: cfg,
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/config.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/config.go
index 57c51fa75..6d6d4b17e 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/config.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/config.go
@@ -81,7 +81,7 @@ func (lc *Config) IsSlowLinter() bool {
}
func (lc *Config) WithLoadFiles() *Config {
- lc.LoadMode |= packages.NeedName | packages.NeedFiles | packages.NeedCompiledGoFiles
+ lc.LoadMode |= packages.NeedName | packages.NeedFiles | packages.NeedCompiledGoFiles | packages.NeedModule
return lc
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/context.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/context.go
index 5c03630b2..9f29b5c4c 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/context.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/linter/context.go
@@ -5,7 +5,7 @@ import (
"golang.org/x/tools/go/packages"
- "github.com/golangci/golangci-lint/internal/pkgcache"
+ "github.com/golangci/golangci-lint/internal/cache"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/fsutils"
"github.com/golangci/golangci-lint/pkg/goanalysis/load"
@@ -24,7 +24,7 @@ type Context struct {
FileCache *fsutils.FileCache
Log logutils.Log
- PkgCache *pkgcache.Cache
+ PkgCache *cache.Cache
LoadGuard *load.Guard
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/builder_linter.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/builder_linter.go
index c06cd9a03..d2a2dc3d0 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/builder_linter.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/lintersdb/builder_linter.go
@@ -23,7 +23,6 @@ import (
"github.com/golangci/golangci-lint/pkg/golinters/errchkjson"
"github.com/golangci/golangci-lint/pkg/golinters/errname"
"github.com/golangci/golangci-lint/pkg/golinters/errorlint"
- "github.com/golangci/golangci-lint/pkg/golinters/execinquery"
"github.com/golangci/golangci-lint/pkg/golinters/exhaustive"
"github.com/golangci/golangci-lint/pkg/golinters/exhaustruct"
"github.com/golangci/golangci-lint/pkg/golinters/exportloopref"
@@ -55,6 +54,7 @@ import (
"github.com/golangci/golangci-lint/pkg/golinters/gosmopolitan"
"github.com/golangci/golangci-lint/pkg/golinters/govet"
"github.com/golangci/golangci-lint/pkg/golinters/grouper"
+ "github.com/golangci/golangci-lint/pkg/golinters/iface"
"github.com/golangci/golangci-lint/pkg/golinters/importas"
"github.com/golangci/golangci-lint/pkg/golinters/inamedparam"
"github.com/golangci/golangci-lint/pkg/golinters/ineffassign"
@@ -85,6 +85,7 @@ import (
"github.com/golangci/golangci-lint/pkg/golinters/promlinter"
"github.com/golangci/golangci-lint/pkg/golinters/protogetter"
"github.com/golangci/golangci-lint/pkg/golinters/reassign"
+ "github.com/golangci/golangci-lint/pkg/golinters/recvcheck"
"github.com/golangci/golangci-lint/pkg/golinters/revive"
"github.com/golangci/golangci-lint/pkg/golinters/rowserrcheck"
"github.com/golangci/golangci-lint/pkg/golinters/sloglint"
@@ -134,7 +135,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
// When a new linter is added the version in `WithSince(...)` must be the next minor version of golangci-lint.
return []*linter.Config{
linter.NewConfig(asasalint.New(&cfg.LintersSettings.Asasalint)).
- WithSince("1.47.0").
+ WithSince("v1.47.0").
WithPresets(linter.PresetBugs).
WithLoadForGoAnalysis().
WithURL("https://github.com/alingse/asasalint"),
@@ -145,7 +146,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithURL("https://github.com/tdakkota/asciicheck"),
linter.NewConfig(bidichk.New(&cfg.LintersSettings.BiDiChk)).
- WithSince("1.43.0").
+ WithSince("v1.43.0").
WithPresets(linter.PresetBugs).
WithURL("https://github.com/breml/bidichk"),
@@ -162,7 +163,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithURL("https://github.com/lasiar/canonicalHeader"),
linter.NewConfig(containedctx.New()).
- WithSince("1.44.0").
+ WithSince("v1.44.0").
WithLoadForGoAnalysis().
WithPresets(linter.PresetStyle).
WithURL("https://github.com/sivchari/containedctx"),
@@ -213,7 +214,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithURL("https://github.com/mibk/dupl"),
linter.NewConfig(dupword.New(&cfg.LintersSettings.DupWord)).
- WithSince("1.50.0").
+ WithSince("v1.50.0").
WithPresets(linter.PresetComment).
WithURL("https://github.com/Abirdcfly/dupword"),
@@ -231,7 +232,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithURL("https://github.com/kisielk/errcheck"),
linter.NewConfig(errchkjson.New(&cfg.LintersSettings.ErrChkJSON)).
- WithSince("1.44.0").
+ WithSince("v1.44.0").
WithPresets(linter.PresetBugs).
WithLoadForGoAnalysis().
WithURL("https://github.com/breml/errchkjson"),
@@ -248,12 +249,12 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithLoadForGoAnalysis().
WithURL("https://github.com/polyfloyd/go-errorlint"),
- linter.NewConfig(execinquery.New()).
+ linter.NewConfig(linter.NewNoopDeprecated("execinquery", cfg, linter.DeprecationError)).
WithSince("v1.46.0").
WithPresets(linter.PresetSQL).
WithLoadForGoAnalysis().
WithURL("https://github.com/1uf3/execinquery").
- DeprecatedWarning("The repository of the linter has been archived by the owner.", "v1.58.0", ""),
+ DeprecatedError("The repository of the linter has been archived by the owner.", "v1.58.0", ""),
linter.NewConfig(exhaustive.New(&cfg.LintersSettings.Exhaustive)).
WithSince(" v1.28.0").
@@ -297,7 +298,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithURL("https://github.com/gostaticanalysis/forcetypeassert"),
linter.NewConfig(fatcontext.New()).
- WithSince("1.58.0").
+ WithSince("v1.58.0").
WithPresets(linter.PresetPerformance).
WithLoadForGoAnalysis().
WithURL("https://github.com/Crocmagnon/fatcontext"),
@@ -334,7 +335,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithSince("v1.12.0").
WithPresets(linter.PresetStyle),
- linter.NewConfig(gochecksumtype.New()).
+ linter.NewConfig(gochecksumtype.New(&cfg.LintersSettings.GoChecksumType)).
WithSince("v1.55.0").
WithPresets(linter.PresetBugs).
WithLoadForGoAnalysis().
@@ -416,11 +417,11 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithPresets(linter.PresetStyle).
WithURL("https://github.com/tommy-muehle/go-mnd"),
- linter.NewConfig(mnd.NewGoMND(&cfg.LintersSettings.Gomnd)).
+ linter.NewConfig(linter.NewNoopDeprecated("gomnd", cfg, linter.DeprecationError)).
WithSince("v1.22.0").
WithPresets(linter.PresetStyle).
WithURL("https://github.com/tommy-muehle/go-mnd").
- DeprecatedWarning("The linter has been renamed.", "v1.58.0", "mnd"),
+ DeprecatedError("The linter has been renamed.", "v1.58.0", "mnd"),
linter.NewConfig(gomoddirectives.New(&cfg.LintersSettings.GoModDirectives)).
WithSince("v1.39.0").
@@ -435,7 +436,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
linter.NewConfig(goprintffuncname.New()).
WithSince("v1.23.0").
WithPresets(linter.PresetStyle).
- WithURL("https://github.com/jirfag/go-printf-func-name"),
+ WithURL("https://github.com/golangci/go-printf-func-name"),
linter.NewConfig(gosec.New(&cfg.LintersSettings.Gosec)).
WithSince("v1.0.0").
@@ -477,6 +478,12 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithURL("https://github.com/esimonov/ifshort").
DeprecatedError("The repository of the linter has been deprecated by the owner.", "v1.48.0", ""),
+ linter.NewConfig(iface.New(&cfg.LintersSettings.Iface)).
+ WithSince("v1.62.0").
+ WithLoadForGoAnalysis().
+ WithPresets(linter.PresetStyle).
+ WithURL("https://github.com/uudashr/iface"),
+
linter.NewConfig(importas.New(&cfg.LintersSettings.ImportAs)).
WithSince("v1.38.0").
WithPresets(linter.PresetStyle).
@@ -652,11 +659,17 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithURL("https://github.com/ghostiam/protogetter"),
linter.NewConfig(reassign.New(&cfg.LintersSettings.Reassign)).
- WithSince("1.49.0").
+ WithSince("v1.49.0").
WithPresets(linter.PresetBugs).
WithLoadForGoAnalysis().
WithURL("https://github.com/curioswitch/go-reassign"),
+ linter.NewConfig(recvcheck.New()).
+ WithSince("v1.62.0").
+ WithPresets(linter.PresetBugs).
+ WithLoadForGoAnalysis().
+ WithURL("https://github.com/raeperd/recvcheck"),
+
linter.NewConfig(revive.New(&cfg.LintersSettings.Revive)).
WithSince("v1.37.0").
WithPresets(linter.PresetStyle, linter.PresetMetaLinter).
@@ -699,7 +712,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithLoadForGoAnalysis().
WithPresets(linter.PresetBugs, linter.PresetMetaLinter).
WithAlternativeNames(megacheckName).
- WithURL("https://staticcheck.io/"),
+ WithURL("https://staticcheck.dev/"),
linter.NewConfig(linter.NewNoopDeprecated("structcheck", cfg, linter.DeprecationError)).
WithSince("v1.0.0").
@@ -838,6 +851,6 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
WithSince("v1.26.0").
WithPresets(linter.PresetStyle).
WithAutoFix().
- WithURL("https://github.com/golangci/golangci-lint/blob/master/pkg/golinters/nolintlint/README.md"),
+ WithURL("https://github.com/golangci/golangci-lint/tree/master/pkg/golinters/nolintlint/internal"),
}, nil
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go b/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go
index c3b983ff6..2c47c7166 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/lint/runner.go
@@ -115,17 +115,17 @@ func (r *Runner) Run(ctx context.Context, linters []*linter.Config) ([]result.Is
)
for _, lc := range linters {
- sw.TrackStage(lc.Name(), func() {
- linterIssues, err := r.runLinterSafe(ctx, r.lintCtx, lc)
- if err != nil {
- lintErrors = errors.Join(lintErrors, fmt.Errorf("can't run linter %s", lc.Linter.Name()), err)
- r.Log.Warnf("Can't run linter %s: %v", lc.Linter.Name(), err)
+ linterIssues, err := timeutils.TrackStage(sw, lc.Name(), func() ([]result.Issue, error) {
+ return r.runLinterSafe(ctx, r.lintCtx, lc)
+ })
+ if err != nil {
+ lintErrors = errors.Join(lintErrors, fmt.Errorf("can't run linter %s", lc.Linter.Name()), err)
+ r.Log.Warnf("Can't run linter %s: %v", lc.Linter.Name(), err)
- return
- }
+ continue
+ }
- issues = append(issues, linterIssues...)
- })
+ issues = append(issues, linterIssues...)
}
return r.processLintResults(issues), lintErrors
@@ -188,9 +188,7 @@ func (r *Runner) processLintResults(inIssues []result.Issue) []result.Issue {
// finalize processors: logging, clearing, no heavy work here
for _, p := range r.Processors {
- sw.TrackStage(p.Name(), func() {
- p.Finish()
- })
+ sw.TrackStage(p.Name(), p.Finish)
}
if issuesBefore != issuesAfter {
@@ -216,10 +214,8 @@ func (r *Runner) printPerProcessorStat(stat map[string]processorStat) {
func (r *Runner) processIssues(issues []result.Issue, sw *timeutils.Stopwatch, statPerProcessor map[string]processorStat) []result.Issue {
for _, p := range r.Processors {
- var newIssues []result.Issue
- var err error
- sw.TrackStage(p.Name(), func() {
- newIssues, err = p.Process(issues)
+ newIssues, err := timeutils.TrackStage(sw, p.Name(), func() ([]result.Issue, error) {
+ return p.Process(issues)
})
if err != nil {
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go b/vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go
index e4bb98109..3c27e2557 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/logutils/logutils.go
@@ -60,11 +60,10 @@ const (
)
const (
- DebugKeyGoCritic = "gocritic" // Debugs `go-critic` linter.
- DebugKeyGovet = "govet" // Debugs `govet` linter.
- DebugKeyMegacheck = "megacheck" // Debugs `staticcheck` related linters.
- DebugKeyNolint = "nolint" // Debugs a filter excluding issues by `//nolint` comments.
- DebugKeyRevive = "revive" // Debugs `revive` linter.
+ DebugKeyGoCritic = "gocritic" // Debugs `go-critic` linter.
+ DebugKeyGovet = "govet" // Debugs `govet` linter.
+ DebugKeyNolint = "nolint" // Debugs a filter excluding issues by `//nolint` comments.
+ DebugKeyRevive = "revive" // Debugs `revive` linter.
)
func getEnabledDebugs() map[string]bool {
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go b/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go
index 50d6dcff3..b65339682 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/printers/codeclimate.go
@@ -12,9 +12,10 @@ const defaultCodeClimateSeverity = "critical"
// CodeClimateIssue is a subset of the Code Climate spec.
// https://github.com/codeclimate/platform/blob/master/spec/analyzers/SPEC.md#data-types
// It is just enough to support GitLab CI Code Quality.
-// https://docs.gitlab.com/ee/user/project/merge_requests/code_quality.html
+// https://docs.gitlab.com/ee/ci/testing/code_quality.html#implement-a-custom-tool
type CodeClimateIssue struct {
Description string `json:"description"`
+ CheckName string `json:"check_name"`
Severity string `json:"severity,omitempty"`
Fingerprint string `json:"fingerprint"`
Location struct {
@@ -35,10 +36,13 @@ func NewCodeClimate(w io.Writer) *CodeClimate {
func (p CodeClimate) Print(issues []result.Issue) error {
codeClimateIssues := make([]CodeClimateIssue, 0, len(issues))
+
for i := range issues {
issue := &issues[i]
+
codeClimateIssue := CodeClimateIssue{}
codeClimateIssue.Description = issue.Description()
+ codeClimateIssue.CheckName = issue.FromLinter
codeClimateIssue.Location.Path = issue.Pos.Filename
codeClimateIssue.Location.Lines.Begin = issue.Pos.Line
codeClimateIssue.Fingerprint = issue.Fingerprint()
@@ -51,9 +55,5 @@ func (p CodeClimate) Print(issues []result.Issue) error {
codeClimateIssues = append(codeClimateIssues, codeClimateIssue)
}
- err := json.NewEncoder(p.w).Encode(codeClimateIssues)
- if err != nil {
- return err
- }
- return nil
+ return json.NewEncoder(p.w).Encode(codeClimateIssues)
}
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/fixer.go b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/fixer.go
index 4915dc479..764af5a92 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/result/processors/fixer.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/result/processors/fixer.go
@@ -8,7 +8,7 @@ import (
"sort"
"strings"
- "github.com/golangci/golangci-lint/internal/robustio"
+ "github.com/golangci/golangci-lint/internal/go/robustio"
"github.com/golangci/golangci-lint/pkg/config"
"github.com/golangci/golangci-lint/pkg/fsutils"
"github.com/golangci/golangci-lint/pkg/logutils"
@@ -56,9 +56,8 @@ func (p Fixer) Process(issues []result.Issue) ([]result.Issue, error) {
}
for file, issuesToFix := range issuesToFixPerFile {
- var err error
- p.sw.TrackStage("all", func() {
- err = p.fixIssuesInFile(file, issuesToFix)
+ err := p.sw.TrackStageErr("all", func() error {
+ return p.fixIssuesInFile(file, issuesToFix)
})
if err != nil {
p.log.Errorf("Failed to fix issues in file %s: %s", file, err)
diff --git a/vendor/github.com/golangci/golangci-lint/pkg/timeutils/stopwatch.go b/vendor/github.com/golangci/golangci-lint/pkg/timeutils/stopwatch.go
index d944dea2e..95b16de9f 100644
--- a/vendor/github.com/golangci/golangci-lint/pkg/timeutils/stopwatch.go
+++ b/vendor/github.com/golangci/golangci-lint/pkg/timeutils/stopwatch.go
@@ -114,3 +114,25 @@ func (s *Stopwatch) TrackStage(name string, f func()) {
s.stages[name] += time.Since(startedAt)
s.mu.Unlock()
}
+
+func (s *Stopwatch) TrackStageErr(name string, f func() error) error {
+ startedAt := time.Now()
+ err := f()
+
+ s.mu.Lock()
+ s.stages[name] += time.Since(startedAt)
+ s.mu.Unlock()
+
+ return err
+}
+
+func TrackStage[T any](s *Stopwatch, name string, f func() (T, error)) (T, error) {
+ var result T
+ var err error
+
+ s.TrackStage(name, func() {
+ result, err = f()
+ })
+
+ return result, err
+}
diff --git a/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker.go b/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker.go
index dff391797..98f28e9a6 100644
--- a/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker.go
+++ b/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker.go
@@ -84,7 +84,8 @@ func walkThroughEmbeddedInterfaces(sel *types.Selection) ([]types.Type, bool) {
}
func getTypeAtFieldIndex(startingAt types.Type, fieldIndex int) types.Type {
- t := maybeUnname(maybeDereference(startingAt))
+ t := maybeDereference(maybeUnalias(startingAt))
+ t = maybeUnname(maybeUnalias(t))
s, ok := t.(*types.Struct)
if !ok {
panic(fmt.Sprintf("cannot get Field of a type that is not a struct, got a %T", t))
diff --git a/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker_121.go b/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker_121.go
new file mode 100644
index 000000000..f2df6849b
--- /dev/null
+++ b/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker_121.go
@@ -0,0 +1,10 @@
+//go:build !go1.22
+// +build !go1.22
+
+package errcheck
+
+import "go/types"
+
+func maybeUnalias(t types.Type) types.Type {
+ return t
+}
diff --git a/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker_122.go b/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker_122.go
new file mode 100644
index 000000000..cbff3cd43
--- /dev/null
+++ b/vendor/github.com/kisielk/errcheck/errcheck/embedded_walker_122.go
@@ -0,0 +1,10 @@
+//go:build go1.22
+// +build go1.22
+
+package errcheck
+
+import "go/types"
+
+func maybeUnalias(t types.Type) types.Type {
+ return types.Unalias(t)
+}
diff --git a/vendor/github.com/kisielk/errcheck/errcheck/errcheck.go b/vendor/github.com/kisielk/errcheck/errcheck/errcheck.go
index d61d348f7..a7a2a30bf 100644
--- a/vendor/github.com/kisielk/errcheck/errcheck/errcheck.go
+++ b/vendor/github.com/kisielk/errcheck/errcheck/errcheck.go
@@ -80,7 +80,7 @@ func (r *Result) Append(other Result) {
r.UncheckedErrors = append(r.UncheckedErrors, other.UncheckedErrors...)
}
-// Returns the unique errors that have been accumulated. Duplicates may occur
+// Unique returns the unique errors that have been accumulated. Duplicates may occur
// when a file containing an unchecked error belongs to > 1 package.
//
// The method receiver remains unmodified after the call to Unique.
@@ -338,7 +338,7 @@ func (v *visitor) selectorName(call *ast.CallExpr) string {
// then just that function's fullName is returned.
//
// Otherwise, we walk through all the potentially embedded interfaces of the receiver
-// the collect a list of type-qualified function names that we will check.
+// to collect a list of type-qualified function names that we will check.
func (v *visitor) namesForExcludeCheck(call *ast.CallExpr) []string {
sel, fn, ok := v.selectorAndFunc(call)
if !ok {
@@ -351,7 +351,7 @@ func (v *visitor) namesForExcludeCheck(call *ast.CallExpr) []string {
}
// This will be missing for functions without a receiver (like fmt.Printf),
- // so just fall back to the the function's fullName in that case.
+ // so just fall back to the function's fullName in that case.
selection, ok := v.typesInfo.Selections[sel]
if !ok {
return []string{name}
@@ -420,9 +420,9 @@ func (v *visitor) ignoreCall(call *ast.CallExpr) bool {
// 2. x.y.f()
var id *ast.Ident
switch exp := call.Fun.(type) {
- case (*ast.Ident):
+ case *ast.Ident:
id = exp
- case (*ast.SelectorExpr):
+ case *ast.SelectorExpr:
id = exp.Sel
default:
// eg: *ast.SliceExpr, *ast.IndexExpr
@@ -586,26 +586,38 @@ func (v *visitor) Visit(node ast.Node) ast.Visitor {
for _, name := range vspec.Names {
lhs = append(lhs, ast.Expr(name))
}
- v.checkAssignment(lhs, vspec.Values)
+ followed := v.checkAssignment(lhs, vspec.Values)
+ if !followed {
+ return nil
+ }
}
case *ast.AssignStmt:
- v.checkAssignment(stmt.Lhs, stmt.Rhs)
+ followed := v.checkAssignment(stmt.Lhs, stmt.Rhs)
+ if !followed {
+ return nil
+ }
+
+ case *ast.TypeAssertExpr:
+ v.checkAssertExpr(stmt)
+ return nil
default:
}
return v
}
-func (v *visitor) checkAssignment(lhs, rhs []ast.Expr) {
+// checkAssignment checks the assignment statement and returns a boolean value
+// indicating whether to continue checking the substructure in AssignStmt or not
+func (v *visitor) checkAssignment(lhs, rhs []ast.Expr) (followed bool) {
if len(rhs) == 1 {
// single value on rhs; check against lhs identifiers
if call, ok := rhs[0].(*ast.CallExpr); ok {
if !v.blank {
- return
+ return true
}
if v.ignoreCall(call) {
- return
+ return true
}
isError := v.errorsByArg(call)
for i := 0; i < len(lhs); i++ {
@@ -619,11 +631,11 @@ func (v *visitor) checkAssignment(lhs, rhs []ast.Expr) {
}
} else if assert, ok := rhs[0].(*ast.TypeAssertExpr); ok {
if !v.asserts {
- return
+ return false
}
if assert.Type == nil {
// type switch
- return
+ return false
}
if len(lhs) < 2 {
// assertion result not read
@@ -632,6 +644,7 @@ func (v *visitor) checkAssignment(lhs, rhs []ast.Expr) {
// assertion result ignored
v.addErrorAtPosition(id.NamePos, nil)
}
+ return false
}
} else {
// multiple value on rhs; in this case a call can't return
@@ -661,6 +674,19 @@ func (v *visitor) checkAssignment(lhs, rhs []ast.Expr) {
}
}
}
+
+ return true
+}
+
+func (v *visitor) checkAssertExpr(expr *ast.TypeAssertExpr) {
+ if !v.asserts {
+ return
+ }
+ if expr.Type == nil {
+ // type switch
+ return
+ }
+ v.addErrorAtPosition(expr.Pos(), nil)
}
func isErrorType(t types.Type) bool {
diff --git a/vendor/github.com/kisielk/errcheck/errcheck/excludes.go b/vendor/github.com/kisielk/errcheck/errcheck/excludes.go
index a783b5a76..450b798e4 100644
--- a/vendor/github.com/kisielk/errcheck/errcheck/excludes.go
+++ b/vendor/github.com/kisielk/errcheck/errcheck/excludes.go
@@ -47,6 +47,11 @@ var DefaultExcludedSymbols = []string{
// hash
"(hash.Hash).Write",
+
+ // hash/maphash
+ "(*hash/maphash.Hash).Write",
+ "(*hash/maphash.Hash).WriteByte",
+ "(*hash/maphash.Hash).WriteString",
}
// ReadExcludes reads an excludes file, a newline delimited file that lists
diff --git a/vendor/github.com/lasiar/canonicalheader/.golangci.yaml b/vendor/github.com/lasiar/canonicalheader/.golangci.yaml
index 5652c8d6c..997ec0cb0 100644
--- a/vendor/github.com/lasiar/canonicalheader/.golangci.yaml
+++ b/vendor/github.com/lasiar/canonicalheader/.golangci.yaml
@@ -40,6 +40,9 @@ linters:
fast: false
enable:
+ # Globals and init() are not ok, because this linter is used on golangci-lint.
+ - gochecknoglobals
+ - gochecknoinits
# Check for pass []any as any in variadic func(...any).
# Rare case but saved me from debugging a few times.
- asasalint
@@ -58,6 +61,12 @@ linters:
# Check whether the function uses a non-inherited context.
- contextcheck
+ # After go 1.22 there is no need to copy the variable in a for range loop.
+ - copyloopvar
+
+ # Find duplicate words, rare.
+ - dupword
+
# Check for two durations multiplied together.
- durationcheck
@@ -73,6 +82,10 @@ linters:
# Checks for pointers to enclosing loop variables.
- exportloopref
+
+ # Imports order.
+ - gci
+
# As you already know I'm a co-author. It would be strange to not use
# one of my warmly loved projects.
- gocritic
@@ -104,9 +117,15 @@ linters:
# Last week I caught a bug with it.
- ineffassign
+ # Range over int, works after go 1.22.
+ - intrange
+
# Fix all the misspells, amazing thing.
- misspell
+ # Reports wrong mirror patterns of bytes/strings usage.
+ - mirror
+
# Finds naked/bare returns and requires change them.
- nakedret
@@ -121,6 +140,9 @@ linters:
# Better not to have //nolint: at all ;)
- nolintlint
+ # aiming at usages of fmt.Sprintf which have faster alternatives.
+ - perfsprint
+
# Finds slices that could potentially be pre-allocated.
# Small performance win + cleaner code.
- prealloc
@@ -144,6 +166,9 @@ linters:
- rowserrcheck
- sqlclosecheck
+ # Ensure consistent code style when using log/slog.
+ - sloglint
+
# I have found that it's not the same as staticcheck binary :\
- staticcheck
@@ -156,6 +181,7 @@ linters:
# Test-related checks. All of them are good.
- tenv
- testableexamples
+ - testifylint
- thelper
- tparallel
@@ -185,9 +211,6 @@ linters:
# (c) Bryan C. Mills / https://github.com/bcmills
- cyclop
- # Abandoned, replaced by `unused`.
- - deadcode
-
# Check declaration order of types, consts, vars and funcs.
# I like it but I don't use it.
- decorder
@@ -202,9 +225,6 @@ linters:
# Tool for code clone detection.
- dupl
- # Find duplicate words, rare.
- - dupword
-
# I'm fine to check the error from json.Marshal ¯\_(ツ)_/¯
- errchkjson
@@ -213,7 +233,6 @@ linters:
# Forces to handle more cases. Cool but noisy.
- exhaustive
- - exhaustivestruct # Deprecated, replaced by check below.
- exhaustruct
# Forbids some identifiers. I don't have a case for it.
@@ -225,19 +244,12 @@ linters:
# I might have long but a simple function.
- funlen
- # Imports order. I do this manually ¯\_(ツ)_/¯
- - gci
-
# I'm not a fan of ginkgo and gomega packages.
- ginkgolinter
# Checks that compiler directive comments (//go:) are valid. Rare.
- gocheckcompilerdirectives
- # Globals and init() are ok.
- - gochecknoglobals
- - gochecknoinits
-
# Same as `cyclop` linter (see above)
- gocognit
- goconst
@@ -247,16 +259,13 @@ linters:
- godox
# Check the error handling expressions. Too noisy.
- - goerr113
+ - err113
# I don't use file headers.
- goheader
- # 1st Go linter, deprecated :( use `revive`.
- - golint
-
# Reports magic consts. Might be noisy but still good.
- - gomnd
+ - mnd
# Allowed/blocked packages to import. I prefer to do it manually.
- gomodguard
@@ -267,9 +276,6 @@ linters:
# Groupt declarations, I prefer manually.
- grouper
- # Deprecated.
- - ifshort
-
# Checks imports aliases, rare.
- importas
@@ -291,9 +297,6 @@ linters:
# Slice declarations with non-zero initial length. Not my case.
- makezero
- # Deprecated. Use govet `fieldalignment`.
- - maligned
-
# Enforce tags in un/marshaled structs. Cool but not my case.
- musttag
@@ -306,9 +309,6 @@ linters:
# Reports all named returns, not that bad.
- nonamedreturns
- # Deprecated. Replaced by `revive`.
- - nosnakecase
-
# Finds misuse of Sprintf with host:port in a URL. Cool but rare.
- nosprintfhostport
@@ -335,6 +335,12 @@ linters:
- wsl
linters-settings:
+ gci:
+ sections:
+ - standard
+ - default
+ - localmodule
+
revive:
# Maximum number of open files at the same time.
# See https://github.com/mgechev/revive#command-line-flags
@@ -378,7 +384,6 @@ linters-settings:
- name: banned-characters
severity: warning
disabled: false
- arguments: ["Ω", "Σ", "σ", "7"]
# https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#bare-return
- name: bare-return
severity: warning
@@ -404,9 +409,6 @@ linters-settings:
- name: comment-spacings
severity: warning
disabled: false
- arguments:
- - mypragma
- - otherpragma
# https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#confusing-naming
- name: confusing-naming
severity: warning
@@ -444,8 +446,6 @@ linters-settings:
- name: defer
severity: warning
disabled: false
- arguments:
- - ["call-chain", "loop"]
# https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#dot-imports
- name: dot-imports
severity: warning
@@ -470,8 +470,6 @@ linters-settings:
- name: enforce-map-style
severity: warning
disabled: false
- arguments:
- - "make"
# https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#error-naming
- name: error-naming
severity: warning
@@ -530,8 +528,6 @@ linters-settings:
- name: indent-error-flow
severity: warning
disabled: false
- arguments:
- - "preserveScope"
# https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#import-alias-naming
- name: import-alias-naming
severity: warning
@@ -542,9 +538,6 @@ linters-settings:
- name: imports-blacklist
severity: warning
disabled: false
- arguments:
- - "crypto/md5"
- - "crypto/sha1"
# https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#import-shadowing
- name: import-shadowing
severity: warning
@@ -632,8 +625,6 @@ linters-settings:
- name: superfluous-else
severity: warning
disabled: false
- arguments:
- - "preserveScope"
# https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#time-equal
- name: time-equal
severity: warning
@@ -646,10 +637,6 @@ linters-settings:
- name: var-naming
severity: warning
disabled: false
- arguments:
- - ["ID"] # AllowList
- - ["VM"] # DenyList
- - - upperCaseConst: true
# https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#var-declaration
- name: var-declaration
severity: warning
@@ -670,9 +657,6 @@ linters-settings:
- name: unhandled-error
severity: warning
disabled: false
- arguments:
- - "fmt.Printf"
- - "myFunction"
# https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#unnecessary-stmt
- name: unnecessary-stmt
severity: warning
@@ -691,8 +675,6 @@ linters-settings:
- name: unused-receiver
severity: warning
disabled: false
- arguments:
- - allowRegex: "^_"
# https://github.com/mgechev/revive/blob/master/RULES_DESCRIPTIONS.md#useless-break
- name: useless-break
severity: warning
diff --git a/vendor/github.com/lasiar/canonicalheader/analyzer.go b/vendor/github.com/lasiar/canonicalheader/analyzer.go
index d3fb529eb..258ebdfd4 100644
--- a/vendor/github.com/lasiar/canonicalheader/analyzer.go
+++ b/vendor/github.com/lasiar/canonicalheader/analyzer.go
@@ -18,6 +18,7 @@ const (
name = "Header"
)
+//nolint:gochecknoglobals // struct is not big, can be skipped.
var Analyzer = &analysis.Analyzer{
Name: "canonicalheader",
Doc: "canonicalheader checks whether net/http.Header uses canonical header",
diff --git a/vendor/github.com/lufeee/execinquery/.gitignore b/vendor/github.com/lufeee/execinquery/.gitignore
deleted file mode 100644
index 00e1abc31..000000000
--- a/vendor/github.com/lufeee/execinquery/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-execinquery
diff --git a/vendor/github.com/lufeee/execinquery/README.md b/vendor/github.com/lufeee/execinquery/README.md
deleted file mode 100644
index 38fa7c8b9..000000000
--- a/vendor/github.com/lufeee/execinquery/README.md
+++ /dev/null
@@ -1,76 +0,0 @@
-# execinquery - a simple query string checker in Query function
-[![Go Matrix](https://github.com/lufeee/execinquery/actions/workflows/go-cross.yml/badge.svg?branch=main)](https://github.com/lufeee/execinquery/actions/workflows/go-cross.yml)
-[![Go lint](https://github.com/lufeee/execinquery/actions/workflows/lint.yml/badge.svg?branch=main)](https://github.com/lufeee/execinquery/actions/workflows/lint.yml)
-[![MIT License](http://img.shields.io/badge/license-MIT-blue.svg?style=flat)](LICENSE)
-## About
-
-execinquery is a linter about query string checker in Query function which reads your Go src files and
-warnings it finds.
-
-## Installation
-
-```sh
-go install github.com/lufeee/execinquery/cmd/execinquery
-```
-
-## Usage
-```go
-package main
-
-import (
- "database/sql"
- "log"
-)
-
-func main() {
- db, err := sql.Open("mysql", "test:test@tcp(test:3306)/test")
- if err != nil {
- log.Fatal("Database Connect Error: ", err)
- }
- defer db.Close()
-
- test := "a"
- _, err = db.Query("Update * FROM hoge where id = ?", test)
- if err != nil {
- log.Fatal("Query Error: ", err)
- }
-
-}
-```
-
-```console
-go vet -vettool=$(which execinquery) ./...
-
-# command-line-arguments
-./a.go:16:11: Use Exec instead of Query to execute `UPDATE` query
-```
-
-## CI
-
-### CircleCI
-
-```yaml
-- run:
- name: install execinquery
- command: go install github.com/lufeee/execinquery
-
-- run:
- name: run execinquery
- command: go vet -vettool=`which execinquery` ./...
-```
-
-### GitHub Actions
-
-```yaml
-- name: install execinquery
- run: go install github.com/lufeee/execinquery
-
-- name: run execinquery
- run: go vet -vettool=`which execinquery` ./...
-```
-
-### License
-
-MIT license.
-
-<hr>
diff --git a/vendor/github.com/lufeee/execinquery/execinquery.go b/vendor/github.com/lufeee/execinquery/execinquery.go
deleted file mode 100644
index c37dc1701..000000000
--- a/vendor/github.com/lufeee/execinquery/execinquery.go
+++ /dev/null
@@ -1,135 +0,0 @@
-package execinquery
-
-import (
- "go/ast"
- "regexp"
- "strings"
-
- "golang.org/x/tools/go/analysis"
- "golang.org/x/tools/go/analysis/passes/inspect"
- "golang.org/x/tools/go/ast/inspector"
-)
-
-const doc = "execinquery is a linter about query string checker in Query function which reads your Go src files and warning it finds"
-
-// Analyzer is checking database/sql pkg Query's function
-var Analyzer = &analysis.Analyzer{
- Name: "execinquery",
- Doc: doc,
- Run: newLinter().run,
- Requires: []*analysis.Analyzer{
- inspect.Analyzer,
- },
-}
-
-type linter struct {
- commentExp *regexp.Regexp
- multilineCommentExp *regexp.Regexp
-}
-
-func newLinter() *linter {
- return &linter{
- commentExp: regexp.MustCompile(`--[^\n]*\n`),
- multilineCommentExp: regexp.MustCompile(`(?s)/\*.*?\*/`),
- }
-}
-
-func (l linter) run(pass *analysis.Pass) (interface{}, error) {
- result := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
-
- nodeFilter := []ast.Node{
- (*ast.CallExpr)(nil),
- }
-
- result.Preorder(nodeFilter, func(n ast.Node) {
- switch n := n.(type) {
- case *ast.CallExpr:
- selector, ok := n.Fun.(*ast.SelectorExpr)
- if !ok {
- return
- }
-
- if pass.TypesInfo == nil || pass.TypesInfo.Uses[selector.Sel] == nil || pass.TypesInfo.Uses[selector.Sel].Pkg() == nil {
- return
- }
-
- if "database/sql" != pass.TypesInfo.Uses[selector.Sel].Pkg().Path() {
- return
- }
-
- if !strings.Contains(selector.Sel.Name, "Query") {
- return
- }
-
- replacement := "Exec"
- var i int // the index of the query argument
- if strings.Contains(selector.Sel.Name, "Context") {
- replacement += "Context"
- i = 1
- }
-
- if len(n.Args) <= i {
- return
- }
-
- query := l.getQueryString(n.Args[i])
- if query == "" {
- return
- }
-
- query = strings.TrimSpace(l.cleanValue(query))
- parts := strings.SplitN(query, " ", 2)
- cmd := strings.ToUpper(parts[0])
-
- if strings.HasPrefix(cmd, "SELECT") {
- return
- }
-
- pass.Reportf(n.Fun.Pos(), "Use %s instead of %s to execute `%s` query", replacement, selector.Sel.Name, cmd)
- }
- })
-
- return nil, nil
-}
-
-func (l linter) cleanValue(s string) string {
- v := strings.NewReplacer(`"`, "", "`", "").Replace(s)
-
- v = l.multilineCommentExp.ReplaceAllString(v, "")
-
- return l.commentExp.ReplaceAllString(v, "")
-}
-
-func (l linter) getQueryString(exp interface{}) string {
- switch e := exp.(type) {
- case *ast.AssignStmt:
- var v string
- for _, stmt := range e.Rhs {
- v += l.cleanValue(l.getQueryString(stmt))
- }
- return v
-
- case *ast.BasicLit:
- return e.Value
-
- case *ast.ValueSpec:
- var v string
- for _, value := range e.Values {
- v += l.cleanValue(l.getQueryString(value))
- }
- return v
-
- case *ast.Ident:
- if e.Obj == nil {
- return ""
- }
- return l.getQueryString(e.Obj.Decl)
-
- case *ast.BinaryExpr:
- v := l.cleanValue(l.getQueryString(e.X))
- v += l.cleanValue(l.getQueryString(e.Y))
- return v
- }
-
- return ""
-}
diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_table.go b/vendor/github.com/mattn/go-runewidth/runewidth_table.go
index e5d890c26..ad025ad52 100644
--- a/vendor/github.com/mattn/go-runewidth/runewidth_table.go
+++ b/vendor/github.com/mattn/go-runewidth/runewidth_table.go
@@ -4,20 +4,21 @@ package runewidth
var combining = table{
{0x0300, 0x036F}, {0x0483, 0x0489}, {0x07EB, 0x07F3},
- {0x0C00, 0x0C00}, {0x0C04, 0x0C04}, {0x0D00, 0x0D01},
- {0x135D, 0x135F}, {0x1A7F, 0x1A7F}, {0x1AB0, 0x1AC0},
- {0x1B6B, 0x1B73}, {0x1DC0, 0x1DF9}, {0x1DFB, 0x1DFF},
+ {0x0C00, 0x0C00}, {0x0C04, 0x0C04}, {0x0CF3, 0x0CF3},
+ {0x0D00, 0x0D01}, {0x135D, 0x135F}, {0x1A7F, 0x1A7F},
+ {0x1AB0, 0x1ACE}, {0x1B6B, 0x1B73}, {0x1DC0, 0x1DFF},
{0x20D0, 0x20F0}, {0x2CEF, 0x2CF1}, {0x2DE0, 0x2DFF},
{0x3099, 0x309A}, {0xA66F, 0xA672}, {0xA674, 0xA67D},
{0xA69E, 0xA69F}, {0xA6F0, 0xA6F1}, {0xA8E0, 0xA8F1},
{0xFE20, 0xFE2F}, {0x101FD, 0x101FD}, {0x10376, 0x1037A},
- {0x10EAB, 0x10EAC}, {0x10F46, 0x10F50}, {0x11300, 0x11301},
- {0x1133B, 0x1133C}, {0x11366, 0x1136C}, {0x11370, 0x11374},
- {0x16AF0, 0x16AF4}, {0x1D165, 0x1D169}, {0x1D16D, 0x1D172},
+ {0x10EAB, 0x10EAC}, {0x10F46, 0x10F50}, {0x10F82, 0x10F85},
+ {0x11300, 0x11301}, {0x1133B, 0x1133C}, {0x11366, 0x1136C},
+ {0x11370, 0x11374}, {0x16AF0, 0x16AF4}, {0x1CF00, 0x1CF2D},
+ {0x1CF30, 0x1CF46}, {0x1D165, 0x1D169}, {0x1D16D, 0x1D172},
{0x1D17B, 0x1D182}, {0x1D185, 0x1D18B}, {0x1D1AA, 0x1D1AD},
{0x1D242, 0x1D244}, {0x1E000, 0x1E006}, {0x1E008, 0x1E018},
{0x1E01B, 0x1E021}, {0x1E023, 0x1E024}, {0x1E026, 0x1E02A},
- {0x1E8D0, 0x1E8D6},
+ {0x1E08F, 0x1E08F}, {0x1E8D0, 0x1E8D6},
}
var doublewidth = table{
@@ -33,33 +34,34 @@ var doublewidth = table{
{0x2753, 0x2755}, {0x2757, 0x2757}, {0x2795, 0x2797},
{0x27B0, 0x27B0}, {0x27BF, 0x27BF}, {0x2B1B, 0x2B1C},
{0x2B50, 0x2B50}, {0x2B55, 0x2B55}, {0x2E80, 0x2E99},
- {0x2E9B, 0x2EF3}, {0x2F00, 0x2FD5}, {0x2FF0, 0x2FFB},
- {0x3000, 0x303E}, {0x3041, 0x3096}, {0x3099, 0x30FF},
- {0x3105, 0x312F}, {0x3131, 0x318E}, {0x3190, 0x31E3},
- {0x31F0, 0x321E}, {0x3220, 0x3247}, {0x3250, 0x4DBF},
- {0x4E00, 0xA48C}, {0xA490, 0xA4C6}, {0xA960, 0xA97C},
- {0xAC00, 0xD7A3}, {0xF900, 0xFAFF}, {0xFE10, 0xFE19},
- {0xFE30, 0xFE52}, {0xFE54, 0xFE66}, {0xFE68, 0xFE6B},
- {0xFF01, 0xFF60}, {0xFFE0, 0xFFE6}, {0x16FE0, 0x16FE4},
- {0x16FF0, 0x16FF1}, {0x17000, 0x187F7}, {0x18800, 0x18CD5},
- {0x18D00, 0x18D08}, {0x1B000, 0x1B11E}, {0x1B150, 0x1B152},
- {0x1B164, 0x1B167}, {0x1B170, 0x1B2FB}, {0x1F004, 0x1F004},
- {0x1F0CF, 0x1F0CF}, {0x1F18E, 0x1F18E}, {0x1F191, 0x1F19A},
- {0x1F200, 0x1F202}, {0x1F210, 0x1F23B}, {0x1F240, 0x1F248},
- {0x1F250, 0x1F251}, {0x1F260, 0x1F265}, {0x1F300, 0x1F320},
- {0x1F32D, 0x1F335}, {0x1F337, 0x1F37C}, {0x1F37E, 0x1F393},
- {0x1F3A0, 0x1F3CA}, {0x1F3CF, 0x1F3D3}, {0x1F3E0, 0x1F3F0},
- {0x1F3F4, 0x1F3F4}, {0x1F3F8, 0x1F43E}, {0x1F440, 0x1F440},
- {0x1F442, 0x1F4FC}, {0x1F4FF, 0x1F53D}, {0x1F54B, 0x1F54E},
- {0x1F550, 0x1F567}, {0x1F57A, 0x1F57A}, {0x1F595, 0x1F596},
- {0x1F5A4, 0x1F5A4}, {0x1F5FB, 0x1F64F}, {0x1F680, 0x1F6C5},
- {0x1F6CC, 0x1F6CC}, {0x1F6D0, 0x1F6D2}, {0x1F6D5, 0x1F6D7},
- {0x1F6EB, 0x1F6EC}, {0x1F6F4, 0x1F6FC}, {0x1F7E0, 0x1F7EB},
- {0x1F90C, 0x1F93A}, {0x1F93C, 0x1F945}, {0x1F947, 0x1F978},
- {0x1F97A, 0x1F9CB}, {0x1F9CD, 0x1F9FF}, {0x1FA70, 0x1FA74},
- {0x1FA78, 0x1FA7A}, {0x1FA80, 0x1FA86}, {0x1FA90, 0x1FAA8},
- {0x1FAB0, 0x1FAB6}, {0x1FAC0, 0x1FAC2}, {0x1FAD0, 0x1FAD6},
- {0x20000, 0x2FFFD}, {0x30000, 0x3FFFD},
+ {0x2E9B, 0x2EF3}, {0x2F00, 0x2FD5}, {0x2FF0, 0x303E},
+ {0x3041, 0x3096}, {0x3099, 0x30FF}, {0x3105, 0x312F},
+ {0x3131, 0x318E}, {0x3190, 0x31E3}, {0x31EF, 0x321E},
+ {0x3220, 0x3247}, {0x3250, 0x4DBF}, {0x4E00, 0xA48C},
+ {0xA490, 0xA4C6}, {0xA960, 0xA97C}, {0xAC00, 0xD7A3},
+ {0xF900, 0xFAFF}, {0xFE10, 0xFE19}, {0xFE30, 0xFE52},
+ {0xFE54, 0xFE66}, {0xFE68, 0xFE6B}, {0xFF01, 0xFF60},
+ {0xFFE0, 0xFFE6}, {0x16FE0, 0x16FE4}, {0x16FF0, 0x16FF1},
+ {0x17000, 0x187F7}, {0x18800, 0x18CD5}, {0x18D00, 0x18D08},
+ {0x1AFF0, 0x1AFF3}, {0x1AFF5, 0x1AFFB}, {0x1AFFD, 0x1AFFE},
+ {0x1B000, 0x1B122}, {0x1B132, 0x1B132}, {0x1B150, 0x1B152},
+ {0x1B155, 0x1B155}, {0x1B164, 0x1B167}, {0x1B170, 0x1B2FB},
+ {0x1F004, 0x1F004}, {0x1F0CF, 0x1F0CF}, {0x1F18E, 0x1F18E},
+ {0x1F191, 0x1F19A}, {0x1F200, 0x1F202}, {0x1F210, 0x1F23B},
+ {0x1F240, 0x1F248}, {0x1F250, 0x1F251}, {0x1F260, 0x1F265},
+ {0x1F300, 0x1F320}, {0x1F32D, 0x1F335}, {0x1F337, 0x1F37C},
+ {0x1F37E, 0x1F393}, {0x1F3A0, 0x1F3CA}, {0x1F3CF, 0x1F3D3},
+ {0x1F3E0, 0x1F3F0}, {0x1F3F4, 0x1F3F4}, {0x1F3F8, 0x1F43E},
+ {0x1F440, 0x1F440}, {0x1F442, 0x1F4FC}, {0x1F4FF, 0x1F53D},
+ {0x1F54B, 0x1F54E}, {0x1F550, 0x1F567}, {0x1F57A, 0x1F57A},
+ {0x1F595, 0x1F596}, {0x1F5A4, 0x1F5A4}, {0x1F5FB, 0x1F64F},
+ {0x1F680, 0x1F6C5}, {0x1F6CC, 0x1F6CC}, {0x1F6D0, 0x1F6D2},
+ {0x1F6D5, 0x1F6D7}, {0x1F6DC, 0x1F6DF}, {0x1F6EB, 0x1F6EC},
+ {0x1F6F4, 0x1F6FC}, {0x1F7E0, 0x1F7EB}, {0x1F7F0, 0x1F7F0},
+ {0x1F90C, 0x1F93A}, {0x1F93C, 0x1F945}, {0x1F947, 0x1F9FF},
+ {0x1FA70, 0x1FA7C}, {0x1FA80, 0x1FA88}, {0x1FA90, 0x1FABD},
+ {0x1FABF, 0x1FAC5}, {0x1FACE, 0x1FADB}, {0x1FAE0, 0x1FAE8},
+ {0x1FAF0, 0x1FAF8}, {0x20000, 0x2FFFD}, {0x30000, 0x3FFFD},
}
var ambiguous = table{
@@ -154,43 +156,43 @@ var neutral = table{
{0x0402, 0x040F}, {0x0450, 0x0450}, {0x0452, 0x052F},
{0x0531, 0x0556}, {0x0559, 0x058A}, {0x058D, 0x058F},
{0x0591, 0x05C7}, {0x05D0, 0x05EA}, {0x05EF, 0x05F4},
- {0x0600, 0x061C}, {0x061E, 0x070D}, {0x070F, 0x074A},
- {0x074D, 0x07B1}, {0x07C0, 0x07FA}, {0x07FD, 0x082D},
- {0x0830, 0x083E}, {0x0840, 0x085B}, {0x085E, 0x085E},
- {0x0860, 0x086A}, {0x08A0, 0x08B4}, {0x08B6, 0x08C7},
- {0x08D3, 0x0983}, {0x0985, 0x098C}, {0x098F, 0x0990},
- {0x0993, 0x09A8}, {0x09AA, 0x09B0}, {0x09B2, 0x09B2},
- {0x09B6, 0x09B9}, {0x09BC, 0x09C4}, {0x09C7, 0x09C8},
- {0x09CB, 0x09CE}, {0x09D7, 0x09D7}, {0x09DC, 0x09DD},
- {0x09DF, 0x09E3}, {0x09E6, 0x09FE}, {0x0A01, 0x0A03},
- {0x0A05, 0x0A0A}, {0x0A0F, 0x0A10}, {0x0A13, 0x0A28},
- {0x0A2A, 0x0A30}, {0x0A32, 0x0A33}, {0x0A35, 0x0A36},
- {0x0A38, 0x0A39}, {0x0A3C, 0x0A3C}, {0x0A3E, 0x0A42},
- {0x0A47, 0x0A48}, {0x0A4B, 0x0A4D}, {0x0A51, 0x0A51},
- {0x0A59, 0x0A5C}, {0x0A5E, 0x0A5E}, {0x0A66, 0x0A76},
- {0x0A81, 0x0A83}, {0x0A85, 0x0A8D}, {0x0A8F, 0x0A91},
- {0x0A93, 0x0AA8}, {0x0AAA, 0x0AB0}, {0x0AB2, 0x0AB3},
- {0x0AB5, 0x0AB9}, {0x0ABC, 0x0AC5}, {0x0AC7, 0x0AC9},
- {0x0ACB, 0x0ACD}, {0x0AD0, 0x0AD0}, {0x0AE0, 0x0AE3},
- {0x0AE6, 0x0AF1}, {0x0AF9, 0x0AFF}, {0x0B01, 0x0B03},
- {0x0B05, 0x0B0C}, {0x0B0F, 0x0B10}, {0x0B13, 0x0B28},
- {0x0B2A, 0x0B30}, {0x0B32, 0x0B33}, {0x0B35, 0x0B39},
- {0x0B3C, 0x0B44}, {0x0B47, 0x0B48}, {0x0B4B, 0x0B4D},
- {0x0B55, 0x0B57}, {0x0B5C, 0x0B5D}, {0x0B5F, 0x0B63},
- {0x0B66, 0x0B77}, {0x0B82, 0x0B83}, {0x0B85, 0x0B8A},
- {0x0B8E, 0x0B90}, {0x0B92, 0x0B95}, {0x0B99, 0x0B9A},
- {0x0B9C, 0x0B9C}, {0x0B9E, 0x0B9F}, {0x0BA3, 0x0BA4},
- {0x0BA8, 0x0BAA}, {0x0BAE, 0x0BB9}, {0x0BBE, 0x0BC2},
- {0x0BC6, 0x0BC8}, {0x0BCA, 0x0BCD}, {0x0BD0, 0x0BD0},
- {0x0BD7, 0x0BD7}, {0x0BE6, 0x0BFA}, {0x0C00, 0x0C0C},
- {0x0C0E, 0x0C10}, {0x0C12, 0x0C28}, {0x0C2A, 0x0C39},
- {0x0C3D, 0x0C44}, {0x0C46, 0x0C48}, {0x0C4A, 0x0C4D},
- {0x0C55, 0x0C56}, {0x0C58, 0x0C5A}, {0x0C60, 0x0C63},
+ {0x0600, 0x070D}, {0x070F, 0x074A}, {0x074D, 0x07B1},
+ {0x07C0, 0x07FA}, {0x07FD, 0x082D}, {0x0830, 0x083E},
+ {0x0840, 0x085B}, {0x085E, 0x085E}, {0x0860, 0x086A},
+ {0x0870, 0x088E}, {0x0890, 0x0891}, {0x0898, 0x0983},
+ {0x0985, 0x098C}, {0x098F, 0x0990}, {0x0993, 0x09A8},
+ {0x09AA, 0x09B0}, {0x09B2, 0x09B2}, {0x09B6, 0x09B9},
+ {0x09BC, 0x09C4}, {0x09C7, 0x09C8}, {0x09CB, 0x09CE},
+ {0x09D7, 0x09D7}, {0x09DC, 0x09DD}, {0x09DF, 0x09E3},
+ {0x09E6, 0x09FE}, {0x0A01, 0x0A03}, {0x0A05, 0x0A0A},
+ {0x0A0F, 0x0A10}, {0x0A13, 0x0A28}, {0x0A2A, 0x0A30},
+ {0x0A32, 0x0A33}, {0x0A35, 0x0A36}, {0x0A38, 0x0A39},
+ {0x0A3C, 0x0A3C}, {0x0A3E, 0x0A42}, {0x0A47, 0x0A48},
+ {0x0A4B, 0x0A4D}, {0x0A51, 0x0A51}, {0x0A59, 0x0A5C},
+ {0x0A5E, 0x0A5E}, {0x0A66, 0x0A76}, {0x0A81, 0x0A83},
+ {0x0A85, 0x0A8D}, {0x0A8F, 0x0A91}, {0x0A93, 0x0AA8},
+ {0x0AAA, 0x0AB0}, {0x0AB2, 0x0AB3}, {0x0AB5, 0x0AB9},
+ {0x0ABC, 0x0AC5}, {0x0AC7, 0x0AC9}, {0x0ACB, 0x0ACD},
+ {0x0AD0, 0x0AD0}, {0x0AE0, 0x0AE3}, {0x0AE6, 0x0AF1},
+ {0x0AF9, 0x0AFF}, {0x0B01, 0x0B03}, {0x0B05, 0x0B0C},
+ {0x0B0F, 0x0B10}, {0x0B13, 0x0B28}, {0x0B2A, 0x0B30},
+ {0x0B32, 0x0B33}, {0x0B35, 0x0B39}, {0x0B3C, 0x0B44},
+ {0x0B47, 0x0B48}, {0x0B4B, 0x0B4D}, {0x0B55, 0x0B57},
+ {0x0B5C, 0x0B5D}, {0x0B5F, 0x0B63}, {0x0B66, 0x0B77},
+ {0x0B82, 0x0B83}, {0x0B85, 0x0B8A}, {0x0B8E, 0x0B90},
+ {0x0B92, 0x0B95}, {0x0B99, 0x0B9A}, {0x0B9C, 0x0B9C},
+ {0x0B9E, 0x0B9F}, {0x0BA3, 0x0BA4}, {0x0BA8, 0x0BAA},
+ {0x0BAE, 0x0BB9}, {0x0BBE, 0x0BC2}, {0x0BC6, 0x0BC8},
+ {0x0BCA, 0x0BCD}, {0x0BD0, 0x0BD0}, {0x0BD7, 0x0BD7},
+ {0x0BE6, 0x0BFA}, {0x0C00, 0x0C0C}, {0x0C0E, 0x0C10},
+ {0x0C12, 0x0C28}, {0x0C2A, 0x0C39}, {0x0C3C, 0x0C44},
+ {0x0C46, 0x0C48}, {0x0C4A, 0x0C4D}, {0x0C55, 0x0C56},
+ {0x0C58, 0x0C5A}, {0x0C5D, 0x0C5D}, {0x0C60, 0x0C63},
{0x0C66, 0x0C6F}, {0x0C77, 0x0C8C}, {0x0C8E, 0x0C90},
{0x0C92, 0x0CA8}, {0x0CAA, 0x0CB3}, {0x0CB5, 0x0CB9},
{0x0CBC, 0x0CC4}, {0x0CC6, 0x0CC8}, {0x0CCA, 0x0CCD},
- {0x0CD5, 0x0CD6}, {0x0CDE, 0x0CDE}, {0x0CE0, 0x0CE3},
- {0x0CE6, 0x0CEF}, {0x0CF1, 0x0CF2}, {0x0D00, 0x0D0C},
+ {0x0CD5, 0x0CD6}, {0x0CDD, 0x0CDE}, {0x0CE0, 0x0CE3},
+ {0x0CE6, 0x0CEF}, {0x0CF1, 0x0CF3}, {0x0D00, 0x0D0C},
{0x0D0E, 0x0D10}, {0x0D12, 0x0D44}, {0x0D46, 0x0D48},
{0x0D4A, 0x0D4F}, {0x0D54, 0x0D63}, {0x0D66, 0x0D7F},
{0x0D81, 0x0D83}, {0x0D85, 0x0D96}, {0x0D9A, 0x0DB1},
@@ -200,7 +202,7 @@ var neutral = table{
{0x0E01, 0x0E3A}, {0x0E3F, 0x0E5B}, {0x0E81, 0x0E82},
{0x0E84, 0x0E84}, {0x0E86, 0x0E8A}, {0x0E8C, 0x0EA3},
{0x0EA5, 0x0EA5}, {0x0EA7, 0x0EBD}, {0x0EC0, 0x0EC4},
- {0x0EC6, 0x0EC6}, {0x0EC8, 0x0ECD}, {0x0ED0, 0x0ED9},
+ {0x0EC6, 0x0EC6}, {0x0EC8, 0x0ECE}, {0x0ED0, 0x0ED9},
{0x0EDC, 0x0EDF}, {0x0F00, 0x0F47}, {0x0F49, 0x0F6C},
{0x0F71, 0x0F97}, {0x0F99, 0x0FBC}, {0x0FBE, 0x0FCC},
{0x0FCE, 0x0FDA}, {0x1000, 0x10C5}, {0x10C7, 0x10C7},
@@ -212,20 +214,19 @@ var neutral = table{
{0x12D8, 0x1310}, {0x1312, 0x1315}, {0x1318, 0x135A},
{0x135D, 0x137C}, {0x1380, 0x1399}, {0x13A0, 0x13F5},
{0x13F8, 0x13FD}, {0x1400, 0x169C}, {0x16A0, 0x16F8},
- {0x1700, 0x170C}, {0x170E, 0x1714}, {0x1720, 0x1736},
- {0x1740, 0x1753}, {0x1760, 0x176C}, {0x176E, 0x1770},
- {0x1772, 0x1773}, {0x1780, 0x17DD}, {0x17E0, 0x17E9},
- {0x17F0, 0x17F9}, {0x1800, 0x180E}, {0x1810, 0x1819},
- {0x1820, 0x1878}, {0x1880, 0x18AA}, {0x18B0, 0x18F5},
- {0x1900, 0x191E}, {0x1920, 0x192B}, {0x1930, 0x193B},
- {0x1940, 0x1940}, {0x1944, 0x196D}, {0x1970, 0x1974},
- {0x1980, 0x19AB}, {0x19B0, 0x19C9}, {0x19D0, 0x19DA},
- {0x19DE, 0x1A1B}, {0x1A1E, 0x1A5E}, {0x1A60, 0x1A7C},
- {0x1A7F, 0x1A89}, {0x1A90, 0x1A99}, {0x1AA0, 0x1AAD},
- {0x1AB0, 0x1AC0}, {0x1B00, 0x1B4B}, {0x1B50, 0x1B7C},
- {0x1B80, 0x1BF3}, {0x1BFC, 0x1C37}, {0x1C3B, 0x1C49},
- {0x1C4D, 0x1C88}, {0x1C90, 0x1CBA}, {0x1CBD, 0x1CC7},
- {0x1CD0, 0x1CFA}, {0x1D00, 0x1DF9}, {0x1DFB, 0x1F15},
+ {0x1700, 0x1715}, {0x171F, 0x1736}, {0x1740, 0x1753},
+ {0x1760, 0x176C}, {0x176E, 0x1770}, {0x1772, 0x1773},
+ {0x1780, 0x17DD}, {0x17E0, 0x17E9}, {0x17F0, 0x17F9},
+ {0x1800, 0x1819}, {0x1820, 0x1878}, {0x1880, 0x18AA},
+ {0x18B0, 0x18F5}, {0x1900, 0x191E}, {0x1920, 0x192B},
+ {0x1930, 0x193B}, {0x1940, 0x1940}, {0x1944, 0x196D},
+ {0x1970, 0x1974}, {0x1980, 0x19AB}, {0x19B0, 0x19C9},
+ {0x19D0, 0x19DA}, {0x19DE, 0x1A1B}, {0x1A1E, 0x1A5E},
+ {0x1A60, 0x1A7C}, {0x1A7F, 0x1A89}, {0x1A90, 0x1A99},
+ {0x1AA0, 0x1AAD}, {0x1AB0, 0x1ACE}, {0x1B00, 0x1B4C},
+ {0x1B50, 0x1B7E}, {0x1B80, 0x1BF3}, {0x1BFC, 0x1C37},
+ {0x1C3B, 0x1C49}, {0x1C4D, 0x1C88}, {0x1C90, 0x1CBA},
+ {0x1CBD, 0x1CC7}, {0x1CD0, 0x1CFA}, {0x1D00, 0x1F15},
{0x1F18, 0x1F1D}, {0x1F20, 0x1F45}, {0x1F48, 0x1F4D},
{0x1F50, 0x1F57}, {0x1F59, 0x1F59}, {0x1F5B, 0x1F5B},
{0x1F5D, 0x1F5D}, {0x1F5F, 0x1F7D}, {0x1F80, 0x1FB4},
@@ -237,7 +238,7 @@ var neutral = table{
{0x2036, 0x203A}, {0x203C, 0x203D}, {0x203F, 0x2064},
{0x2066, 0x2071}, {0x2075, 0x207E}, {0x2080, 0x2080},
{0x2085, 0x208E}, {0x2090, 0x209C}, {0x20A0, 0x20A8},
- {0x20AA, 0x20AB}, {0x20AD, 0x20BF}, {0x20D0, 0x20F0},
+ {0x20AA, 0x20AB}, {0x20AD, 0x20C0}, {0x20D0, 0x20F0},
{0x2100, 0x2102}, {0x2104, 0x2104}, {0x2106, 0x2108},
{0x210A, 0x2112}, {0x2114, 0x2115}, {0x2117, 0x2120},
{0x2123, 0x2125}, {0x2127, 0x212A}, {0x212C, 0x2152},
@@ -275,15 +276,15 @@ var neutral = table{
{0x2780, 0x2794}, {0x2798, 0x27AF}, {0x27B1, 0x27BE},
{0x27C0, 0x27E5}, {0x27EE, 0x2984}, {0x2987, 0x2B1A},
{0x2B1D, 0x2B4F}, {0x2B51, 0x2B54}, {0x2B5A, 0x2B73},
- {0x2B76, 0x2B95}, {0x2B97, 0x2C2E}, {0x2C30, 0x2C5E},
- {0x2C60, 0x2CF3}, {0x2CF9, 0x2D25}, {0x2D27, 0x2D27},
- {0x2D2D, 0x2D2D}, {0x2D30, 0x2D67}, {0x2D6F, 0x2D70},
- {0x2D7F, 0x2D96}, {0x2DA0, 0x2DA6}, {0x2DA8, 0x2DAE},
- {0x2DB0, 0x2DB6}, {0x2DB8, 0x2DBE}, {0x2DC0, 0x2DC6},
- {0x2DC8, 0x2DCE}, {0x2DD0, 0x2DD6}, {0x2DD8, 0x2DDE},
- {0x2DE0, 0x2E52}, {0x303F, 0x303F}, {0x4DC0, 0x4DFF},
- {0xA4D0, 0xA62B}, {0xA640, 0xA6F7}, {0xA700, 0xA7BF},
- {0xA7C2, 0xA7CA}, {0xA7F5, 0xA82C}, {0xA830, 0xA839},
+ {0x2B76, 0x2B95}, {0x2B97, 0x2CF3}, {0x2CF9, 0x2D25},
+ {0x2D27, 0x2D27}, {0x2D2D, 0x2D2D}, {0x2D30, 0x2D67},
+ {0x2D6F, 0x2D70}, {0x2D7F, 0x2D96}, {0x2DA0, 0x2DA6},
+ {0x2DA8, 0x2DAE}, {0x2DB0, 0x2DB6}, {0x2DB8, 0x2DBE},
+ {0x2DC0, 0x2DC6}, {0x2DC8, 0x2DCE}, {0x2DD0, 0x2DD6},
+ {0x2DD8, 0x2DDE}, {0x2DE0, 0x2E5D}, {0x303F, 0x303F},
+ {0x4DC0, 0x4DFF}, {0xA4D0, 0xA62B}, {0xA640, 0xA6F7},
+ {0xA700, 0xA7CA}, {0xA7D0, 0xA7D1}, {0xA7D3, 0xA7D3},
+ {0xA7D5, 0xA7D9}, {0xA7F2, 0xA82C}, {0xA830, 0xA839},
{0xA840, 0xA877}, {0xA880, 0xA8C5}, {0xA8CE, 0xA8D9},
{0xA8E0, 0xA953}, {0xA95F, 0xA95F}, {0xA980, 0xA9CD},
{0xA9CF, 0xA9D9}, {0xA9DE, 0xA9FE}, {0xAA00, 0xAA36},
@@ -294,8 +295,8 @@ var neutral = table{
{0xD7B0, 0xD7C6}, {0xD7CB, 0xD7FB}, {0xD800, 0xDFFF},
{0xFB00, 0xFB06}, {0xFB13, 0xFB17}, {0xFB1D, 0xFB36},
{0xFB38, 0xFB3C}, {0xFB3E, 0xFB3E}, {0xFB40, 0xFB41},
- {0xFB43, 0xFB44}, {0xFB46, 0xFBC1}, {0xFBD3, 0xFD3F},
- {0xFD50, 0xFD8F}, {0xFD92, 0xFDC7}, {0xFDF0, 0xFDFD},
+ {0xFB43, 0xFB44}, {0xFB46, 0xFBC2}, {0xFBD3, 0xFD8F},
+ {0xFD92, 0xFDC7}, {0xFDCF, 0xFDCF}, {0xFDF0, 0xFDFF},
{0xFE20, 0xFE2F}, {0xFE70, 0xFE74}, {0xFE76, 0xFEFC},
{0xFEFF, 0xFEFF}, {0xFFF9, 0xFFFC}, {0x10000, 0x1000B},
{0x1000D, 0x10026}, {0x10028, 0x1003A}, {0x1003C, 0x1003D},
@@ -307,44 +308,48 @@ var neutral = table{
{0x10380, 0x1039D}, {0x1039F, 0x103C3}, {0x103C8, 0x103D5},
{0x10400, 0x1049D}, {0x104A0, 0x104A9}, {0x104B0, 0x104D3},
{0x104D8, 0x104FB}, {0x10500, 0x10527}, {0x10530, 0x10563},
- {0x1056F, 0x1056F}, {0x10600, 0x10736}, {0x10740, 0x10755},
- {0x10760, 0x10767}, {0x10800, 0x10805}, {0x10808, 0x10808},
- {0x1080A, 0x10835}, {0x10837, 0x10838}, {0x1083C, 0x1083C},
- {0x1083F, 0x10855}, {0x10857, 0x1089E}, {0x108A7, 0x108AF},
- {0x108E0, 0x108F2}, {0x108F4, 0x108F5}, {0x108FB, 0x1091B},
- {0x1091F, 0x10939}, {0x1093F, 0x1093F}, {0x10980, 0x109B7},
- {0x109BC, 0x109CF}, {0x109D2, 0x10A03}, {0x10A05, 0x10A06},
- {0x10A0C, 0x10A13}, {0x10A15, 0x10A17}, {0x10A19, 0x10A35},
- {0x10A38, 0x10A3A}, {0x10A3F, 0x10A48}, {0x10A50, 0x10A58},
- {0x10A60, 0x10A9F}, {0x10AC0, 0x10AE6}, {0x10AEB, 0x10AF6},
- {0x10B00, 0x10B35}, {0x10B39, 0x10B55}, {0x10B58, 0x10B72},
- {0x10B78, 0x10B91}, {0x10B99, 0x10B9C}, {0x10BA9, 0x10BAF},
- {0x10C00, 0x10C48}, {0x10C80, 0x10CB2}, {0x10CC0, 0x10CF2},
- {0x10CFA, 0x10D27}, {0x10D30, 0x10D39}, {0x10E60, 0x10E7E},
- {0x10E80, 0x10EA9}, {0x10EAB, 0x10EAD}, {0x10EB0, 0x10EB1},
- {0x10F00, 0x10F27}, {0x10F30, 0x10F59}, {0x10FB0, 0x10FCB},
- {0x10FE0, 0x10FF6}, {0x11000, 0x1104D}, {0x11052, 0x1106F},
- {0x1107F, 0x110C1}, {0x110CD, 0x110CD}, {0x110D0, 0x110E8},
- {0x110F0, 0x110F9}, {0x11100, 0x11134}, {0x11136, 0x11147},
- {0x11150, 0x11176}, {0x11180, 0x111DF}, {0x111E1, 0x111F4},
- {0x11200, 0x11211}, {0x11213, 0x1123E}, {0x11280, 0x11286},
- {0x11288, 0x11288}, {0x1128A, 0x1128D}, {0x1128F, 0x1129D},
- {0x1129F, 0x112A9}, {0x112B0, 0x112EA}, {0x112F0, 0x112F9},
- {0x11300, 0x11303}, {0x11305, 0x1130C}, {0x1130F, 0x11310},
- {0x11313, 0x11328}, {0x1132A, 0x11330}, {0x11332, 0x11333},
- {0x11335, 0x11339}, {0x1133B, 0x11344}, {0x11347, 0x11348},
- {0x1134B, 0x1134D}, {0x11350, 0x11350}, {0x11357, 0x11357},
- {0x1135D, 0x11363}, {0x11366, 0x1136C}, {0x11370, 0x11374},
- {0x11400, 0x1145B}, {0x1145D, 0x11461}, {0x11480, 0x114C7},
- {0x114D0, 0x114D9}, {0x11580, 0x115B5}, {0x115B8, 0x115DD},
- {0x11600, 0x11644}, {0x11650, 0x11659}, {0x11660, 0x1166C},
- {0x11680, 0x116B8}, {0x116C0, 0x116C9}, {0x11700, 0x1171A},
- {0x1171D, 0x1172B}, {0x11730, 0x1173F}, {0x11800, 0x1183B},
- {0x118A0, 0x118F2}, {0x118FF, 0x11906}, {0x11909, 0x11909},
- {0x1190C, 0x11913}, {0x11915, 0x11916}, {0x11918, 0x11935},
- {0x11937, 0x11938}, {0x1193B, 0x11946}, {0x11950, 0x11959},
- {0x119A0, 0x119A7}, {0x119AA, 0x119D7}, {0x119DA, 0x119E4},
- {0x11A00, 0x11A47}, {0x11A50, 0x11AA2}, {0x11AC0, 0x11AF8},
+ {0x1056F, 0x1057A}, {0x1057C, 0x1058A}, {0x1058C, 0x10592},
+ {0x10594, 0x10595}, {0x10597, 0x105A1}, {0x105A3, 0x105B1},
+ {0x105B3, 0x105B9}, {0x105BB, 0x105BC}, {0x10600, 0x10736},
+ {0x10740, 0x10755}, {0x10760, 0x10767}, {0x10780, 0x10785},
+ {0x10787, 0x107B0}, {0x107B2, 0x107BA}, {0x10800, 0x10805},
+ {0x10808, 0x10808}, {0x1080A, 0x10835}, {0x10837, 0x10838},
+ {0x1083C, 0x1083C}, {0x1083F, 0x10855}, {0x10857, 0x1089E},
+ {0x108A7, 0x108AF}, {0x108E0, 0x108F2}, {0x108F4, 0x108F5},
+ {0x108FB, 0x1091B}, {0x1091F, 0x10939}, {0x1093F, 0x1093F},
+ {0x10980, 0x109B7}, {0x109BC, 0x109CF}, {0x109D2, 0x10A03},
+ {0x10A05, 0x10A06}, {0x10A0C, 0x10A13}, {0x10A15, 0x10A17},
+ {0x10A19, 0x10A35}, {0x10A38, 0x10A3A}, {0x10A3F, 0x10A48},
+ {0x10A50, 0x10A58}, {0x10A60, 0x10A9F}, {0x10AC0, 0x10AE6},
+ {0x10AEB, 0x10AF6}, {0x10B00, 0x10B35}, {0x10B39, 0x10B55},
+ {0x10B58, 0x10B72}, {0x10B78, 0x10B91}, {0x10B99, 0x10B9C},
+ {0x10BA9, 0x10BAF}, {0x10C00, 0x10C48}, {0x10C80, 0x10CB2},
+ {0x10CC0, 0x10CF2}, {0x10CFA, 0x10D27}, {0x10D30, 0x10D39},
+ {0x10E60, 0x10E7E}, {0x10E80, 0x10EA9}, {0x10EAB, 0x10EAD},
+ {0x10EB0, 0x10EB1}, {0x10EFD, 0x10F27}, {0x10F30, 0x10F59},
+ {0x10F70, 0x10F89}, {0x10FB0, 0x10FCB}, {0x10FE0, 0x10FF6},
+ {0x11000, 0x1104D}, {0x11052, 0x11075}, {0x1107F, 0x110C2},
+ {0x110CD, 0x110CD}, {0x110D0, 0x110E8}, {0x110F0, 0x110F9},
+ {0x11100, 0x11134}, {0x11136, 0x11147}, {0x11150, 0x11176},
+ {0x11180, 0x111DF}, {0x111E1, 0x111F4}, {0x11200, 0x11211},
+ {0x11213, 0x11241}, {0x11280, 0x11286}, {0x11288, 0x11288},
+ {0x1128A, 0x1128D}, {0x1128F, 0x1129D}, {0x1129F, 0x112A9},
+ {0x112B0, 0x112EA}, {0x112F0, 0x112F9}, {0x11300, 0x11303},
+ {0x11305, 0x1130C}, {0x1130F, 0x11310}, {0x11313, 0x11328},
+ {0x1132A, 0x11330}, {0x11332, 0x11333}, {0x11335, 0x11339},
+ {0x1133B, 0x11344}, {0x11347, 0x11348}, {0x1134B, 0x1134D},
+ {0x11350, 0x11350}, {0x11357, 0x11357}, {0x1135D, 0x11363},
+ {0x11366, 0x1136C}, {0x11370, 0x11374}, {0x11400, 0x1145B},
+ {0x1145D, 0x11461}, {0x11480, 0x114C7}, {0x114D0, 0x114D9},
+ {0x11580, 0x115B5}, {0x115B8, 0x115DD}, {0x11600, 0x11644},
+ {0x11650, 0x11659}, {0x11660, 0x1166C}, {0x11680, 0x116B9},
+ {0x116C0, 0x116C9}, {0x11700, 0x1171A}, {0x1171D, 0x1172B},
+ {0x11730, 0x11746}, {0x11800, 0x1183B}, {0x118A0, 0x118F2},
+ {0x118FF, 0x11906}, {0x11909, 0x11909}, {0x1190C, 0x11913},
+ {0x11915, 0x11916}, {0x11918, 0x11935}, {0x11937, 0x11938},
+ {0x1193B, 0x11946}, {0x11950, 0x11959}, {0x119A0, 0x119A7},
+ {0x119AA, 0x119D7}, {0x119DA, 0x119E4}, {0x11A00, 0x11A47},
+ {0x11A50, 0x11AA2}, {0x11AB0, 0x11AF8}, {0x11B00, 0x11B09},
{0x11C00, 0x11C08}, {0x11C0A, 0x11C36}, {0x11C38, 0x11C45},
{0x11C50, 0x11C6C}, {0x11C70, 0x11C8F}, {0x11C92, 0x11CA7},
{0x11CA9, 0x11CB6}, {0x11D00, 0x11D06}, {0x11D08, 0x11D09},
@@ -352,30 +357,36 @@ var neutral = table{
{0x11D3F, 0x11D47}, {0x11D50, 0x11D59}, {0x11D60, 0x11D65},
{0x11D67, 0x11D68}, {0x11D6A, 0x11D8E}, {0x11D90, 0x11D91},
{0x11D93, 0x11D98}, {0x11DA0, 0x11DA9}, {0x11EE0, 0x11EF8},
+ {0x11F00, 0x11F10}, {0x11F12, 0x11F3A}, {0x11F3E, 0x11F59},
{0x11FB0, 0x11FB0}, {0x11FC0, 0x11FF1}, {0x11FFF, 0x12399},
{0x12400, 0x1246E}, {0x12470, 0x12474}, {0x12480, 0x12543},
- {0x13000, 0x1342E}, {0x13430, 0x13438}, {0x14400, 0x14646},
+ {0x12F90, 0x12FF2}, {0x13000, 0x13455}, {0x14400, 0x14646},
{0x16800, 0x16A38}, {0x16A40, 0x16A5E}, {0x16A60, 0x16A69},
- {0x16A6E, 0x16A6F}, {0x16AD0, 0x16AED}, {0x16AF0, 0x16AF5},
- {0x16B00, 0x16B45}, {0x16B50, 0x16B59}, {0x16B5B, 0x16B61},
- {0x16B63, 0x16B77}, {0x16B7D, 0x16B8F}, {0x16E40, 0x16E9A},
- {0x16F00, 0x16F4A}, {0x16F4F, 0x16F87}, {0x16F8F, 0x16F9F},
- {0x1BC00, 0x1BC6A}, {0x1BC70, 0x1BC7C}, {0x1BC80, 0x1BC88},
- {0x1BC90, 0x1BC99}, {0x1BC9C, 0x1BCA3}, {0x1D000, 0x1D0F5},
- {0x1D100, 0x1D126}, {0x1D129, 0x1D1E8}, {0x1D200, 0x1D245},
- {0x1D2E0, 0x1D2F3}, {0x1D300, 0x1D356}, {0x1D360, 0x1D378},
- {0x1D400, 0x1D454}, {0x1D456, 0x1D49C}, {0x1D49E, 0x1D49F},
- {0x1D4A2, 0x1D4A2}, {0x1D4A5, 0x1D4A6}, {0x1D4A9, 0x1D4AC},
- {0x1D4AE, 0x1D4B9}, {0x1D4BB, 0x1D4BB}, {0x1D4BD, 0x1D4C3},
- {0x1D4C5, 0x1D505}, {0x1D507, 0x1D50A}, {0x1D50D, 0x1D514},
- {0x1D516, 0x1D51C}, {0x1D51E, 0x1D539}, {0x1D53B, 0x1D53E},
- {0x1D540, 0x1D544}, {0x1D546, 0x1D546}, {0x1D54A, 0x1D550},
- {0x1D552, 0x1D6A5}, {0x1D6A8, 0x1D7CB}, {0x1D7CE, 0x1DA8B},
- {0x1DA9B, 0x1DA9F}, {0x1DAA1, 0x1DAAF}, {0x1E000, 0x1E006},
- {0x1E008, 0x1E018}, {0x1E01B, 0x1E021}, {0x1E023, 0x1E024},
- {0x1E026, 0x1E02A}, {0x1E100, 0x1E12C}, {0x1E130, 0x1E13D},
- {0x1E140, 0x1E149}, {0x1E14E, 0x1E14F}, {0x1E2C0, 0x1E2F9},
- {0x1E2FF, 0x1E2FF}, {0x1E800, 0x1E8C4}, {0x1E8C7, 0x1E8D6},
+ {0x16A6E, 0x16ABE}, {0x16AC0, 0x16AC9}, {0x16AD0, 0x16AED},
+ {0x16AF0, 0x16AF5}, {0x16B00, 0x16B45}, {0x16B50, 0x16B59},
+ {0x16B5B, 0x16B61}, {0x16B63, 0x16B77}, {0x16B7D, 0x16B8F},
+ {0x16E40, 0x16E9A}, {0x16F00, 0x16F4A}, {0x16F4F, 0x16F87},
+ {0x16F8F, 0x16F9F}, {0x1BC00, 0x1BC6A}, {0x1BC70, 0x1BC7C},
+ {0x1BC80, 0x1BC88}, {0x1BC90, 0x1BC99}, {0x1BC9C, 0x1BCA3},
+ {0x1CF00, 0x1CF2D}, {0x1CF30, 0x1CF46}, {0x1CF50, 0x1CFC3},
+ {0x1D000, 0x1D0F5}, {0x1D100, 0x1D126}, {0x1D129, 0x1D1EA},
+ {0x1D200, 0x1D245}, {0x1D2C0, 0x1D2D3}, {0x1D2E0, 0x1D2F3},
+ {0x1D300, 0x1D356}, {0x1D360, 0x1D378}, {0x1D400, 0x1D454},
+ {0x1D456, 0x1D49C}, {0x1D49E, 0x1D49F}, {0x1D4A2, 0x1D4A2},
+ {0x1D4A5, 0x1D4A6}, {0x1D4A9, 0x1D4AC}, {0x1D4AE, 0x1D4B9},
+ {0x1D4BB, 0x1D4BB}, {0x1D4BD, 0x1D4C3}, {0x1D4C5, 0x1D505},
+ {0x1D507, 0x1D50A}, {0x1D50D, 0x1D514}, {0x1D516, 0x1D51C},
+ {0x1D51E, 0x1D539}, {0x1D53B, 0x1D53E}, {0x1D540, 0x1D544},
+ {0x1D546, 0x1D546}, {0x1D54A, 0x1D550}, {0x1D552, 0x1D6A5},
+ {0x1D6A8, 0x1D7CB}, {0x1D7CE, 0x1DA8B}, {0x1DA9B, 0x1DA9F},
+ {0x1DAA1, 0x1DAAF}, {0x1DF00, 0x1DF1E}, {0x1DF25, 0x1DF2A},
+ {0x1E000, 0x1E006}, {0x1E008, 0x1E018}, {0x1E01B, 0x1E021},
+ {0x1E023, 0x1E024}, {0x1E026, 0x1E02A}, {0x1E030, 0x1E06D},
+ {0x1E08F, 0x1E08F}, {0x1E100, 0x1E12C}, {0x1E130, 0x1E13D},
+ {0x1E140, 0x1E149}, {0x1E14E, 0x1E14F}, {0x1E290, 0x1E2AE},
+ {0x1E2C0, 0x1E2F9}, {0x1E2FF, 0x1E2FF}, {0x1E4D0, 0x1E4F9},
+ {0x1E7E0, 0x1E7E6}, {0x1E7E8, 0x1E7EB}, {0x1E7ED, 0x1E7EE},
+ {0x1E7F0, 0x1E7FE}, {0x1E800, 0x1E8C4}, {0x1E8C7, 0x1E8D6},
{0x1E900, 0x1E94B}, {0x1E950, 0x1E959}, {0x1E95E, 0x1E95F},
{0x1EC71, 0x1ECB4}, {0x1ED01, 0x1ED3D}, {0x1EE00, 0x1EE03},
{0x1EE05, 0x1EE1F}, {0x1EE21, 0x1EE22}, {0x1EE24, 0x1EE24},
@@ -400,8 +411,8 @@ var neutral = table{
{0x1F54F, 0x1F54F}, {0x1F568, 0x1F579}, {0x1F57B, 0x1F594},
{0x1F597, 0x1F5A3}, {0x1F5A5, 0x1F5FA}, {0x1F650, 0x1F67F},
{0x1F6C6, 0x1F6CB}, {0x1F6CD, 0x1F6CF}, {0x1F6D3, 0x1F6D4},
- {0x1F6E0, 0x1F6EA}, {0x1F6F0, 0x1F6F3}, {0x1F700, 0x1F773},
- {0x1F780, 0x1F7D8}, {0x1F800, 0x1F80B}, {0x1F810, 0x1F847},
+ {0x1F6E0, 0x1F6EA}, {0x1F6F0, 0x1F6F3}, {0x1F700, 0x1F776},
+ {0x1F77B, 0x1F7D9}, {0x1F800, 0x1F80B}, {0x1F810, 0x1F847},
{0x1F850, 0x1F859}, {0x1F860, 0x1F887}, {0x1F890, 0x1F8AD},
{0x1F8B0, 0x1F8B1}, {0x1F900, 0x1F90B}, {0x1F93B, 0x1F93B},
{0x1F946, 0x1F946}, {0x1FA00, 0x1FA53}, {0x1FA60, 0x1FA6D},
diff --git a/vendor/github.com/mgechev/revive/config/config.go b/vendor/github.com/mgechev/revive/config/config.go
index fed1d1913..16559f5ec 100644
--- a/vendor/github.com/mgechev/revive/config/config.go
+++ b/vendor/github.com/mgechev/revive/config/config.go
@@ -96,6 +96,8 @@ var allRules = append([]lint.Rule{
&rule.EnforceSliceStyleRule{},
&rule.MaxControlNestingRule{},
&rule.CommentsDensityRule{},
+ &rule.FileLengthLimitRule{},
+ &rule.FilenameFormatRule{},
}, defaultRules...)
var allFormatters = []lint.Formatter{
diff --git a/vendor/github.com/mgechev/revive/lint/file.go b/vendor/github.com/mgechev/revive/lint/file.go
index 23255304c..e34f8b7f4 100644
--- a/vendor/github.com/mgechev/revive/lint/file.go
+++ b/vendor/github.com/mgechev/revive/lint/file.go
@@ -188,7 +188,7 @@ func (f *File) disabledIntervals(rules []Rule, mustSpecifyDisableReason bool, fa
enabledDisabledRulesMap[name] = existing
}
- handleRules := func(filename, modifier string, isEnabled bool, line int, ruleNames []string) []DisabledInterval {
+ handleRules := func(_, modifier string, isEnabled bool, line int, ruleNames []string) []DisabledInterval {
var result []DisabledInterval
for _, name := range ruleNames {
if modifier == "line" {
diff --git a/vendor/github.com/mgechev/revive/lint/linter.go b/vendor/github.com/mgechev/revive/lint/linter.go
index 3c97f306f..56036e83d 100644
--- a/vendor/github.com/mgechev/revive/lint/linter.go
+++ b/vendor/github.com/mgechev/revive/lint/linter.go
@@ -3,11 +3,9 @@ package lint
import (
"bufio"
"bytes"
- "encoding/json"
"fmt"
"go/token"
"os"
- "os/exec"
"path/filepath"
"regexp"
"strconv"
@@ -15,6 +13,7 @@ import (
"sync"
goversion "github.com/hashicorp/go-version"
+ "golang.org/x/mod/modfile"
)
// ReadFile defines an abstraction for reading files.
@@ -55,8 +54,9 @@ func (l Linter) readFile(path string) (result []byte, err error) {
}
var (
- genHdr = []byte("// Code generated ")
- genFtr = []byte(" DO NOT EDIT.")
+ genHdr = []byte("// Code generated ")
+ genFtr = []byte(" DO NOT EDIT.")
+ defaultGoVersion = goversion.Must(goversion.NewVersion("1.0"))
)
// Lint lints a set of files with the specified rule.
@@ -93,7 +93,9 @@ func (l *Linter) Lint(packages [][]string, ruleSet []Rule, config Config) (<-cha
d, v, err := detectGoMod(dir)
if err != nil {
- return nil, err
+ // No luck finding the go.mod file thus set the default Go version
+ v = defaultGoVersion
+ d = dir
}
perModVersions[d] = v
perPkgVersions[n] = v
@@ -156,37 +158,42 @@ func (l *Linter) lintPackage(filenames []string, gover *goversion.Version, ruleS
}
func detectGoMod(dir string) (rootDir string, ver *goversion.Version, err error) {
- // https://github.com/golang/go/issues/44753#issuecomment-790089020
- cmd := exec.Command("go", "list", "-m", "-json")
- cmd.Dir = dir
+ modFileName, err := retrieveModFile(dir)
+ if err != nil {
+ return "", nil, fmt.Errorf("%q doesn't seem to be part of a Go module", dir)
+ }
- out, err := cmd.Output()
+ mod, err := os.ReadFile(modFileName)
if err != nil {
- return "", nil, fmt.Errorf("command go list: %w", err)
+ return "", nil, fmt.Errorf("failed to read %q, got %v", modFileName, err)
}
- // NOTE: A package may be part of a go workspace. In this case `go list -m`
- // lists all modules in the workspace, so we need to go through them all.
- d := json.NewDecoder(bytes.NewBuffer(out))
- for d.More() {
- var v struct {
- GoMod string `json:"GoMod"`
- GoVersion string `json:"GoVersion"`
- Dir string `json:"Dir"`
- }
- if err = d.Decode(&v); err != nil {
- return "", nil, err
- }
- if v.GoMod == "" {
- return "", nil, fmt.Errorf("not part of a module: %q", dir)
+ modAst, err := modfile.ParseLax(modFileName, mod, nil)
+ if err != nil {
+ return "", nil, fmt.Errorf("failed to parse %q, got %v", modFileName, err)
+ }
+
+ ver, err = goversion.NewVersion(modAst.Go.Version)
+ return filepath.Dir(modFileName), ver, err
+}
+
+func retrieveModFile(dir string) (string, error) {
+ const lookingForFile = "go.mod"
+ for {
+ if dir == "." || dir == "/" {
+ return "", fmt.Errorf("did not found %q file", lookingForFile)
}
- if v.Dir != "" && strings.HasPrefix(dir, v.Dir) {
- rootDir = v.Dir
- ver, err = goversion.NewVersion(strings.TrimPrefix(v.GoVersion, "go"))
- return rootDir, ver, err
+
+ lookingForFilePath := filepath.Join(dir, lookingForFile)
+ info, err := os.Stat(lookingForFilePath)
+ if err != nil || info.IsDir() {
+ // lets check the parent dir
+ dir = filepath.Dir(dir)
+ continue
}
+
+ return lookingForFilePath, nil
}
- return "", nil, fmt.Errorf("not part of a module: %q", dir)
}
// isGenerated reports whether the source file is generated code
diff --git a/vendor/github.com/mgechev/revive/lint/utils.go b/vendor/github.com/mgechev/revive/lint/name.go
index 6ccfb0ef2..6ccfb0ef2 100644
--- a/vendor/github.com/mgechev/revive/lint/utils.go
+++ b/vendor/github.com/mgechev/revive/lint/name.go
diff --git a/vendor/github.com/mgechev/revive/lint/package.go b/vendor/github.com/mgechev/revive/lint/package.go
index b4a0a72c7..2ab035f16 100644
--- a/vendor/github.com/mgechev/revive/lint/package.go
+++ b/vendor/github.com/mgechev/revive/lint/package.go
@@ -33,6 +33,7 @@ var (
falseValue = 2
notSet = 3
+ go121 = goversion.Must(goversion.NewVersion("1.21"))
go122 = goversion.Must(goversion.NewVersion("1.22"))
)
@@ -165,17 +166,17 @@ func (p *Package) scanSortable() {
// bitfield for which methods exist on each type.
const (
- Len = 1 << iota
- Less
- Swap
+ bfLen = 1 << iota
+ bfLess
+ bfSwap
)
- nmap := map[string]int{"Len": Len, "Less": Less, "Swap": Swap}
+ nmap := map[string]int{"Len": bfLen, "Less": bfLess, "Swap": bfSwap}
has := make(map[string]int)
for _, f := range p.files {
ast.Walk(&walker{nmap, has}, f.AST)
}
for typ, ms := range has {
- if ms == Len|Less|Swap {
+ if ms == bfLen|bfLess|bfSwap {
p.sortable[typ] = true
}
}
@@ -194,6 +195,11 @@ func (p *Package) lint(rules []Rule, config Config, failures chan Failure) {
wg.Wait()
}
+// IsAtLeastGo121 returns true if the Go version for this package is 1.21 or higher, false otherwise
+func (p *Package) IsAtLeastGo121() bool {
+ return p.goVersion.GreaterThanOrEqual(go121)
+}
+
// IsAtLeastGo122 returns true if the Go version for this package is 1.22 or higher, false otherwise
func (p *Package) IsAtLeastGo122() bool {
return p.goVersion.GreaterThanOrEqual(go122)
diff --git a/vendor/github.com/mgechev/revive/rule/add-constant.go b/vendor/github.com/mgechev/revive/rule/add-constant.go
index 73dfa932c..233f1d848 100644
--- a/vendor/github.com/mgechev/revive/rule/add-constant.go
+++ b/vendor/github.com/mgechev/revive/rule/add-constant.go
@@ -160,12 +160,15 @@ func (w *lintAddConstantRule) isIgnoredFunc(fName string) bool {
}
func (w *lintAddConstantRule) checkStrLit(n *ast.BasicLit) {
+ const ignoreMarker = -1
+
if w.allowList[kindSTRING][n.Value] {
return
}
count := w.strLits[n.Value]
- if count >= 0 {
+ mustCheck := count > ignoreMarker
+ if mustCheck {
w.strLits[n.Value] = count + 1
if w.strLits[n.Value] > w.strLitLimit {
w.onFailure(lint.Failure{
diff --git a/vendor/github.com/mgechev/revive/rule/argument-limit.go b/vendor/github.com/mgechev/revive/rule/argument-limit.go
index 8120288fd..b6ce0e81a 100644
--- a/vendor/github.com/mgechev/revive/rule/argument-limit.go
+++ b/vendor/github.com/mgechev/revive/rule/argument-limit.go
@@ -10,7 +10,7 @@ import (
// ArgumentsLimitRule lints given else constructs.
type ArgumentsLimitRule struct {
- total int
+ max int
sync.Mutex
}
@@ -19,18 +19,20 @@ const defaultArgumentsLimit = 8
func (r *ArgumentsLimitRule) configure(arguments lint.Arguments) {
r.Lock()
defer r.Unlock()
- if r.total == 0 {
- if len(arguments) < 1 {
- r.total = defaultArgumentsLimit
- return
- }
+ if r.max != 0 {
+ return
+ }
- total, ok := arguments[0].(int64) // Alt. non panicking version
- if !ok {
- panic(`invalid value passed as argument number to the "argument-limit" rule`)
- }
- r.total = int(total)
+ if len(arguments) < 1 {
+ r.max = defaultArgumentsLimit
+ return
}
+
+ maxArguments, ok := arguments[0].(int64) // Alt. non panicking version
+ if !ok {
+ panic(`invalid value passed as argument number to the "argument-limit" rule`)
+ }
+ r.max = int(maxArguments)
}
// Apply applies the rule to given file.
@@ -43,7 +45,7 @@ func (r *ArgumentsLimitRule) Apply(file *lint.File, arguments lint.Arguments) []
}
walker := lintArgsNum{
- total: r.total,
+ max: r.max,
onFailure: onFailure,
}
@@ -58,27 +60,30 @@ func (*ArgumentsLimitRule) Name() string {
}
type lintArgsNum struct {
- total int
+ max int
onFailure func(lint.Failure)
}
func (w lintArgsNum) Visit(n ast.Node) ast.Visitor {
node, ok := n.(*ast.FuncDecl)
- if ok {
- num := 0
- for _, l := range node.Type.Params.List {
- for range l.Names {
- num++
- }
- }
- if num > w.total {
- w.onFailure(lint.Failure{
- Confidence: 1,
- Failure: fmt.Sprintf("maximum number of arguments per function exceeded; max %d but got %d", w.total, num),
- Node: node.Type,
- })
- return w
+ if !ok {
+ return w
+ }
+
+ num := 0
+ for _, l := range node.Type.Params.List {
+ for range l.Names {
+ num++
}
}
- return w
+
+ if num > w.max {
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Failure: fmt.Sprintf("maximum number of arguments per function exceeded; max %d but got %d", w.max, num),
+ Node: node.Type,
+ })
+ }
+
+ return nil // skip visiting the body of the function
}
diff --git a/vendor/github.com/mgechev/revive/rule/blank-imports.go b/vendor/github.com/mgechev/revive/rule/blank-imports.go
index a3d50b4f7..0ddb4aad2 100644
--- a/vendor/github.com/mgechev/revive/rule/blank-imports.go
+++ b/vendor/github.com/mgechev/revive/rule/blank-imports.go
@@ -22,9 +22,8 @@ func (r *BlankImportsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failu
}
const (
- message = "a blank import should be only in a main or test package, or have a comment justifying it"
- category = "imports"
-
+ message = "a blank import should be only in a main or test package, or have a comment justifying it"
+ category = "imports"
embedImportPath = `"embed"`
)
@@ -39,7 +38,8 @@ func (r *BlankImportsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failu
continue // Ignore non-blank imports.
}
- if i > 0 {
+ isNotFirstElement := i > 0
+ if isNotFirstElement {
prev := file.AST.Imports[i-1]
prevPos := file.ToPosition(prev.Pos())
diff --git a/vendor/github.com/mgechev/revive/rule/bool-literal-in-expr.go b/vendor/github.com/mgechev/revive/rule/bool-literal-in-expr.go
index d6150339b..71551e55a 100644
--- a/vendor/github.com/mgechev/revive/rule/bool-literal-in-expr.go
+++ b/vendor/github.com/mgechev/revive/rule/bool-literal-in-expr.go
@@ -45,7 +45,6 @@ func (w *lintBoolLiteral) Visit(node ast.Node) ast.Visitor {
lexeme, ok := isExprABooleanLit(n.X)
if !ok {
lexeme, ok = isExprABooleanLit(n.Y)
-
if !ok {
return w
}
diff --git a/vendor/github.com/mgechev/revive/rule/cognitive-complexity.go b/vendor/github.com/mgechev/revive/rule/cognitive-complexity.go
index 1973faef8..83640fd3d 100644
--- a/vendor/github.com/mgechev/revive/rule/cognitive-complexity.go
+++ b/vendor/github.com/mgechev/revive/rule/cognitive-complexity.go
@@ -21,19 +21,21 @@ const defaultMaxCognitiveComplexity = 7
func (r *CognitiveComplexityRule) configure(arguments lint.Arguments) {
r.Lock()
defer r.Unlock()
- if r.maxComplexity == 0 {
+ if r.maxComplexity != 0 {
+ return // already configured
+ }
- if len(arguments) < 1 {
- r.maxComplexity = defaultMaxCognitiveComplexity
- return
- }
+ if len(arguments) < 1 {
+ r.maxComplexity = defaultMaxCognitiveComplexity
+ return
+ }
- complexity, ok := arguments[0].(int64)
- if !ok {
- panic(fmt.Sprintf("invalid argument type for cognitive-complexity, expected int64, got %T", arguments[0]))
- }
- r.maxComplexity = int(complexity)
+ complexity, ok := arguments[0].(int64)
+ if !ok {
+ panic(fmt.Sprintf("invalid argument type for cognitive-complexity, expected int64, got %T", arguments[0]))
}
+
+ r.maxComplexity = int(complexity)
}
// Apply applies the rule to given file.
diff --git a/vendor/github.com/mgechev/revive/rule/comment-spacings.go b/vendor/github.com/mgechev/revive/rule/comment-spacings.go
index bfb7eaf23..f72151301 100644
--- a/vendor/github.com/mgechev/revive/rule/comment-spacings.go
+++ b/vendor/github.com/mgechev/revive/rule/comment-spacings.go
@@ -18,16 +18,17 @@ type CommentSpacingsRule struct {
func (r *CommentSpacingsRule) configure(arguments lint.Arguments) {
r.Lock()
defer r.Unlock()
+ if r.allowList != nil {
+ return // already configured
+ }
- if r.allowList == nil {
- r.allowList = []string{}
- for _, arg := range arguments {
- allow, ok := arg.(string) // Alt. non panicking version
- if !ok {
- panic(fmt.Sprintf("invalid argument %v for %s; expected string but got %T", arg, r.Name(), arg))
- }
- r.allowList = append(r.allowList, `//`+allow)
+ r.allowList = []string{}
+ for _, arg := range arguments {
+ allow, ok := arg.(string) // Alt. non panicking version
+ if !ok {
+ panic(fmt.Sprintf("invalid argument %v for %s; expected string but got %T", arg, r.Name(), arg))
}
+ r.allowList = append(r.allowList, `//`+allow)
}
}
diff --git a/vendor/github.com/mgechev/revive/rule/comments-density.go b/vendor/github.com/mgechev/revive/rule/comments-density.go
index 5956fea23..c5298ea07 100644
--- a/vendor/github.com/mgechev/revive/rule/comments-density.go
+++ b/vendor/github.com/mgechev/revive/rule/comments-density.go
@@ -53,7 +53,8 @@ func (r *CommentsDensityRule) Apply(file *lint.File, arguments lint.Arguments) [
{
Node: file.AST,
Confidence: 1,
- Failure: fmt.Sprintf("the file has a comment density of %2.f%% (%d comment lines for %d code lines) but expected a minimum of %d%%", density, commentsLines, statementsCount, r.minimumCommentsDensity),
+ Failure: fmt.Sprintf("the file has a comment density of %2.f%% (%d comment lines for %d code lines) but expected a minimum of %d%%",
+ density, commentsLines, statementsCount, r.minimumCommentsDensity),
},
}
}
diff --git a/vendor/github.com/mgechev/revive/rule/constant-logical-expr.go b/vendor/github.com/mgechev/revive/rule/constant-logical-expr.go
index 36cd641f7..9e34d3d16 100644
--- a/vendor/github.com/mgechev/revive/rule/constant-logical-expr.go
+++ b/vendor/github.com/mgechev/revive/rule/constant-logical-expr.go
@@ -41,8 +41,9 @@ func (w *lintConstantLogicalExpr) Visit(node ast.Node) ast.Visitor {
return w
}
- if gofmt(n.X) != gofmt(n.Y) { // check if subexpressions are the same
- return w
+ subExpressionsAreNotEqual := gofmt(n.X) != gofmt(n.Y)
+ if subExpressionsAreNotEqual {
+ return w // nothing to say
}
// Handles cases like: a <= a, a == a, a >= a
diff --git a/vendor/github.com/mgechev/revive/rule/cyclomatic.go b/vendor/github.com/mgechev/revive/rule/cyclomatic.go
index 9f6d50043..10413de24 100644
--- a/vendor/github.com/mgechev/revive/rule/cyclomatic.go
+++ b/vendor/github.com/mgechev/revive/rule/cyclomatic.go
@@ -22,18 +22,20 @@ const defaultMaxCyclomaticComplexity = 10
func (r *CyclomaticRule) configure(arguments lint.Arguments) {
r.Lock()
defer r.Unlock()
- if r.maxComplexity == 0 {
- if len(arguments) < 1 {
- r.maxComplexity = defaultMaxCyclomaticComplexity
- return
- }
+ if r.maxComplexity != 0 {
+ return // already configured
+ }
- complexity, ok := arguments[0].(int64) // Alt. non panicking version
- if !ok {
- panic(fmt.Sprintf("invalid argument for cyclomatic complexity; expected int but got %T", arguments[0]))
- }
- r.maxComplexity = int(complexity)
+ if len(arguments) < 1 {
+ r.maxComplexity = defaultMaxCyclomaticComplexity
+ return
+ }
+
+ complexity, ok := arguments[0].(int64) // Alt. non panicking version
+ if !ok {
+ panic(fmt.Sprintf("invalid argument for cyclomatic complexity; expected int but got %T", arguments[0]))
}
+ r.maxComplexity = int(complexity)
}
// Apply applies the rule to given file.
@@ -70,31 +72,35 @@ type lintCyclomatic struct {
func (w lintCyclomatic) Visit(_ ast.Node) ast.Visitor {
f := w.file
for _, decl := range f.AST.Decls {
- if fn, ok := decl.(*ast.FuncDecl); ok {
- c := complexity(fn)
- if c > w.complexity {
- w.onFailure(lint.Failure{
- Confidence: 1,
- Category: "maintenance",
- Failure: fmt.Sprintf("function %s has cyclomatic complexity %d (> max enabled %d)",
- funcName(fn), c, w.complexity),
- Node: fn,
- })
- }
+ fn, ok := decl.(*ast.FuncDecl)
+ if !ok {
+ continue
+ }
+
+ c := complexity(fn)
+ if c > w.complexity {
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Category: "maintenance",
+ Failure: fmt.Sprintf("function %s has cyclomatic complexity %d (> max enabled %d)",
+ funcName(fn), c, w.complexity),
+ Node: fn,
+ })
}
}
+
return nil
}
// funcName returns the name representation of a function or method:
// "(Type).Name" for methods or simply "Name" for functions.
func funcName(fn *ast.FuncDecl) string {
- if fn.Recv != nil {
- if fn.Recv.NumFields() > 0 {
- typ := fn.Recv.List[0].Type
- return fmt.Sprintf("(%s).%s", recvString(typ), fn.Name)
- }
+ declarationHasReceiver := fn.Recv != nil && fn.Recv.NumFields() > 0
+ if declarationHasReceiver {
+ typ := fn.Recv.List[0].Type
+ return fmt.Sprintf("(%s).%s", recvString(typ), fn.Name)
}
+
return fn.Name.Name
}
diff --git a/vendor/github.com/mgechev/revive/rule/datarace.go b/vendor/github.com/mgechev/revive/rule/datarace.go
index 86ec6e113..21a7a706e 100644
--- a/vendor/github.com/mgechev/revive/rule/datarace.go
+++ b/vendor/github.com/mgechev/revive/rule/datarace.go
@@ -80,7 +80,7 @@ func (w lintFunctionForDataRaces) Visit(node ast.Node) ast.Visitor {
return nil
}
- getIds := func(exprs ...ast.Expr) []*ast.Ident {
+ getIDs := func(exprs ...ast.Expr) []*ast.Ident {
r := []*ast.Ident{}
for _, expr := range exprs {
if id, ok := expr.(*ast.Ident); ok {
@@ -90,7 +90,7 @@ func (w lintFunctionForDataRaces) Visit(node ast.Node) ast.Visitor {
return r
}
- ids := getIds(n.Key, n.Value)
+ ids := getIDs(n.Key, n.Value)
for _, id := range ids {
w.rangeIDs[id.Obj] = struct{}{}
}
diff --git a/vendor/github.com/mgechev/revive/rule/deep-exit.go b/vendor/github.com/mgechev/revive/rule/deep-exit.go
index 918d4294a..7b3dd0f82 100644
--- a/vendor/github.com/mgechev/revive/rule/deep-exit.go
+++ b/vendor/github.com/mgechev/revive/rule/deep-exit.go
@@ -73,9 +73,10 @@ func (w lintDeepExit) Visit(node ast.Node) ast.Visitor {
return w
}
- fn := fc.Sel.Name
pkg := id.Name
- if w.exitFunctions[pkg] != nil && w.exitFunctions[pkg][fn] { // it's a call to an exit function
+ fn := fc.Sel.Name
+ isACallToExitFunction := w.exitFunctions[pkg] != nil && w.exitFunctions[pkg][fn]
+ if isACallToExitFunction {
w.onFailure(lint.Failure{
Confidence: 1,
Node: ce,
diff --git a/vendor/github.com/mgechev/revive/rule/defer.go b/vendor/github.com/mgechev/revive/rule/defer.go
index adc6478ae..3c31d507b 100644
--- a/vendor/github.com/mgechev/revive/rule/defer.go
+++ b/vendor/github.com/mgechev/revive/rule/defer.go
@@ -16,10 +16,12 @@ type DeferRule struct {
func (r *DeferRule) configure(arguments lint.Arguments) {
r.Lock()
- if r.allow == nil {
- r.allow = r.allowFromArgs(arguments)
+ defer r.Unlock()
+ if r.allow != nil {
+ return // already configured
}
- r.Unlock()
+
+ r.allow = r.allowFromArgs(arguments)
}
// Apply applies the rule to given file.
@@ -111,7 +113,7 @@ func (w lintDeferRule) Visit(node ast.Node) ast.Visitor {
// but it is very likely to be a misunderstanding of defer's behavior around arguments.
w.newFailure("recover must be called inside a deferred function, this is executing recover immediately", n, 1, "logic", "immediate-recover")
}
-
+ return nil // no need to analyze the arguments of the function call
case *ast.DeferStmt:
if isIdent(n.Call.Fun, "recover") {
// defer recover()
diff --git a/vendor/github.com/mgechev/revive/rule/dot-imports.go b/vendor/github.com/mgechev/revive/rule/dot-imports.go
index 6b877677d..df0b2a7f4 100644
--- a/vendor/github.com/mgechev/revive/rule/dot-imports.go
+++ b/vendor/github.com/mgechev/revive/rule/dot-imports.go
@@ -59,17 +59,17 @@ func (r *DotImportsRule) configure(arguments lint.Arguments) {
}
if allowedPkgArg, ok := args["allowedPackages"]; ok {
- if pkgs, ok := allowedPkgArg.([]any); ok {
- for _, p := range pkgs {
- if pkg, ok := p.(string); ok {
- r.allowedPackages.add(pkg)
- } else {
- panic(fmt.Sprintf("Invalid argument to the dot-imports rule, string expected. Got '%v' (%T)", p, p))
- }
- }
- } else {
+ pkgs, ok := allowedPkgArg.([]any)
+ if !ok {
panic(fmt.Sprintf("Invalid argument to the dot-imports rule, []string expected. Got '%v' (%T)", allowedPkgArg, allowedPkgArg))
}
+ for _, p := range pkgs {
+ pkg, ok := p.(string)
+ if !ok {
+ panic(fmt.Sprintf("Invalid argument to the dot-imports rule, string expected. Got '%v' (%T)", p, p))
+ }
+ r.allowedPackages.add(pkg)
+ }
}
}
@@ -81,12 +81,13 @@ type lintImports struct {
}
func (w lintImports) Visit(_ ast.Node) ast.Visitor {
- for _, is := range w.fileAst.Imports {
- if is.Name != nil && is.Name.Name == "." && !w.allowPackages.isAllowedPackage(is.Path.Value) {
+ for _, importSpec := range w.fileAst.Imports {
+ isDotImport := importSpec.Name != nil && importSpec.Name.Name == "."
+ if isDotImport && !w.allowPackages.isAllowedPackage(importSpec.Path.Value) {
w.onFailure(lint.Failure{
Confidence: 1,
Failure: "should not use dot imports",
- Node: is,
+ Node: importSpec,
Category: "imports",
})
}
diff --git a/vendor/github.com/mgechev/revive/rule/early-return.go b/vendor/github.com/mgechev/revive/rule/early-return.go
index 9c04a1dbe..62d491f27 100644
--- a/vendor/github.com/mgechev/revive/rule/early-return.go
+++ b/vendor/github.com/mgechev/revive/rule/early-return.go
@@ -21,27 +21,27 @@ func (*EarlyReturnRule) Name() string {
return "early-return"
}
-// CheckIfElse evaluates the rule against an ifelse.Chain.
-func (*EarlyReturnRule) CheckIfElse(chain ifelse.Chain, args ifelse.Args) (failMsg string) {
+// CheckIfElse evaluates the rule against an ifelse.Chain and returns a failure message if applicable.
+func (*EarlyReturnRule) CheckIfElse(chain ifelse.Chain, args ifelse.Args) string {
if !chain.Else.Deviates() {
// this rule only applies if the else-block deviates control flow
- return
+ return ""
}
if chain.HasPriorNonDeviating && !chain.If.IsEmpty() {
// if we de-indent this block then a previous branch
// might flow into it, affecting program behaviour
- return
+ return ""
}
if chain.If.Deviates() {
// avoid overlapping with superfluous-else
- return
+ return ""
}
if args.PreserveScope && !chain.AtBlockEnd && (chain.HasInitializer || chain.If.HasDecls) {
// avoid increasing variable scope
- return
+ return ""
}
if chain.If.IsEmpty() {
diff --git a/vendor/github.com/mgechev/revive/rule/enforce-map-style.go b/vendor/github.com/mgechev/revive/rule/enforce-map-style.go
index 36ac2374c..c698c40ed 100644
--- a/vendor/github.com/mgechev/revive/rule/enforce-map-style.go
+++ b/vendor/github.com/mgechev/revive/rule/enforce-map-style.go
@@ -65,7 +65,6 @@ func (r *EnforceMapStyleRule) configure(arguments lint.Arguments) {
var err error
r.enforceMapStyle, err = mapStyleFromString(enforceMapStyle)
-
if err != nil {
panic(fmt.Sprintf("Invalid argument to the enforce-map-style rule: %v", err))
}
@@ -94,8 +93,8 @@ func (r *EnforceMapStyleRule) Apply(file *lint.File, arguments lint.Arguments) [
return true
}
- if len(v.Elts) > 0 {
- // not an empty map
+ isEmptyMap := len(v.Elts) > 0
+ if isEmptyMap {
return true
}
diff --git a/vendor/github.com/mgechev/revive/rule/enforce-repeated-arg-type-style.go b/vendor/github.com/mgechev/revive/rule/enforce-repeated-arg-type-style.go
index 067082b1b..a435ee186 100644
--- a/vendor/github.com/mgechev/revive/rule/enforce-repeated-arg-type-style.go
+++ b/vendor/github.com/mgechev/revive/rule/enforce-repeated-arg-type-style.go
@@ -3,7 +3,6 @@ package rule
import (
"fmt"
"go/ast"
- "go/types"
"sync"
"github.com/mgechev/revive/lint"
@@ -104,13 +103,6 @@ func (r *EnforceRepeatedArgTypeStyleRule) Apply(file *lint.File, arguments lint.
var failures []lint.Failure
- err := file.Pkg.TypeCheck()
- if err != nil {
- // the file has other issues
- return nil
- }
- typesInfo := file.Pkg.TypesInfo()
-
astFile := file.AST
ast.Inspect(astFile, func(n ast.Node) bool {
switch fn := n.(type) {
@@ -134,12 +126,14 @@ func (r *EnforceRepeatedArgTypeStyleRule) Apply(file *lint.File, arguments lint.
var prevType ast.Expr
if fn.Type.Params != nil {
for _, field := range fn.Type.Params.List {
- if types.Identical(typesInfo.Types[field.Type].Type, typesInfo.Types[prevType].Type) {
+ prevTypeStr := gofmt(prevType)
+ currentTypeStr := gofmt(field.Type)
+ if currentTypeStr == prevTypeStr {
failures = append(failures, lint.Failure{
Confidence: 1,
- Node: field,
+ Node: prevType,
Category: "style",
- Failure: "repeated argument type can be omitted",
+ Failure: fmt.Sprintf("repeated argument type %q can be omitted", prevTypeStr),
})
}
prevType = field.Type
@@ -166,12 +160,14 @@ func (r *EnforceRepeatedArgTypeStyleRule) Apply(file *lint.File, arguments lint.
var prevType ast.Expr
if fn.Type.Results != nil {
for _, field := range fn.Type.Results.List {
- if field.Names != nil && types.Identical(typesInfo.Types[field.Type].Type, typesInfo.Types[prevType].Type) {
+ prevTypeStr := gofmt(prevType)
+ currentTypeStr := gofmt(field.Type)
+ if field.Names != nil && currentTypeStr == prevTypeStr {
failures = append(failures, lint.Failure{
Confidence: 1,
- Node: field,
+ Node: prevType,
Category: "style",
- Failure: "repeated return type can be omitted",
+ Failure: fmt.Sprintf("repeated return type %q can be omitted", prevTypeStr),
})
}
prevType = field.Type
diff --git a/vendor/github.com/mgechev/revive/rule/enforce-slice-style.go b/vendor/github.com/mgechev/revive/rule/enforce-slice-style.go
index 60d8ac066..14be25893 100644
--- a/vendor/github.com/mgechev/revive/rule/enforce-slice-style.go
+++ b/vendor/github.com/mgechev/revive/rule/enforce-slice-style.go
@@ -69,7 +69,6 @@ func (r *EnforceSliceStyleRule) configure(arguments lint.Arguments) {
var err error
r.enforceSliceStyle, err = sliceStyleFromString(enforceSliceStyle)
-
if err != nil {
panic(fmt.Sprintf("Invalid argument to the enforce-slice-style rule: %v", err))
}
@@ -101,8 +100,8 @@ func (r *EnforceSliceStyleRule) Apply(file *lint.File, arguments lint.Arguments)
return true
}
- if len(v.Elts) > 0 {
- // not an empty slice
+ isNotEmptySlice := len(v.Elts) > 0
+ if isNotEmptySlice {
return true
}
@@ -132,8 +131,8 @@ func (r *EnforceSliceStyleRule) Apply(file *lint.File, arguments lint.Arguments)
return true
}
- if len(v.Args) < 2 {
- // skip invalid make declarations
+ isInvalidMakeDeclaration := len(v.Args) < 2
+ if isInvalidMakeDeclaration {
return true
}
@@ -148,8 +147,8 @@ func (r *EnforceSliceStyleRule) Apply(file *lint.File, arguments lint.Arguments)
return true
}
- if arg.Value != "0" {
- // skip slice with non-zero size
+ isSliceSizeNotZero := arg.Value != "0"
+ if isSliceSizeNotZero {
return true
}
@@ -160,8 +159,8 @@ func (r *EnforceSliceStyleRule) Apply(file *lint.File, arguments lint.Arguments)
return true
}
- if arg.Value != "0" {
- // skip non-zero capacity slice
+ isNonZeroCapacitySlice := arg.Value != "0"
+ if isNonZeroCapacitySlice {
return true
}
}
diff --git a/vendor/github.com/mgechev/revive/rule/exported.go b/vendor/github.com/mgechev/revive/rule/exported.go
index b8663c48c..e3972d40e 100644
--- a/vendor/github.com/mgechev/revive/rule/exported.go
+++ b/vendor/github.com/mgechev/revive/rule/exported.go
@@ -13,28 +13,93 @@ import (
"github.com/mgechev/revive/lint"
)
+// disabledChecks store ignored warnings types
+type disabledChecks struct {
+ Const bool
+ Function bool
+ Method bool
+ PrivateReceivers bool
+ PublicInterfaces bool
+ Stuttering bool
+ Type bool
+ Var bool
+}
+
+const checkNamePrivateReceivers = "privateReceivers"
+const checkNamePublicInterfaces = "publicInterfaces"
+const checkNameStuttering = "stuttering"
+
+// isDisabled returns true if the given check is disabled, false otherwise
+func (dc *disabledChecks) isDisabled(checkName string) bool {
+ switch checkName {
+ case "var":
+ return dc.Var
+ case "const":
+ return dc.Const
+ case "function":
+ return dc.Function
+ case "method":
+ return dc.Method
+ case checkNamePrivateReceivers:
+ return dc.PrivateReceivers
+ case checkNamePublicInterfaces:
+ return dc.PublicInterfaces
+ case checkNameStuttering:
+ return dc.Stuttering
+ case "type":
+ return dc.Type
+ default:
+ return false
+ }
+}
+
// ExportedRule lints given else constructs.
type ExportedRule struct {
- configured bool
- checkPrivateReceivers bool
- disableStutteringCheck bool
- stuttersMsg string
+ configured bool
+ stuttersMsg string
+ disabledChecks disabledChecks
sync.Mutex
}
func (r *ExportedRule) configure(arguments lint.Arguments) {
r.Lock()
- if !r.configured {
- var sayRepetitiveInsteadOfStutters bool
- r.checkPrivateReceivers, r.disableStutteringCheck, sayRepetitiveInsteadOfStutters = r.getConf(arguments)
- r.stuttersMsg = "stutters"
- if sayRepetitiveInsteadOfStutters {
- r.stuttersMsg = "is repetitive"
- }
+ defer r.Unlock()
+ if r.configured {
+ return
+ }
+ r.configured = true
- r.configured = true
+ r.disabledChecks = disabledChecks{PrivateReceivers: true, PublicInterfaces: true}
+ r.stuttersMsg = "stutters"
+ for _, flag := range arguments {
+ switch flag := flag.(type) {
+ case string:
+ switch flag {
+ case "checkPrivateReceivers":
+ r.disabledChecks.PrivateReceivers = false
+ case "disableStutteringCheck":
+ r.disabledChecks.Stuttering = true
+ case "sayRepetitiveInsteadOfStutters":
+ r.stuttersMsg = "is repetitive"
+ case "checkPublicInterface":
+ r.disabledChecks.PublicInterfaces = false
+ case "disableChecksOnConstants":
+ r.disabledChecks.Const = true
+ case "disableChecksOnFunctions":
+ r.disabledChecks.Function = true
+ case "disableChecksOnMethods":
+ r.disabledChecks.Method = true
+ case "disableChecksOnTypes":
+ r.disabledChecks.Type = true
+ case "disableChecksOnVariables":
+ r.disabledChecks.Var = true
+ default:
+ panic(fmt.Sprintf("Unknown configuration flag %s for %s rule", flag, r.Name()))
+ }
+ default:
+ panic(fmt.Sprintf("Invalid argument for the %s rule: expecting a string, got %T", r.Name(), flag))
+ }
}
- r.Unlock()
}
// Apply applies the rule to given file.
@@ -55,9 +120,8 @@ func (r *ExportedRule) Apply(file *lint.File, args lint.Arguments) []lint.Failur
failures = append(failures, failure)
},
genDeclMissingComments: make(map[*ast.GenDecl]bool),
- checkPrivateReceivers: r.checkPrivateReceivers,
- disableStutteringCheck: r.disableStutteringCheck,
stuttersMsg: r.stuttersMsg,
+ disabledChecks: r.disabledChecks,
}
ast.Walk(&walker, fileAst)
@@ -70,61 +134,36 @@ func (*ExportedRule) Name() string {
return "exported"
}
-func (r *ExportedRule) getConf(args lint.Arguments) (checkPrivateReceivers, disableStutteringCheck, sayRepetitiveInsteadOfStutters bool) {
- // if any, we expect a slice of strings as configuration
- if len(args) < 1 {
- return
- }
- for _, flag := range args {
- flagStr, ok := flag.(string)
- if !ok {
- panic(fmt.Sprintf("Invalid argument for the %s rule: expecting a string, got %T", r.Name(), flag))
- }
-
- switch flagStr {
- case "checkPrivateReceivers":
- checkPrivateReceivers = true
- case "disableStutteringCheck":
- disableStutteringCheck = true
- case "sayRepetitiveInsteadOfStutters":
- sayRepetitiveInsteadOfStutters = true
- default:
- panic(fmt.Sprintf("Unknown configuration flag %s for %s rule", flagStr, r.Name()))
- }
- }
-
- return
-}
-
type lintExported struct {
file *lint.File
fileAst *ast.File
lastGen *ast.GenDecl
genDeclMissingComments map[*ast.GenDecl]bool
onFailure func(lint.Failure)
- checkPrivateReceivers bool
- disableStutteringCheck bool
stuttersMsg string
+ disabledChecks disabledChecks
}
func (w *lintExported) lintFuncDoc(fn *ast.FuncDecl) {
if !ast.IsExported(fn.Name.Name) {
- // func is unexported
- return
+ return // func is unexported, nothing to do
}
+
kind := "function"
name := fn.Name.Name
- if fn.Recv != nil && len(fn.Recv.List) > 0 {
- // method
+ isMethod := fn.Recv != nil && len(fn.Recv.List) > 0
+ if isMethod {
kind = "method"
recv := typeparams.ReceiverType(fn)
- if !w.checkPrivateReceivers && !ast.IsExported(recv) {
- // receiver is unexported
+
+ if !ast.IsExported(recv) && w.disabledChecks.PrivateReceivers {
return
}
+
if commonMethods[name] {
return
}
+
switch name {
case "Len", "Less", "Swap":
sortables := w.file.Pkg.Sortable()
@@ -134,6 +173,11 @@ func (w *lintExported) lintFuncDoc(fn *ast.FuncDecl) {
}
name = recv + "." + name
}
+
+ if w.disabledChecks.isDisabled(kind) {
+ return
+ }
+
if fn.Doc == nil {
w.onFailure(lint.Failure{
Node: fn,
@@ -143,6 +187,7 @@ func (w *lintExported) lintFuncDoc(fn *ast.FuncDecl) {
})
return
}
+
s := normalizeText(fn.Doc.Text())
prefix := fn.Name.Name + " "
if !strings.HasPrefix(s, prefix) {
@@ -156,7 +201,7 @@ func (w *lintExported) lintFuncDoc(fn *ast.FuncDecl) {
}
func (w *lintExported) checkStutter(id *ast.Ident, thing string) {
- if w.disableStutteringCheck {
+ if w.disabledChecks.Stuttering {
return
}
@@ -190,9 +235,14 @@ func (w *lintExported) checkStutter(id *ast.Ident, thing string) {
}
func (w *lintExported) lintTypeDoc(t *ast.TypeSpec, doc *ast.CommentGroup) {
+ if w.disabledChecks.isDisabled("type") {
+ return
+ }
+
if !ast.IsExported(t.Name.Name) {
return
}
+
if doc == nil {
w.onFailure(lint.Failure{
Node: t,
@@ -214,14 +264,19 @@ func (w *lintExported) lintTypeDoc(t *ast.TypeSpec, doc *ast.CommentGroup) {
break
}
}
- if !strings.HasPrefix(s, t.Name.Name+" ") {
- w.onFailure(lint.Failure{
- Node: doc,
- Confidence: 1,
- Category: "comments",
- Failure: fmt.Sprintf(`comment on exported type %v should be of the form "%v ..." (with optional leading article)`, t.Name, t.Name),
- })
+
+ // if comment starts with name of type and has some text after - it's ok
+ expectedPrefix := t.Name.Name + " "
+ if strings.HasPrefix(s, expectedPrefix) {
+ return
}
+
+ w.onFailure(lint.Failure{
+ Node: doc,
+ Confidence: 1,
+ Category: "comments",
+ Failure: fmt.Sprintf(`comment on exported type %v should be of the form "%s..." (with optional leading article)`, t.Name, expectedPrefix),
+ })
}
func (w *lintExported) lintValueSpecDoc(vs *ast.ValueSpec, gd *ast.GenDecl, genDeclMissingComments map[*ast.GenDecl]bool) {
@@ -230,6 +285,10 @@ func (w *lintExported) lintValueSpecDoc(vs *ast.ValueSpec, gd *ast.GenDecl, genD
kind = "const"
}
+ if w.disabledChecks.isDisabled(kind) {
+ return
+ }
+
if len(vs.Names) > 1 {
// Check that none are exported except for the first.
for _, n := range vs.Names[1:] {
@@ -251,7 +310,7 @@ func (w *lintExported) lintValueSpecDoc(vs *ast.ValueSpec, gd *ast.GenDecl, genD
return
}
- if vs.Doc == nil && vs.Comment == nil && gd.Doc == nil {
+ if vs.Doc == nil && gd.Doc == nil {
if genDeclMissingComments[gd] {
return
}
@@ -301,7 +360,7 @@ func (w *lintExported) lintValueSpecDoc(vs *ast.ValueSpec, gd *ast.GenDecl, genD
//
// This function is needed because ast.CommentGroup.Text() does not handle //-style and /*-style comments uniformly
func normalizeText(t string) string {
- return strings.TrimPrefix(t, " ")
+ return strings.TrimSpace(t)
}
func (w *lintExported) Visit(n ast.Node) ast.Visitor {
@@ -330,7 +389,15 @@ func (w *lintExported) Visit(n ast.Node) ast.Visitor {
}
w.lintTypeDoc(v, doc)
w.checkStutter(v.Name, "type")
- // Don't proceed inside types.
+
+ if !w.disabledChecks.PublicInterfaces {
+ if iface, ok := v.Type.(*ast.InterfaceType); ok {
+ if ast.IsExported(v.Name.Name) {
+ w.doCheckPublicInterface(v.Name.Name, iface)
+ }
+ }
+ }
+
return nil
case *ast.ValueSpec:
w.lintValueSpecDoc(v, w.lastGen, w.genDeclMissingComments)
@@ -338,3 +405,38 @@ func (w *lintExported) Visit(n ast.Node) ast.Visitor {
}
return w
}
+
+func (w *lintExported) doCheckPublicInterface(typeName string, iface *ast.InterfaceType) {
+ for _, m := range iface.Methods.List {
+ w.lintInterfaceMethod(typeName, m)
+ }
+}
+
+func (w *lintExported) lintInterfaceMethod(typeName string, m *ast.Field) {
+ if len(m.Names) == 0 {
+ return
+ }
+ if !ast.IsExported(m.Names[0].Name) {
+ return
+ }
+ name := m.Names[0].Name
+ if m.Doc == nil {
+ w.onFailure(lint.Failure{
+ Node: m,
+ Confidence: 1,
+ Category: "comments",
+ Failure: fmt.Sprintf("public interface method %s.%s should be commented", typeName, name),
+ })
+ return
+ }
+ s := normalizeText(m.Doc.Text())
+ expectedPrefix := m.Names[0].Name + " "
+ if !strings.HasPrefix(s, expectedPrefix) {
+ w.onFailure(lint.Failure{
+ Node: m.Doc,
+ Confidence: 0.8,
+ Category: "comments",
+ Failure: fmt.Sprintf(`comment on exported interface method %s.%s should be of the form "%s..."`, typeName, name, expectedPrefix),
+ })
+ }
+}
diff --git a/vendor/github.com/mgechev/revive/rule/file-header.go b/vendor/github.com/mgechev/revive/rule/file-header.go
index a7d69ff2b..0dcb57746 100644
--- a/vendor/github.com/mgechev/revive/rule/file-header.go
+++ b/vendor/github.com/mgechev/revive/rule/file-header.go
@@ -22,16 +22,18 @@ var (
func (r *FileHeaderRule) configure(arguments lint.Arguments) {
r.Lock()
defer r.Unlock()
- if r.header == "" {
- if len(arguments) < 1 {
- return
- }
+ if r.header != "" {
+ return // already configured
+ }
- var ok bool
- r.header, ok = arguments[0].(string)
- if !ok {
- panic(fmt.Sprintf("invalid argument for \"file-header\" rule: argument should be a string, got %T", arguments[0]))
- }
+ if len(arguments) < 1 {
+ return
+ }
+
+ var ok bool
+ r.header, ok = arguments[0].(string)
+ if !ok {
+ panic(fmt.Sprintf("invalid argument for \"file-header\" rule: argument should be a string, got %T", arguments[0]))
}
}
diff --git a/vendor/github.com/mgechev/revive/rule/file-length-limit.go b/vendor/github.com/mgechev/revive/rule/file-length-limit.go
new file mode 100644
index 000000000..c5a5641f4
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/file-length-limit.go
@@ -0,0 +1,138 @@
+package rule
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/token"
+ "strings"
+ "sync"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// FileLengthLimitRule lints the number of lines in a file.
+type FileLengthLimitRule struct {
+ // max is the maximum number of lines allowed in a file. 0 means the rule is disabled.
+ max int
+ // skipComments indicates whether to skip comment lines when counting lines.
+ skipComments bool
+ // skipBlankLines indicates whether to skip blank lines when counting lines.
+ skipBlankLines bool
+ sync.Mutex
+}
+
+// Apply applies the rule to given file.
+func (r *FileLengthLimitRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
+ r.configure(arguments)
+
+ if r.max <= 0 {
+ // when max is negative or 0 the rule is disabled
+ return nil
+ }
+
+ all := 0
+ blank := 0
+ scanner := bufio.NewScanner(bytes.NewReader(file.Content()))
+ for scanner.Scan() {
+ all++
+ if len(bytes.TrimSpace(scanner.Bytes())) == 0 {
+ blank++
+ }
+ }
+
+ if err := scanner.Err(); err != nil {
+ panic(err.Error())
+ }
+
+ lines := all
+ if r.skipComments {
+ lines -= countCommentLines(file.AST.Comments)
+ }
+
+ if r.skipBlankLines {
+ lines -= blank
+ }
+
+ if lines <= r.max {
+ return nil
+ }
+
+ return []lint.Failure{
+ {
+ Category: "code-style",
+ Confidence: 1,
+ Position: lint.FailurePosition{
+ Start: token.Position{
+ Filename: file.Name,
+ Line: all,
+ },
+ },
+ Failure: fmt.Sprintf("file length is %d lines, which exceeds the limit of %d", lines, r.max),
+ },
+ }
+}
+
+func (r *FileLengthLimitRule) configure(arguments lint.Arguments) {
+ r.Lock()
+ defer r.Unlock()
+
+ if r.max != 0 {
+ return // already configured
+ }
+
+ if len(arguments) < 1 {
+ return // use default
+ }
+
+ argKV, ok := arguments[0].(map[string]any)
+ if !ok {
+ panic(fmt.Sprintf(`invalid argument to the "file-length-limit" rule. Expecting a k,v map, got %T`, arguments[0]))
+ }
+ for k, v := range argKV {
+ switch k {
+ case "max":
+ maxLines, ok := v.(int64)
+ if !ok || maxLines < 0 {
+ panic(fmt.Sprintf(`invalid configuration value for max lines in "file-length-limit" rule; need positive int64 but got %T`, arguments[0]))
+ }
+ r.max = int(maxLines)
+ case "skipComments":
+ skipComments, ok := v.(bool)
+ if !ok {
+ panic(fmt.Sprintf(`invalid configuration value for skip comments in "file-length-limit" rule; need bool but got %T`, arguments[1]))
+ }
+ r.skipComments = skipComments
+ case "skipBlankLines":
+ skipBlankLines, ok := v.(bool)
+ if !ok {
+ panic(fmt.Sprintf(`invalid configuration value for skip blank lines in "file-length-limit" rule; need bool but got %T`, arguments[2]))
+ }
+ r.skipBlankLines = skipBlankLines
+ }
+ }
+}
+
+// Name returns the rule name.
+func (*FileLengthLimitRule) Name() string {
+ return "file-length-limit"
+}
+
+func countCommentLines(comments []*ast.CommentGroup) int {
+ count := 0
+ for _, cg := range comments {
+ for _, comment := range cg.List {
+ if len(comment.Text) < 2 {
+ continue
+ }
+ switch comment.Text[1] {
+ case '/': // single-line comment
+ count++
+ case '*': // multi-line comment
+ count += strings.Count(comment.Text, "\n") + 1
+ }
+ }
+ }
+ return count
+}
diff --git a/vendor/github.com/mgechev/revive/rule/filename-format.go b/vendor/github.com/mgechev/revive/rule/filename-format.go
new file mode 100644
index 000000000..49fdf9c3e
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/filename-format.go
@@ -0,0 +1,87 @@
+package rule
+
+import (
+ "fmt"
+ "path/filepath"
+ "regexp"
+ "sync"
+ "unicode"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// FilenameFormatRule lints source filenames according to a set of regular expressions given as arguments
+type FilenameFormatRule struct {
+ format *regexp.Regexp
+ sync.Mutex
+}
+
+// Apply applies the rule to the given file.
+func (r *FilenameFormatRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
+ r.configure(arguments)
+
+ filename := filepath.Base(file.Name)
+ if r.format.MatchString(filename) {
+ return nil
+ }
+
+ failureMsg := fmt.Sprintf("Filename %s is not of the format %s.%s", filename, r.format.String(), r.getMsgForNonASCIIChars(filename))
+ return []lint.Failure{{
+ Confidence: 1,
+ Failure: failureMsg,
+ RuleName: r.Name(),
+ Node: file.AST.Name,
+ }}
+}
+
+func (r *FilenameFormatRule) getMsgForNonASCIIChars(str string) string {
+ result := ""
+ for _, c := range str {
+ if c <= unicode.MaxASCII {
+ continue
+ }
+
+ result += fmt.Sprintf(" Non ASCII character %c (%U) found.", c, c)
+ }
+
+ return result
+}
+
+// Name returns the rule name.
+func (*FilenameFormatRule) Name() string {
+ return "filename-format"
+}
+
+var defaultFormat = regexp.MustCompile("^[_A-Za-z0-9][_A-Za-z0-9-]*.go$")
+
+func (r *FilenameFormatRule) configure(arguments lint.Arguments) {
+ r.Lock()
+ defer r.Unlock()
+
+ if r.format != nil {
+ return
+ }
+
+ argsCount := len(arguments)
+ if argsCount == 0 {
+ r.format = defaultFormat
+ return
+ }
+
+ if argsCount > 1 {
+ panic(fmt.Sprintf("rule %q expects only one argument, got %d %v", r.Name(), argsCount, arguments))
+ }
+
+ arg := arguments[0]
+ str, ok := arg.(string)
+ if !ok {
+ panic(fmt.Sprintf("rule %q expects a string argument, got %v of type %T", r.Name(), arg, arg))
+ }
+
+ format, err := regexp.Compile(str)
+ if err != nil {
+ panic(fmt.Sprintf("rule %q expects a valid regexp argument, got %v for %s", r.Name(), err, arg))
+ }
+
+ r.format = format
+}
diff --git a/vendor/github.com/mgechev/revive/rule/function-length.go b/vendor/github.com/mgechev/revive/rule/function-length.go
index fd65884e9..30402313d 100644
--- a/vendor/github.com/mgechev/revive/rule/function-length.go
+++ b/vendor/github.com/mgechev/revive/rule/function-length.go
@@ -20,12 +20,14 @@ type FunctionLength struct {
func (r *FunctionLength) configure(arguments lint.Arguments) {
r.Lock()
defer r.Unlock()
- if !r.configured {
- maxStmt, maxLines := r.parseArguments(arguments)
- r.maxStmt = int(maxStmt)
- r.maxLines = int(maxLines)
- r.configured = true
+ if r.configured {
+ return
}
+
+ r.configured = true
+ maxStmt, maxLines := r.parseArguments(arguments)
+ r.maxStmt = int(maxStmt)
+ r.maxLines = int(maxLines)
}
// Apply applies the rule to given file.
@@ -61,8 +63,9 @@ func (*FunctionLength) parseArguments(arguments lint.Arguments) (maxStmt, maxLin
return defaultFuncStmtsLimit, defaultFuncLinesLimit
}
- if len(arguments) != 2 {
- panic(fmt.Sprintf(`invalid configuration for "function-length" rule, expected 2 arguments but got %d`, len(arguments)))
+ const minArguments = 2
+ if len(arguments) != minArguments {
+ panic(fmt.Sprintf(`invalid configuration for "function-length" rule, expected %d arguments but got %d`, minArguments, len(arguments)))
}
maxStmt, maxStmtOk := arguments[0].(int64)
@@ -98,7 +101,8 @@ func (w lintFuncLength) Visit(n ast.Node) ast.Visitor {
}
body := node.Body
- if body == nil || len(node.Body.List) == 0 {
+ emptyBody := body == nil || len(node.Body.List) == 0
+ if emptyBody {
return nil
}
diff --git a/vendor/github.com/mgechev/revive/rule/function-result-limit.go b/vendor/github.com/mgechev/revive/rule/function-result-limit.go
index 6a0748011..23474b5ee 100644
--- a/vendor/github.com/mgechev/revive/rule/function-result-limit.go
+++ b/vendor/github.com/mgechev/revive/rule/function-result-limit.go
@@ -19,20 +19,24 @@ const defaultResultsLimit = 3
func (r *FunctionResultsLimitRule) configure(arguments lint.Arguments) {
r.Lock()
defer r.Unlock()
- if r.max == 0 {
- if len(arguments) < 1 {
- r.max = defaultResultsLimit
- return
- }
- max, ok := arguments[0].(int64) // Alt. non panicking version
- if !ok {
- panic(fmt.Sprintf(`invalid value passed as return results number to the "function-result-limit" rule; need int64 but got %T`, arguments[0]))
- }
- if max < 0 {
- panic(`the value passed as return results number to the "function-result-limit" rule cannot be negative`)
- }
- r.max = int(max)
+ if r.max != 0 {
+ return // already configured
}
+
+ if len(arguments) < 1 {
+ r.max = defaultResultsLimit
+ return
+ }
+
+ maxResults, ok := arguments[0].(int64) // Alt. non panicking version
+ if !ok {
+ panic(fmt.Sprintf(`invalid value passed as return results number to the "function-result-limit" rule; need int64 but got %T`, arguments[0]))
+ }
+ if maxResults < 0 {
+ panic(`the value passed as return results number to the "function-result-limit" rule cannot be negative`)
+ }
+
+ r.max = int(maxResults)
}
// Apply applies the rule to given file.
@@ -67,7 +71,8 @@ func (w lintFunctionResultsNum) Visit(n ast.Node) ast.Visitor {
node, ok := n.(*ast.FuncDecl)
if ok {
num := 0
- if node.Type.Results != nil {
+ hasResults := node.Type.Results != nil
+ if hasResults {
num = node.Type.Results.NumFields()
}
if num > w.max {
@@ -76,8 +81,10 @@ func (w lintFunctionResultsNum) Visit(n ast.Node) ast.Visitor {
Failure: fmt.Sprintf("maximum number of return results per function exceeded; max %d but got %d", w.max, num),
Node: node.Type,
})
- return w
}
+
+ return nil // skip visiting function's body
}
+
return w
}
diff --git a/vendor/github.com/mgechev/revive/rule/get-return.go b/vendor/github.com/mgechev/revive/rule/get-return.go
index 600a40fac..06323a087 100644
--- a/vendor/github.com/mgechev/revive/rule/get-return.go
+++ b/vendor/github.com/mgechev/revive/rule/get-return.go
@@ -33,15 +33,25 @@ type lintReturnRule struct {
onFailure func(lint.Failure)
}
+const getterPrefix = "GET"
+
+var lenGetterPrefix = len(getterPrefix)
+
func isGetter(name string) bool {
- if strings.HasPrefix(strings.ToUpper(name), "GET") {
- if len(name) > 3 {
- c := name[3]
- return !(c >= 'a' && c <= 'z')
- }
+ nameHasGetterPrefix := strings.HasPrefix(strings.ToUpper(name), getterPrefix)
+ if !nameHasGetterPrefix {
+ return false
}
- return false
+ isJustGet := len(name) == lenGetterPrefix
+ if isJustGet {
+ return false
+ }
+
+ c := name[lenGetterPrefix]
+ lowerCaseAfterGetterPrefix := c >= 'a' && c <= 'z'
+
+ return !lowerCaseAfterGetterPrefix
}
func hasResults(rs *ast.FieldList) bool {
diff --git a/vendor/github.com/mgechev/revive/rule/identical-branches.go b/vendor/github.com/mgechev/revive/rule/identical-branches.go
index 9222c8a9c..c6008925f 100644
--- a/vendor/github.com/mgechev/revive/rule/identical-branches.go
+++ b/vendor/github.com/mgechev/revive/rule/identical-branches.go
@@ -39,9 +39,11 @@ func (w *lintIdenticalBranches) Visit(node ast.Node) ast.Visitor {
return w
}
- if n.Else == nil {
+ noElseBranch := n.Else == nil
+ if noElseBranch {
return w
}
+
branches := []*ast.BlockStmt{n.Body}
elseBranch, ok := n.Else.(*ast.BlockStmt)
@@ -59,14 +61,15 @@ func (w *lintIdenticalBranches) Visit(node ast.Node) ast.Visitor {
func (lintIdenticalBranches) identicalBranches(branches []*ast.BlockStmt) bool {
if len(branches) < 2 {
- return false
+ return false // only one branch to compare thus we return
}
- ref := gofmt(branches[0])
- refSize := len(branches[0].List)
+ referenceBranch := gofmt(branches[0])
+ referenceBranchSize := len(branches[0].List)
for i := 1; i < len(branches); i++ {
- currentSize := len(branches[i].List)
- if currentSize != refSize || gofmt(branches[i]) != ref {
+ currentBranch := branches[i]
+ currentBranchSize := len(currentBranch.List)
+ if currentBranchSize != referenceBranchSize || gofmt(currentBranch) != referenceBranch {
return false
}
}
diff --git a/vendor/github.com/mgechev/revive/rule/import-alias-naming.go b/vendor/github.com/mgechev/revive/rule/import-alias-naming.go
index a6d096c8b..48d22566a 100644
--- a/vendor/github.com/mgechev/revive/rule/import-alias-naming.go
+++ b/vendor/github.com/mgechev/revive/rule/import-alias-naming.go
@@ -68,7 +68,7 @@ func (r *ImportAliasNamingRule) Apply(file *lint.File, arguments lint.Arguments)
}
alias := is.Name
- if alias == nil || alias.Name == "_" || alias.Name == "." { // "_" and "." are special types of import aiases and should be processed by another linter rule
+ if alias == nil || alias.Name == "_" || alias.Name == "." { // "_" and "." are special types of import aliases and should be processed by another linter rule
continue
}
diff --git a/vendor/github.com/mgechev/revive/rule/indent-error-flow.go b/vendor/github.com/mgechev/revive/rule/indent-error-flow.go
index 294ceef84..ebc1e793a 100644
--- a/vendor/github.com/mgechev/revive/rule/indent-error-flow.go
+++ b/vendor/github.com/mgechev/revive/rule/indent-error-flow.go
@@ -18,27 +18,27 @@ func (*IndentErrorFlowRule) Name() string {
return "indent-error-flow"
}
-// CheckIfElse evaluates the rule against an ifelse.Chain.
-func (*IndentErrorFlowRule) CheckIfElse(chain ifelse.Chain, args ifelse.Args) (failMsg string) {
+// CheckIfElse evaluates the rule against an ifelse.Chain and returns a failure message if applicable.
+func (*IndentErrorFlowRule) CheckIfElse(chain ifelse.Chain, args ifelse.Args) string {
if !chain.If.Deviates() {
// this rule only applies if the if-block deviates control flow
- return
+ return ""
}
if chain.HasPriorNonDeviating {
// if we de-indent the "else" block then a previous branch
// might flow into it, affecting program behaviour
- return
+ return ""
}
if !chain.If.Returns() {
// avoid overlapping with superfluous-else
- return
+ return ""
}
if args.PreserveScope && !chain.AtBlockEnd && (chain.HasInitializer || chain.Else.HasDecls) {
// avoid increasing variable scope
- return
+ return ""
}
return "if block ends with a return statement, so drop this else and outdent its block"
diff --git a/vendor/github.com/mgechev/revive/rule/line-length-limit.go b/vendor/github.com/mgechev/revive/rule/line-length-limit.go
index 1a414f691..a154b7aec 100644
--- a/vendor/github.com/mgechev/revive/rule/line-length-limit.go
+++ b/vendor/github.com/mgechev/revive/rule/line-length-limit.go
@@ -23,19 +23,21 @@ const defaultLineLengthLimit = 80
func (r *LineLengthLimitRule) configure(arguments lint.Arguments) {
r.Lock()
defer r.Unlock()
- if r.max == 0 {
- if len(arguments) < 1 {
- r.max = defaultLineLengthLimit
- return
- }
+ if r.max != 0 {
+ return // already configured
+ }
- max, ok := arguments[0].(int64) // Alt. non panicking version
- if !ok || max < 0 {
- panic(`invalid value passed as argument number to the "line-length-limit" rule`)
- }
+ if len(arguments) < 1 {
+ r.max = defaultLineLengthLimit
+ return
+ }
- r.max = int(max)
+ maxLength, ok := arguments[0].(int64) // Alt. non panicking version
+ if !ok || maxLength < 0 {
+ panic(`invalid value passed as argument number to the "line-length-limit" rule`)
}
+
+ r.max = int(maxLength)
}
// Apply applies the rule to given file.
diff --git a/vendor/github.com/mgechev/revive/rule/max-control-nesting.go b/vendor/github.com/mgechev/revive/rule/max-control-nesting.go
index c4eb36193..5dbb1eefa 100644
--- a/vendor/github.com/mgechev/revive/rule/max-control-nesting.go
+++ b/vendor/github.com/mgechev/revive/rule/max-control-nesting.go
@@ -110,7 +110,7 @@ func (r *MaxControlNestingRule) configure(arguments lint.Arguments) {
r.Lock()
defer r.Unlock()
if !(r.max < 1) {
- return // max already set
+ return // max already configured
}
if len(arguments) < 1 {
@@ -120,9 +120,9 @@ func (r *MaxControlNestingRule) configure(arguments lint.Arguments) {
checkNumberOfArguments(1, arguments, r.Name())
- max, ok := arguments[0].(int64) // Alt. non panicking version
+ maxNesting, ok := arguments[0].(int64) // Alt. non panicking version
if !ok {
panic(`invalid value passed as argument number to the "max-control-nesting" rule`)
}
- r.max = max
+ r.max = maxNesting
}
diff --git a/vendor/github.com/mgechev/revive/rule/max-public-structs.go b/vendor/github.com/mgechev/revive/rule/max-public-structs.go
index 25be3e676..70840e734 100644
--- a/vendor/github.com/mgechev/revive/rule/max-public-structs.go
+++ b/vendor/github.com/mgechev/revive/rule/max-public-structs.go
@@ -19,20 +19,22 @@ const defaultMaxPublicStructs = 5
func (r *MaxPublicStructsRule) configure(arguments lint.Arguments) {
r.Lock()
defer r.Unlock()
- if r.max < 1 {
- if len(arguments) < 1 {
- r.max = defaultMaxPublicStructs
- return
- }
+ if r.max == 0 {
+ return // already configured
+ }
- checkNumberOfArguments(1, arguments, r.Name())
+ if len(arguments) < 1 {
+ r.max = defaultMaxPublicStructs
+ return
+ }
- max, ok := arguments[0].(int64) // Alt. non panicking version
- if !ok {
- panic(`invalid value passed as argument number to the "max-public-structs" rule`)
- }
- r.max = max
+ checkNumberOfArguments(1, arguments, r.Name())
+
+ maxStructs, ok := arguments[0].(int64) // Alt. non panicking version
+ if !ok {
+ panic(`invalid value passed as argument number to the "max-public-structs" rule`)
}
+ r.max = maxStructs
}
// Apply applies the rule to given file.
diff --git a/vendor/github.com/mgechev/revive/rule/receiver-naming.go b/vendor/github.com/mgechev/revive/rule/receiver-naming.go
index d79bb9fe8..afcd99b8f 100644
--- a/vendor/github.com/mgechev/revive/rule/receiver-naming.go
+++ b/vendor/github.com/mgechev/revive/rule/receiver-naming.go
@@ -3,16 +3,55 @@ package rule
import (
"fmt"
"go/ast"
+ "sync"
"github.com/mgechev/revive/internal/typeparams"
"github.com/mgechev/revive/lint"
)
// ReceiverNamingRule lints given else constructs.
-type ReceiverNamingRule struct{}
+type ReceiverNamingRule struct {
+ receiverNameMaxLength int
+ sync.Mutex
+}
+
+const defaultReceiverNameMaxLength = -1 // thus will not check
+
+func (r *ReceiverNamingRule) configure(arguments lint.Arguments) {
+ r.Lock()
+ defer r.Unlock()
+ if r.receiverNameMaxLength != 0 {
+ return
+ }
+
+ r.receiverNameMaxLength = defaultReceiverNameMaxLength
+ if len(arguments) < 1 {
+ return
+ }
+
+ args, ok := arguments[0].(map[string]any)
+ if !ok {
+ panic(fmt.Sprintf("Unable to get arguments for rule %s. Expected object of key-value-pairs.", r.Name()))
+ }
+
+ for k, v := range args {
+ switch k {
+ case "maxLength":
+ value, ok := v.(int64)
+ if !ok {
+ panic(fmt.Sprintf("Invalid value %v for argument %s of rule %s, expected integer value got %T", v, k, r.Name(), v))
+ }
+ r.receiverNameMaxLength = int(value)
+ default:
+ panic(fmt.Sprintf("Unknown argument %s for %s rule.", k, r.Name()))
+ }
+ }
+}
// Apply applies the rule to given file.
-func (*ReceiverNamingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+func (r *ReceiverNamingRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure {
+ r.configure(args)
+
var failures []lint.Failure
fileAst := file.AST
@@ -20,7 +59,8 @@ func (*ReceiverNamingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failu
onFailure: func(failure lint.Failure) {
failures = append(failures, failure)
},
- typeReceiver: map[string]string{},
+ typeReceiver: map[string]string{},
+ receiverNameMaxLength: r.receiverNameMaxLength,
}
ast.Walk(walker, fileAst)
@@ -34,8 +74,9 @@ func (*ReceiverNamingRule) Name() string {
}
type lintReceiverName struct {
- onFailure func(lint.Failure)
- typeReceiver map[string]string
+ onFailure func(lint.Failure)
+ typeReceiver map[string]string
+ receiverNameMaxLength int
}
func (w lintReceiverName) Visit(n ast.Node) ast.Visitor {
@@ -66,6 +107,17 @@ func (w lintReceiverName) Visit(n ast.Node) ast.Visitor {
})
return w
}
+
+ if w.receiverNameMaxLength > 0 && len([]rune(name)) > w.receiverNameMaxLength {
+ w.onFailure(lint.Failure{
+ Node: n,
+ Confidence: 1,
+ Category: "naming",
+ Failure: fmt.Sprintf("receiver name %s is longer than %d characters", name, w.receiverNameMaxLength),
+ })
+ return w
+ }
+
recv := typeparams.ReceiverType(fn)
if prev, ok := w.typeReceiver[recv]; ok && prev != name {
w.onFailure(lint.Failure{
diff --git a/vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go b/vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go
index b3ff08456..10ea16ae1 100644
--- a/vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go
+++ b/vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go
@@ -4,6 +4,7 @@ import (
"fmt"
"go/ast"
"go/token"
+ "maps"
"github.com/mgechev/revive/lint"
)
@@ -33,6 +34,12 @@ var builtFunctions = map[string]bool{
"recover": true,
}
+var builtFunctionsAfterGo121 = map[string]bool{
+ "clear": true,
+ "max": true,
+ "min": true,
+}
+
var builtInTypes = map[string]bool{
"bool": true,
"byte": true,
@@ -69,7 +76,17 @@ func (*RedefinesBuiltinIDRule) Apply(file *lint.File, _ lint.Arguments) []lint.F
}
astFile := file.AST
- w := &lintRedefinesBuiltinID{onFailure}
+
+ builtFuncs := maps.Clone(builtFunctions)
+ if file.Pkg.IsAtLeastGo121() {
+ maps.Copy(builtFuncs, builtFunctionsAfterGo121)
+ }
+ w := &lintRedefinesBuiltinID{
+ onFailure: onFailure,
+ builtInConstAndVars: builtInConstAndVars,
+ builtFunctions: builtFuncs,
+ builtInTypes: builtInTypes,
+ }
ast.Walk(w, astFile)
return failures
@@ -81,7 +98,10 @@ func (*RedefinesBuiltinIDRule) Name() string {
}
type lintRedefinesBuiltinID struct {
- onFailure func(lint.Failure)
+ onFailure func(lint.Failure)
+ builtInConstAndVars map[string]bool
+ builtFunctions map[string]bool
+ builtInTypes map[string]bool
}
func (w *lintRedefinesBuiltinID) Visit(node ast.Node) ast.Visitor {
@@ -125,6 +145,31 @@ func (w *lintRedefinesBuiltinID) Visit(node ast.Node) ast.Visitor {
if ok, bt := w.isBuiltIn(id); ok {
w.addFailure(n, fmt.Sprintf("redefinition of the built-in %s %s", bt, id))
}
+ case *ast.FuncType:
+ var fields []*ast.Field
+ if n.TypeParams != nil {
+ fields = append(fields, n.TypeParams.List...)
+ }
+ if n.Params != nil {
+ fields = append(fields, n.Params.List...)
+ }
+ if n.Results != nil {
+ fields = append(fields, n.Results.List...)
+ }
+ for _, field := range fields {
+ for _, name := range field.Names {
+ obj := name.Obj
+ isTypeOrName := obj != nil && (obj.Kind == ast.Var || obj.Kind == ast.Typ)
+ if !isTypeOrName {
+ continue
+ }
+
+ id := obj.Name
+ if ok, bt := w.isBuiltIn(id); ok {
+ w.addFailure(name, fmt.Sprintf("redefinition of the built-in %s %s", bt, id))
+ }
+ }
+ }
case *ast.AssignStmt:
for _, e := range n.Lhs {
id, ok := e.(*ast.Ident)
@@ -162,16 +207,16 @@ func (w lintRedefinesBuiltinID) addFailure(node ast.Node, msg string) {
})
}
-func (lintRedefinesBuiltinID) isBuiltIn(id string) (r bool, builtInKind string) {
- if builtFunctions[id] {
+func (w *lintRedefinesBuiltinID) isBuiltIn(id string) (r bool, builtInKind string) {
+ if w.builtFunctions[id] {
return true, "function"
}
- if builtInConstAndVars[id] {
+ if w.builtInConstAndVars[id] {
return true, "constant or variable"
}
- if builtInTypes[id] {
+ if w.builtInTypes[id] {
return true, "type"
}
diff --git a/vendor/github.com/mgechev/revive/rule/string-format.go b/vendor/github.com/mgechev/revive/rule/string-format.go
index 70edf7387..ecac3fa7c 100644
--- a/vendor/github.com/mgechev/revive/rule/string-format.go
+++ b/vendor/github.com/mgechev/revive/rule/string-format.go
@@ -6,6 +6,7 @@ import (
"go/token"
"regexp"
"strconv"
+ "strings"
"github.com/mgechev/revive/lint"
)
@@ -66,12 +67,14 @@ type lintStringFormatRule struct {
type stringFormatSubrule struct {
parent *lintStringFormatRule
- scope stringFormatSubruleScope
+ scopes stringFormatSubruleScopes
regexp *regexp.Regexp
negated bool
errorMessage string
}
+type stringFormatSubruleScopes []*stringFormatSubruleScope
+
type stringFormatSubruleScope struct {
funcName string // Function name the rule is scoped to
argument int // (optional) Which argument in calls to the function is checked against the rule (the first argument is checked by default)
@@ -90,10 +93,10 @@ var parseStringFormatScope = regexp.MustCompile(
func (w *lintStringFormatRule) parseArguments(arguments lint.Arguments) {
for i, argument := range arguments {
- scope, regex, negated, errorMessage := w.parseArgument(argument, i)
+ scopes, regex, negated, errorMessage := w.parseArgument(argument, i)
w.rules = append(w.rules, stringFormatSubrule{
parent: w,
- scope: scope,
+ scopes: scopes,
regexp: regex,
negated: negated,
errorMessage: errorMessage,
@@ -101,7 +104,7 @@ func (w *lintStringFormatRule) parseArguments(arguments lint.Arguments) {
}
}
-func (w lintStringFormatRule) parseArgument(argument any, ruleNum int) (scope stringFormatSubruleScope, regex *regexp.Regexp, negated bool, errorMessage string) {
+func (w lintStringFormatRule) parseArgument(argument any, ruleNum int) (scopes stringFormatSubruleScopes, regex *regexp.Regexp, negated bool, errorMessage string) {
g, ok := argument.([]any) // Cast to generic slice first
if !ok {
w.configError("argument is not a slice", ruleNum, 0)
@@ -125,26 +128,39 @@ func (w lintStringFormatRule) parseArgument(argument any, ruleNum int) (scope st
w.configError("regex is too small (regexes should begin and end with '/')", ruleNum, 1)
}
- // Parse rule scope
- scope = stringFormatSubruleScope{}
- matches := parseStringFormatScope.FindStringSubmatch(rule[0])
- if matches == nil {
- // The rule's scope didn't match the parsing regex at all, probably a configuration error
- w.parseError("unable to parse rule scope", ruleNum, 0)
- } else if len(matches) != 4 {
- // The rule's scope matched the parsing regex, but an unexpected number of submatches was returned, probably a bug
- w.parseError(fmt.Sprintf("unexpected number of submatches when parsing scope: %d, expected 4", len(matches)), ruleNum, 0)
- }
- scope.funcName = matches[1]
- if len(matches[2]) > 0 {
- var err error
- scope.argument, err = strconv.Atoi(matches[2])
- if err != nil {
- w.parseError("unable to parse argument number in rule scope", ruleNum, 0)
+ // Parse rule scopes
+ rawScopes := strings.Split(rule[0], ",")
+
+ scopes = make([]*stringFormatSubruleScope, 0, len(rawScopes))
+ for scopeNum, rawScope := range rawScopes {
+ rawScope = strings.TrimSpace(rawScope)
+
+ if len(rawScope) == 0 {
+ w.parseScopeError("empty scope in rule scopes:", ruleNum, 0, scopeNum)
}
- }
- if len(matches[3]) > 0 {
- scope.field = matches[3]
+
+ scope := stringFormatSubruleScope{}
+ matches := parseStringFormatScope.FindStringSubmatch(rawScope)
+ if matches == nil {
+ // The rule's scope didn't match the parsing regex at all, probably a configuration error
+ w.parseScopeError("unable to parse rule scope", ruleNum, 0, scopeNum)
+ } else if len(matches) != 4 {
+ // The rule's scope matched the parsing regex, but an unexpected number of submatches was returned, probably a bug
+ w.parseScopeError(fmt.Sprintf("unexpected number of submatches when parsing scope: %d, expected 4", len(matches)), ruleNum, 0, scopeNum)
+ }
+ scope.funcName = matches[1]
+ if len(matches[2]) > 0 {
+ var err error
+ scope.argument, err = strconv.Atoi(matches[2])
+ if err != nil {
+ w.parseScopeError("unable to parse argument number in rule scope", ruleNum, 0, scopeNum)
+ }
+ }
+ if len(matches[3]) > 0 {
+ scope.field = matches[3]
+ }
+
+ scopes = append(scopes, &scope)
}
// Strip / characters from the beginning and end of rule[1] before compiling
@@ -162,7 +178,7 @@ func (w lintStringFormatRule) parseArgument(argument any, ruleNum int) (scope st
if len(rule) == 3 {
errorMessage = rule[2]
}
- return scope, regex, negated, errorMessage
+ return scopes, regex, negated, errorMessage
}
// Report an invalid config, this is specifically the user's fault
@@ -175,6 +191,11 @@ func (lintStringFormatRule) parseError(msg string, ruleNum, option int) {
panic(fmt.Sprintf("failed to parse configuration for string-format: %s [argument %d, option %d]", msg, ruleNum, option))
}
+// Report a general scope config parsing failure, this may be the user's fault, but it isn't known for certain
+func (lintStringFormatRule) parseScopeError(msg string, ruleNum, option, scopeNum int) {
+ panic(fmt.Sprintf("failed to parse configuration for string-format: %s [argument %d, option %d, scope index %d]", msg, ruleNum, option, scopeNum))
+}
+
// #endregion
// #region Node traversal
@@ -193,8 +214,10 @@ func (w lintStringFormatRule) Visit(node ast.Node) ast.Visitor {
}
for _, rule := range w.rules {
- if rule.scope.funcName == callName {
- rule.Apply(call)
+ for _, scope := range rule.scopes {
+ if scope.funcName == callName {
+ rule.apply(call, scope)
+ }
}
}
@@ -228,15 +251,15 @@ func (lintStringFormatRule) getCallName(call *ast.CallExpr) (callName string, ok
// #region Linting logic
-// Apply a single format rule to a call expression (should be done after verifying the that the call expression matches the rule's scope)
-func (r *stringFormatSubrule) Apply(call *ast.CallExpr) {
- if len(call.Args) <= r.scope.argument {
+// apply a single format rule to a call expression (should be done after verifying the that the call expression matches the rule's scope)
+func (r *stringFormatSubrule) apply(call *ast.CallExpr, scope *stringFormatSubruleScope) {
+ if len(call.Args) <= scope.argument {
return
}
- arg := call.Args[r.scope.argument]
+ arg := call.Args[scope.argument]
var lit *ast.BasicLit
- if len(r.scope.field) > 0 {
+ if len(scope.field) > 0 {
// Try finding the scope's Field, treating arg as a composite literal
composite, ok := arg.(*ast.CompositeLit)
if !ok {
@@ -248,7 +271,7 @@ func (r *stringFormatSubrule) Apply(call *ast.CallExpr) {
continue
}
key, ok := kv.Key.(*ast.Ident)
- if !ok || key.Name != r.scope.field {
+ if !ok || key.Name != scope.field {
continue
}
@@ -268,39 +291,33 @@ func (r *stringFormatSubrule) Apply(call *ast.CallExpr) {
}
// Unquote the string literal before linting
unquoted := lit.Value[1 : len(lit.Value)-1]
- r.lintMessage(unquoted, lit)
+ if r.stringIsOK(unquoted) {
+ return
+ }
+
+ r.generateFailure(lit)
}
-func (r *stringFormatSubrule) lintMessage(s string, node ast.Node) {
+func (r *stringFormatSubrule) stringIsOK(s string) bool {
+ matches := r.regexp.MatchString(s)
if r.negated {
- if !r.regexp.MatchString(s) {
- return
- }
- // Fail if the string does match the user's regex
- var failure string
- if len(r.errorMessage) > 0 {
- failure = r.errorMessage
- } else {
- failure = fmt.Sprintf("string literal matches user defined regex /%s/", r.regexp.String())
- }
- r.parent.onFailure(lint.Failure{
- Confidence: 1,
- Failure: failure,
- Node: node,
- })
- return
+ return !matches
}
- // Fail if the string does NOT match the user's regex
- if r.regexp.MatchString(s) {
- return
- }
+ return matches
+}
+
+func (r *stringFormatSubrule) generateFailure(node ast.Node) {
var failure string
- if len(r.errorMessage) > 0 {
+ switch {
+ case len(r.errorMessage) > 0:
failure = r.errorMessage
- } else {
+ case r.negated:
+ failure = fmt.Sprintf("string literal matches user defined regex /%s/", r.regexp.String())
+ case !r.negated:
failure = fmt.Sprintf("string literal doesn't match user defined regex /%s/", r.regexp.String())
}
+
r.parent.onFailure(lint.Failure{
Confidence: 1,
Failure: failure,
diff --git a/vendor/github.com/mgechev/revive/rule/struct-tag.go b/vendor/github.com/mgechev/revive/rule/struct-tag.go
index f6ee47a73..ec3f0c7cf 100644
--- a/vendor/github.com/mgechev/revive/rule/struct-tag.go
+++ b/vendor/github.com/mgechev/revive/rule/struct-tag.go
@@ -20,23 +20,27 @@ type StructTagRule struct {
func (r *StructTagRule) configure(arguments lint.Arguments) {
r.Lock()
defer r.Unlock()
- if r.userDefined == nil && len(arguments) > 0 {
- checkNumberOfArguments(1, arguments, r.Name())
- r.userDefined = make(map[string][]string, len(arguments))
- for _, arg := range arguments {
- item, ok := arg.(string)
- if !ok {
- panic(fmt.Sprintf("Invalid argument to the %s rule. Expecting a string, got %v (of type %T)", r.Name(), arg, arg))
- }
- parts := strings.Split(item, ",")
- if len(parts) < 2 {
- panic(fmt.Sprintf("Invalid argument to the %s rule. Expecting a string of the form key[,option]+, got %s", r.Name(), item))
- }
- key := strings.TrimSpace(parts[0])
- for i := 1; i < len(parts); i++ {
- option := strings.TrimSpace(parts[i])
- r.userDefined[key] = append(r.userDefined[key], option)
- }
+
+ mustConfigure := r.userDefined == nil && len(arguments) > 0
+ if !mustConfigure {
+ return
+ }
+
+ checkNumberOfArguments(1, arguments, r.Name())
+ r.userDefined = make(map[string][]string, len(arguments))
+ for _, arg := range arguments {
+ item, ok := arg.(string)
+ if !ok {
+ panic(fmt.Sprintf("Invalid argument to the %s rule. Expecting a string, got %v (of type %T)", r.Name(), arg, arg))
+ }
+ parts := strings.Split(item, ",")
+ if len(parts) < 2 {
+ panic(fmt.Sprintf("Invalid argument to the %s rule. Expecting a string of the form key[,option]+, got %s", r.Name(), item))
+ }
+ key := strings.TrimSpace(parts[0])
+ for i := 1; i < len(parts); i++ {
+ option := strings.TrimSpace(parts[i])
+ r.userDefined[key] = append(r.userDefined[key], option)
}
}
}
@@ -75,11 +79,13 @@ type lintStructTagRule struct {
func (w lintStructTagRule) Visit(node ast.Node) ast.Visitor {
switch n := node.(type) {
case *ast.StructType:
- if n.Fields == nil || n.Fields.NumFields() < 1 {
+ isEmptyStruct := n.Fields == nil || n.Fields.NumFields() < 1
+ if isEmptyStruct {
return nil // skip empty structs
}
- w.usedTagNbr = map[int]bool{} // init
- w.usedTagName = map[string]bool{} // init
+
+ w.usedTagNbr = map[int]bool{}
+ w.usedTagName = map[string]bool{}
for _, f := range n.Fields.List {
if f.Tag != nil {
w.checkTaggedField(f)
diff --git a/vendor/github.com/mgechev/revive/rule/superfluous-else.go b/vendor/github.com/mgechev/revive/rule/superfluous-else.go
index 2aa1b6b2c..18e8f3bdd 100644
--- a/vendor/github.com/mgechev/revive/rule/superfluous-else.go
+++ b/vendor/github.com/mgechev/revive/rule/superfluous-else.go
@@ -2,6 +2,7 @@ package rule
import (
"fmt"
+
"github.com/mgechev/revive/internal/ifelse"
"github.com/mgechev/revive/lint"
)
@@ -19,27 +20,27 @@ func (*SuperfluousElseRule) Name() string {
return "superfluous-else"
}
-// CheckIfElse evaluates the rule against an ifelse.Chain.
-func (*SuperfluousElseRule) CheckIfElse(chain ifelse.Chain, args ifelse.Args) (failMsg string) {
+// CheckIfElse evaluates the rule against an ifelse.Chain and returns a failure message if applicable.
+func (*SuperfluousElseRule) CheckIfElse(chain ifelse.Chain, args ifelse.Args) string {
if !chain.If.Deviates() {
// this rule only applies if the if-block deviates control flow
- return
+ return ""
}
if chain.HasPriorNonDeviating {
// if we de-indent the "else" block then a previous branch
// might flow into it, affecting program behaviour
- return
+ return ""
}
if chain.If.Returns() {
// avoid overlapping with indent-error-flow
- return
+ return ""
}
if args.PreserveScope && !chain.AtBlockEnd && (chain.HasInitializer || chain.Else.HasDecls) {
// avoid increasing variable scope
- return
+ return ""
}
return fmt.Sprintf("if block ends with %v, so drop this else and outdent its block", chain.If.LongString())
diff --git a/vendor/github.com/mgechev/revive/rule/time-equal.go b/vendor/github.com/mgechev/revive/rule/time-equal.go
index 3b85e18a8..a4fab88b3 100644
--- a/vendor/github.com/mgechev/revive/rule/time-equal.go
+++ b/vendor/github.com/mgechev/revive/rule/time-equal.go
@@ -50,26 +50,23 @@ func (l *lintTimeEqual) Visit(node ast.Node) ast.Visitor {
return l
}
- xtyp := l.file.Pkg.TypeOf(expr.X)
- ytyp := l.file.Pkg.TypeOf(expr.Y)
-
- if !isNamedType(xtyp, "time", "Time") || !isNamedType(ytyp, "time", "Time") {
+ typeOfX := l.file.Pkg.TypeOf(expr.X)
+ typeOfY := l.file.Pkg.TypeOf(expr.Y)
+ bothAreOfTimeType := isNamedType(typeOfX, "time", "Time") && isNamedType(typeOfY, "time", "Time")
+ if !bothAreOfTimeType {
return l
}
- var failure string
- switch expr.Op {
- case token.EQL:
- failure = fmt.Sprintf("use %s.Equal(%s) instead of %q operator", gofmt(expr.X), gofmt(expr.Y), expr.Op)
- case token.NEQ:
- failure = fmt.Sprintf("use !%s.Equal(%s) instead of %q operator", gofmt(expr.X), gofmt(expr.Y), expr.Op)
+ negateStr := ""
+ if token.NEQ == expr.Op {
+ negateStr = "!"
}
l.onFailure(lint.Failure{
Category: "time",
Confidence: 1,
Node: node,
- Failure: failure,
+ Failure: fmt.Sprintf("use %s%s.Equal(%s) instead of %q operator", negateStr, gofmt(expr.X), gofmt(expr.Y), expr.Op),
})
return l
diff --git a/vendor/github.com/mgechev/revive/rule/time-naming.go b/vendor/github.com/mgechev/revive/rule/time-naming.go
index cea452e61..5bccf8a7a 100644
--- a/vendor/github.com/mgechev/revive/rule/time-naming.go
+++ b/vendor/github.com/mgechev/revive/rule/time-naming.go
@@ -90,6 +90,7 @@ func isNamedType(typ types.Type, importPath, name string) bool {
if !ok {
return false
}
- tn := n.Obj()
- return tn != nil && tn.Pkg() != nil && tn.Pkg().Path() == importPath && tn.Name() == name
+
+ typeName := n.Obj()
+ return typeName != nil && typeName.Pkg() != nil && typeName.Pkg().Path() == importPath && typeName.Name() == name
}
diff --git a/vendor/github.com/mgechev/revive/rule/unchecked-type-assertion.go b/vendor/github.com/mgechev/revive/rule/unchecked-type-assertion.go
index df27743cb..eea344060 100644
--- a/vendor/github.com/mgechev/revive/rule/unchecked-type-assertion.go
+++ b/vendor/github.com/mgechev/revive/rule/unchecked-type-assertion.go
@@ -13,7 +13,7 @@ const (
ruleUTAMessageIgnored = "type assertion result ignored"
)
-// UncheckedTypeAssertionRule lints missing or ignored `ok`-value in danymic type casts.
+// UncheckedTypeAssertionRule lints missing or ignored `ok`-value in dynamic type casts.
type UncheckedTypeAssertionRule struct {
sync.Mutex
acceptIgnoredAssertionResult bool
@@ -54,7 +54,7 @@ func (u *UncheckedTypeAssertionRule) Apply(file *lint.File, args lint.Arguments)
var failures []lint.Failure
- walker := &lintUnchekedTypeAssertion{
+ walker := &lintUncheckedTypeAssertion{
onFailure: func(failure lint.Failure) {
failures = append(failures, failure)
},
@@ -71,7 +71,7 @@ func (*UncheckedTypeAssertionRule) Name() string {
return "unchecked-type-assertion"
}
-type lintUnchekedTypeAssertion struct {
+type lintUncheckedTypeAssertion struct {
onFailure func(lint.Failure)
acceptIgnoredTypeAssertionResult bool
}
@@ -89,14 +89,14 @@ func isTypeSwitch(e *ast.TypeAssertExpr) bool {
return e.Type == nil
}
-func (w *lintUnchekedTypeAssertion) requireNoTypeAssert(expr ast.Expr) {
+func (w *lintUncheckedTypeAssertion) requireNoTypeAssert(expr ast.Expr) {
e, ok := expr.(*ast.TypeAssertExpr)
if ok && !isTypeSwitch(e) {
w.addFailure(e, ruleUTAMessagePanic)
}
}
-func (w *lintUnchekedTypeAssertion) handleIfStmt(n *ast.IfStmt) {
+func (w *lintUncheckedTypeAssertion) handleIfStmt(n *ast.IfStmt) {
ifCondition, ok := n.Cond.(*ast.BinaryExpr)
if ok {
w.requireNoTypeAssert(ifCondition.X)
@@ -104,7 +104,7 @@ func (w *lintUnchekedTypeAssertion) handleIfStmt(n *ast.IfStmt) {
}
}
-func (w *lintUnchekedTypeAssertion) requireBinaryExpressionWithoutTypeAssertion(expr ast.Expr) {
+func (w *lintUncheckedTypeAssertion) requireBinaryExpressionWithoutTypeAssertion(expr ast.Expr) {
binaryExpr, ok := expr.(*ast.BinaryExpr)
if ok {
w.requireNoTypeAssert(binaryExpr.X)
@@ -112,19 +112,19 @@ func (w *lintUnchekedTypeAssertion) requireBinaryExpressionWithoutTypeAssertion(
}
}
-func (w *lintUnchekedTypeAssertion) handleCaseClause(n *ast.CaseClause) {
+func (w *lintUncheckedTypeAssertion) handleCaseClause(n *ast.CaseClause) {
for _, expr := range n.List {
w.requireNoTypeAssert(expr)
w.requireBinaryExpressionWithoutTypeAssertion(expr)
}
}
-func (w *lintUnchekedTypeAssertion) handleSwitch(n *ast.SwitchStmt) {
+func (w *lintUncheckedTypeAssertion) handleSwitch(n *ast.SwitchStmt) {
w.requireNoTypeAssert(n.Tag)
w.requireBinaryExpressionWithoutTypeAssertion(n.Tag)
}
-func (w *lintUnchekedTypeAssertion) handleAssignment(n *ast.AssignStmt) {
+func (w *lintUncheckedTypeAssertion) handleAssignment(n *ast.AssignStmt) {
if len(n.Rhs) == 0 {
return
}
@@ -148,21 +148,21 @@ func (w *lintUnchekedTypeAssertion) handleAssignment(n *ast.AssignStmt) {
}
// handles "return foo(.*bar)" - one of them is enough to fail as golang does not forward the type cast tuples in return statements
-func (w *lintUnchekedTypeAssertion) handleReturn(n *ast.ReturnStmt) {
+func (w *lintUncheckedTypeAssertion) handleReturn(n *ast.ReturnStmt) {
for _, r := range n.Results {
w.requireNoTypeAssert(r)
}
}
-func (w *lintUnchekedTypeAssertion) handleRange(n *ast.RangeStmt) {
+func (w *lintUncheckedTypeAssertion) handleRange(n *ast.RangeStmt) {
w.requireNoTypeAssert(n.X)
}
-func (w *lintUnchekedTypeAssertion) handleChannelSend(n *ast.SendStmt) {
+func (w *lintUncheckedTypeAssertion) handleChannelSend(n *ast.SendStmt) {
w.requireNoTypeAssert(n.Value)
}
-func (w *lintUnchekedTypeAssertion) Visit(node ast.Node) ast.Visitor {
+func (w *lintUncheckedTypeAssertion) Visit(node ast.Node) ast.Visitor {
switch n := node.(type) {
case *ast.RangeStmt:
w.handleRange(n)
@@ -183,7 +183,7 @@ func (w *lintUnchekedTypeAssertion) Visit(node ast.Node) ast.Visitor {
return w
}
-func (w *lintUnchekedTypeAssertion) addFailure(n *ast.TypeAssertExpr, why string) {
+func (w *lintUncheckedTypeAssertion) addFailure(n *ast.TypeAssertExpr, why string) {
s := fmt.Sprintf("type cast result is unchecked in %v - %s", gofmt(n), why)
w.onFailure(lint.Failure{
Category: "bad practice",
diff --git a/vendor/github.com/mgechev/revive/rule/unconditional-recursion.go b/vendor/github.com/mgechev/revive/rule/unconditional-recursion.go
index 9ac2648cd..d806b6757 100644
--- a/vendor/github.com/mgechev/revive/rule/unconditional-recursion.go
+++ b/vendor/github.com/mgechev/revive/rule/unconditional-recursion.go
@@ -185,9 +185,10 @@ func (lintUnconditionalRecursionRule) hasControlExit(node ast.Node) bool {
return false
}
- fn := se.Sel.Name
- pkg := id.Name
- if exitFunctions[pkg] != nil && exitFunctions[pkg][fn] { // it's a call to an exit function
+ functionName := se.Sel.Name
+ pkgName := id.Name
+ isCallToExitFunction := exitFunctions[pkgName] != nil && exitFunctions[pkgName][functionName]
+ if isCallToExitFunction {
return true
}
}
diff --git a/vendor/github.com/mgechev/revive/rule/unhandled-error.go b/vendor/github.com/mgechev/revive/rule/unhandled-error.go
index ce6fa3864..95ba56180 100644
--- a/vendor/github.com/mgechev/revive/rule/unhandled-error.go
+++ b/vendor/github.com/mgechev/revive/rule/unhandled-error.go
@@ -19,27 +19,30 @@ type UnhandledErrorRule struct {
func (r *UnhandledErrorRule) configure(arguments lint.Arguments) {
r.Lock()
- if r.ignoreList == nil {
- for _, arg := range arguments {
- argStr, ok := arg.(string)
- if !ok {
- panic(fmt.Sprintf("Invalid argument to the unhandled-error rule. Expecting a string, got %T", arg))
- }
+ defer r.Unlock()
- argStr = strings.Trim(argStr, " ")
- if argStr == "" {
- panic("Invalid argument to the unhandled-error rule, expected regular expression must not be empty.")
- }
+ if r.ignoreList != nil {
+ return // already configured
+ }
- exp, err := regexp.Compile(argStr)
- if err != nil {
- panic(fmt.Sprintf("Invalid argument to the unhandled-error rule: regexp %q does not compile: %v", argStr, err))
- }
+ for _, arg := range arguments {
+ argStr, ok := arg.(string)
+ if !ok {
+ panic(fmt.Sprintf("Invalid argument to the unhandled-error rule. Expecting a string, got %T", arg))
+ }
- r.ignoreList = append(r.ignoreList, exp)
+ argStr = strings.Trim(argStr, " ")
+ if argStr == "" {
+ panic("Invalid argument to the unhandled-error rule, expected regular expression must not be empty.")
}
+
+ exp, err := regexp.Compile(argStr)
+ if err != nil {
+ panic(fmt.Sprintf("Invalid argument to the unhandled-error rule: regexp %q does not compile: %v", argStr, err))
+ }
+
+ r.ignoreList = append(r.ignoreList, exp)
}
- r.Unlock()
}
// Apply applies the rule to given file.
@@ -130,9 +133,9 @@ func (w *lintUnhandledErrors) funcName(call *ast.CallExpr) string {
}
name := fn.FullName()
- name = strings.Replace(name, "(", "", -1)
- name = strings.Replace(name, ")", "", -1)
- name = strings.Replace(name, "*", "", -1)
+ name = strings.ReplaceAll(name, "(", "")
+ name = strings.ReplaceAll(name, ")", "")
+ name = strings.ReplaceAll(name, "*", "")
return name
}
diff --git a/vendor/github.com/mgechev/revive/rule/use-any.go b/vendor/github.com/mgechev/revive/rule/use-any.go
index bdf3c936d..88160c2fa 100644
--- a/vendor/github.com/mgechev/revive/rule/use-any.go
+++ b/vendor/github.com/mgechev/revive/rule/use-any.go
@@ -47,7 +47,7 @@ func (w lintUseAny) Visit(n ast.Node) ast.Visitor {
Node: n,
Confidence: 1,
Category: "naming",
- Failure: "since GO 1.18 'interface{}' can be replaced by 'any'",
+ Failure: "since Go 1.18 'interface{}' can be replaced by 'any'",
})
return w
diff --git a/vendor/github.com/mgechev/revive/rule/var-declarations.go b/vendor/github.com/mgechev/revive/rule/var-declarations.go
index a15ff1eb4..3f9d7068a 100644
--- a/vendor/github.com/mgechev/revive/rule/var-declarations.go
+++ b/vendor/github.com/mgechev/revive/rule/var-declarations.go
@@ -46,13 +46,15 @@ type lintVarDeclarations struct {
func (w *lintVarDeclarations) Visit(node ast.Node) ast.Visitor {
switch v := node.(type) {
case *ast.GenDecl:
- if v.Tok != token.CONST && v.Tok != token.VAR {
+ isVarOrConstDeclaration := v.Tok == token.CONST || v.Tok == token.VAR
+ if !isVarOrConstDeclaration {
return nil
}
w.lastGen = v
return w
case *ast.ValueSpec:
- if w.lastGen.Tok == token.CONST {
+ isConstDeclaration := w.lastGen.Tok == token.CONST
+ if isConstDeclaration {
return nil
}
if len(v.Names) > 1 || v.Type == nil || len(v.Values) == 0 {
@@ -64,14 +66,14 @@ func (w *lintVarDeclarations) Visit(node ast.Node) ast.Visitor {
if isIdent(v.Names[0], "_") {
return nil
}
- // If the RHS is a zero value, suggest dropping it.
- zero := false
+ // If the RHS is a isZero value, suggest dropping it.
+ isZero := false
if lit, ok := rhs.(*ast.BasicLit); ok {
- zero = zeroLiteral[lit.Value]
+ isZero = zeroLiteral[lit.Value]
} else if isIdent(rhs, "nil") {
- zero = true
+ isZero = true
}
- if zero {
+ if isZero {
w.onFailure(lint.Failure{
Confidence: 0.9,
Node: rhs,
diff --git a/vendor/github.com/mgechev/revive/rule/var-naming.go b/vendor/github.com/mgechev/revive/rule/var-naming.go
index e91c22dc2..5a4d0dc24 100644
--- a/vendor/github.com/mgechev/revive/rule/var-naming.go
+++ b/vendor/github.com/mgechev/revive/rule/var-naming.go
@@ -19,9 +19,9 @@ var upperCaseConstRE = regexp.MustCompile(`^_?[A-Z][A-Z\d]*(_[A-Z\d]+)*$`)
// VarNamingRule lints given else constructs.
type VarNamingRule struct {
configured bool
- allowlist []string
- blocklist []string
- upperCaseConst bool // if true - allows to use UPPER_SOME_NAMES for constants
+ allowList []string
+ blockList []string
+ allowUpperCaseConst bool // if true - allows to use UPPER_SOME_NAMES for constants
skipPackageNameChecks bool
sync.Mutex
}
@@ -35,11 +35,11 @@ func (r *VarNamingRule) configure(arguments lint.Arguments) {
r.configured = true
if len(arguments) >= 1 {
- r.allowlist = getList(arguments[0], "allowlist")
+ r.allowList = getList(arguments[0], "allowlist")
}
if len(arguments) >= 2 {
- r.blocklist = getList(arguments[1], "blocklist")
+ r.blockList = getList(arguments[1], "blocklist")
}
if len(arguments) >= 3 {
@@ -56,7 +56,7 @@ func (r *VarNamingRule) configure(arguments lint.Arguments) {
if !ok {
panic(fmt.Sprintf("Invalid third argument to the var-naming rule. Expecting a %s of type slice, of len==1, with map, but %T", "options", asSlice[0]))
}
- r.upperCaseConst = fmt.Sprint(args["upperCaseConst"]) == "true"
+ r.allowUpperCaseConst = fmt.Sprint(args["upperCaseConst"]) == "true"
r.skipPackageNameChecks = fmt.Sprint(args["skipPackageNameChecks"]) == "true"
}
}
@@ -79,7 +79,6 @@ func (r *VarNamingRule) applyPackageCheckRules(walker *lintNames) {
Category: "naming",
})
}
-
}
// Apply applies the rule to given file.
@@ -93,12 +92,12 @@ func (r *VarNamingRule) Apply(file *lint.File, arguments lint.Arguments) []lint.
walker := lintNames{
file: file,
fileAst: fileAst,
- allowlist: r.allowlist,
- blocklist: r.blocklist,
+ allowList: r.allowList,
+ blockList: r.blockList,
onFailure: func(failure lint.Failure) {
failures = append(failures, failure)
},
- upperCaseConst: r.upperCaseConst,
+ upperCaseConst: r.allowUpperCaseConst,
}
if !r.skipPackageNameChecks {
@@ -151,7 +150,7 @@ func (w *lintNames) check(id *ast.Ident, thing string) {
return
}
- should := lint.Name(id.Name, w.allowlist, w.blocklist)
+ should := lint.Name(id.Name, w.allowList, w.blockList)
if id.Name == should {
return
}
@@ -177,8 +176,8 @@ type lintNames struct {
file *lint.File
fileAst *ast.File
onFailure func(lint.Failure)
- allowlist []string
- blocklist []string
+ allowList []string
+ blockList []string
upperCaseConst bool
}
@@ -265,17 +264,17 @@ func (w *lintNames) Visit(n ast.Node) ast.Visitor {
}
func getList(arg any, argName string) []string {
- temp, ok := arg.([]any)
+ args, ok := arg.([]any)
if !ok {
panic(fmt.Sprintf("Invalid argument to the var-naming rule. Expecting a %s of type slice with initialisms, got %T", argName, arg))
}
var list []string
- for _, v := range temp {
- if val, ok := v.(string); ok {
- list = append(list, val)
- } else {
+ for _, v := range args {
+ val, ok := v.(string)
+ if !ok {
panic(fmt.Sprintf("Invalid %s values of the var-naming rule. Expecting slice of strings but got element of type %T", val, arg))
}
+ list = append(list, val)
}
return list
}
diff --git a/vendor/github.com/mgechev/revive/rule/waitgroup-by-value.go b/vendor/github.com/mgechev/revive/rule/waitgroup-by-value.go
index 98644f41c..a2d304ae5 100644
--- a/vendor/github.com/mgechev/revive/rule/waitgroup-by-value.go
+++ b/vendor/github.com/mgechev/revive/rule/waitgroup-by-value.go
@@ -51,7 +51,7 @@ func (w lintWaitGroupByValueRule) Visit(node ast.Node) ast.Visitor {
})
}
- return nil
+ return nil // skip visiting function body
}
func (lintWaitGroupByValueRule) isWaitGroup(ft ast.Expr) bool {
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/.gitignore b/vendor/github.com/nunnatsa/ginkgolinter/.gitignore
index 7d7f8b10c..67467b717 100644
--- a/vendor/github.com/nunnatsa/ginkgolinter/.gitignore
+++ b/vendor/github.com/nunnatsa/ginkgolinter/.gitignore
@@ -1,2 +1,3 @@
ginkgolinter
bin/
+e2e \ No newline at end of file
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/Makefile b/vendor/github.com/nunnatsa/ginkgolinter/Makefile
index 586633006..8ddd8c42c 100644
--- a/vendor/github.com/nunnatsa/ginkgolinter/Makefile
+++ b/vendor/github.com/nunnatsa/ginkgolinter/Makefile
@@ -5,7 +5,7 @@ HASH_FLAG := -X github.com/nunnatsa/ginkgolinter/version.gitHash=$(COMMIT_HASH)
BUILD_ARGS := -ldflags "$(VERSION_FLAG) $(HASH_FLAG)"
-build: unit-test
+build: goimports
go build $(BUILD_ARGS) -o ginkgolinter ./cmd/ginkgolinter
unit-test:
@@ -23,5 +23,11 @@ build-for-linux:
build-all: build build-for-linux build-for-mac build-for-windows
-test: build
- ./tests/e2e.sh
+test-cli:
+ cd tests; go test -v ./
+
+test: unit-test test-cli
+
+goimports:
+ go install golang.org/x/tools/cmd/goimports@latest
+ goimports -w -local="github.com/nunnatsa/ginkgolinter" $(shell find . -type f -name '*.go' ! -path "*/vendor/*")
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/README.md b/vendor/github.com/nunnatsa/ginkgolinter/README.md
index 977cec903..536a65e7b 100644
--- a/vendor/github.com/nunnatsa/ginkgolinter/README.md
+++ b/vendor/github.com/nunnatsa/ginkgolinter/README.md
@@ -249,6 +249,28 @@ This will probably happen when using the old format:
Eventually(aFunc, 500 * time.Millisecond /*timeout*/, 10 * time.Second /*polling*/).Should(Succeed())
```
+### Correct usage of the `Succeed()` matcher [Bug]
+The `Succeed()` matcher only accepts a single error value. This rule validates that.
+
+For example:
+ ```go
+ Expect(42).To(Succeed())
+ ```
+
+But mostly, we want to avoid using this matcher with functions that return multiple values, even if their last
+returned value is an error, because this is not supported:
+ ```go
+ Expect(os.Open("myFile.txt")).To(Succeed())
+ ```
+
+In async assertions (like `Eventually()`), the `Succeed()` matcher may also be used with functions that accept
+a Gomega object as their first parameter and return nothing, e.g. this is a valid usage of `Eventually`
+ ```go
+ Eventually(func(g Gomega){
+ g.Expect(true).To(BeTrue())
+ }).WithTimeout(10 * time.Millisecond).WithPolling(time.Millisecond).Should(Succeed())
+ ```
+
### Avoid Spec Pollution: Don't Initialize Variables in Container Nodes [BUG/STYLE]:
***Note***: Only applied when the `--forbid-spec-pollution=true` flag is set (disabled by default).
@@ -476,6 +498,30 @@ will be changed to:
```go
Eventually(aFunc, time.Second*5, time.Second*polling)
```
+
+### Correct usage of the `Succeed()` and the `HaveOccurred()` matchers
+This rule enforces using the `Succeed()` matcher only for functions, and the `HaveOccurred()` matcher only for error
+values.
+
+For example:
+ ```go
+ Expect(err).To(Succeed())
+ ```
+will trigger a warning with a suggestion to replace the matcher with
+ ```go
+ Expect(err).ToNot(HaveOccurred())
+ ```
+
+and vice versa:
+ ```go
+ Expect(myErrorFunc()).ToNot(HaveOccurred())
+ ```
+will trigger a warning with a suggestion to replace the matcher with
+ ```go
+ Expect(myErrorFunc()).To(Succeed())
+ ```
+***This rule is disabled by default***. Use the `--force-succeed=true` command line flag to enable it.
+
## Suppress the linter
### Suppress warning from command line
* Use the `--suppress-len-assertion=true` flag to suppress the wrong length and cap assertions warning
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/analyzer.go b/vendor/github.com/nunnatsa/ginkgolinter/analyzer.go
index edff57acd..dbc39aba5 100644
--- a/vendor/github.com/nunnatsa/ginkgolinter/analyzer.go
+++ b/vendor/github.com/nunnatsa/ginkgolinter/analyzer.go
@@ -25,13 +25,14 @@ func NewAnalyzerWithConfig(config *types.Config) *analysis.Analyzer {
// NewAnalyzer returns an Analyzer - the package interface with nogo
func NewAnalyzer() *analysis.Analyzer {
config := &types.Config{
- SuppressLen: false,
- SuppressNil: false,
- SuppressErr: false,
- SuppressCompare: false,
- ForbidFocus: false,
- AllowHaveLen0: false,
- ForceExpectTo: false,
+ SuppressLen: false,
+ SuppressNil: false,
+ SuppressErr: false,
+ SuppressCompare: false,
+ ForbidFocus: false,
+ AllowHaveLen0: false,
+ ForceExpectTo: false,
+ ForceSucceedForFuncs: false,
}
a := NewAnalyzerWithConfig(config)
@@ -50,6 +51,7 @@ func NewAnalyzer() *analysis.Analyzer {
a.Flags.BoolVar(&ignored, "suppress-focus-container", true, "Suppress warning for ginkgo focus containers like FDescribe, FContext, FWhen or FIt. Deprecated and ignored: use --forbid-focus-container instead")
a.Flags.Var(&config.ForbidFocus, "forbid-focus-container", "trigger a warning for ginkgo focus containers like FDescribe, FContext, FWhen or FIt; default = false.")
a.Flags.Var(&config.ForbidSpecPollution, "forbid-spec-pollution", "trigger a warning for variable assignments in ginkgo containers like Describe, Context and When, instead of in BeforeEach(); default = false.")
+ a.Flags.Var(&config.ForceSucceedForFuncs, "force-succeed", "force using the Succeed matcher for error functions, and the HaveOccurred matcher for non-function error values")
return a
}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/doc.go b/vendor/github.com/nunnatsa/ginkgolinter/doc.go
index dd9ecf58a..c07b6a316 100644
--- a/vendor/github.com/nunnatsa/ginkgolinter/doc.go
+++ b/vendor/github.com/nunnatsa/ginkgolinter/doc.go
@@ -30,6 +30,14 @@ For example:
This will probably happen when using the old format:
Eventually(aFunc, 500 * time.Millisecond, 10 * time.Second).Should(Succeed())
+* Succeed matcher validation: [BUG]
+  The Succeed matcher expects the actual argument to be a single error. In async actual assertions, it also allows
+  functions with a Gomega object as the function's first parameter.
+For example:
+ Expect(myInt).To(Succeed())
+or
+ Eventually(func() int { return 42 }).Should(Succeed())
+
* reject variable assignments in ginkgo containers [Bug/Style]:
For example:
var _ = Describe("description", func(){
@@ -96,4 +104,13 @@ methods.
For example:
Eventually(context.Background(), func() bool { return true }, "1s").Should(BeTrue())
Eventually(context.Background(), func() bool { return true }, time.Second*60, 15).Should(BeTrue())
+
+* Succeed <=> HaveOccurred usage [Style]
+ enforce that the Succeed() matcher will be used for error functions, and the HaveOccurred() matcher will
+ be used for error values.
+
+For example:
+ Expect(err).ToNot(Succeed())
+or
+ Expect(funcRetError()).ToNot(HaveOccurred())
`
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/actual.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/actual.go
new file mode 100644
index 000000000..8e3df5d3f
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/actual.go
@@ -0,0 +1,118 @@
+package actual
+
+import (
+ "go/ast"
+ gotypes "go/types"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/internal/gomegahandler"
+ "github.com/nunnatsa/ginkgolinter/internal/gomegainfo"
+)
+
+type Actual struct {
+ Orig *ast.CallExpr
+ Clone *ast.CallExpr
+ Arg ArgPayload
+ argType gotypes.Type
+ isTuple bool
+ isAsync bool
+ asyncArg *AsyncArg
+ actualOffset int
+}
+
+func New(origExpr, cloneExpr *ast.CallExpr, orig *ast.CallExpr, clone *ast.CallExpr, pass *analysis.Pass, handler gomegahandler.Handler, timePkg string) (*Actual, bool) {
+ funcName, ok := handler.GetActualFuncName(orig)
+ if !ok {
+ return nil, false
+ }
+
+ arg, actualOffset := getActualArgPayload(orig, clone, pass, funcName)
+ if arg == nil {
+ return nil, false
+ }
+
+ argType := pass.TypesInfo.TypeOf(orig.Args[actualOffset])
+ isTuple := false
+
+ if tpl, ok := argType.(*gotypes.Tuple); ok {
+ if tpl.Len() > 0 {
+ argType = tpl.At(0).Type()
+ } else {
+ argType = nil
+ }
+
+ isTuple = tpl.Len() > 1
+ }
+
+ isAsyncExpr := gomegainfo.IsAsyncActualMethod(funcName)
+
+ var asyncArg *AsyncArg
+ if isAsyncExpr {
+ asyncArg = newAsyncArg(origExpr, cloneExpr, orig, clone, argType, pass, actualOffset, timePkg)
+ }
+
+ return &Actual{
+ Orig: orig,
+ Clone: clone,
+ Arg: arg,
+ argType: argType,
+ isTuple: isTuple,
+ isAsync: isAsyncExpr,
+ asyncArg: asyncArg,
+ actualOffset: actualOffset,
+ }, true
+}
+
+func (a *Actual) ReplaceActual(newArgs ast.Expr) {
+ a.Clone.Args[a.actualOffset] = newArgs
+}
+
+func (a *Actual) ReplaceActualWithItsFirstArg() {
+ firstArgOfArg := a.Clone.Args[a.actualOffset].(*ast.CallExpr).Args[0]
+ a.ReplaceActual(firstArgOfArg)
+}
+
+func (a *Actual) IsAsync() bool {
+ return a.isAsync
+}
+
+func (a *Actual) IsTuple() bool {
+ return a.isTuple
+}
+
+func (a *Actual) ArgGOType() gotypes.Type {
+ return a.argType
+}
+
+func (a *Actual) GetAsyncArg() *AsyncArg {
+ return a.asyncArg
+}
+
+func (a *Actual) AppendWithArgsMethod() {
+ if a.asyncArg.fun != nil {
+ if len(a.asyncArg.fun.Args) > 0 {
+ actualOrigFunc := a.Clone.Fun
+ actualOrigArgs := a.Clone.Args
+
+ actualOrigArgs[a.actualOffset] = a.asyncArg.fun.Fun
+ call := &ast.SelectorExpr{
+ Sel: ast.NewIdent("WithArguments"),
+ X: &ast.CallExpr{
+ Fun: actualOrigFunc,
+ Args: actualOrigArgs,
+ },
+ }
+
+ a.Clone.Fun = call
+ a.Clone.Args = a.asyncArg.fun.Args
+ a.Clone = a.Clone.Fun.(*ast.SelectorExpr).X.(*ast.CallExpr)
+ } else {
+ a.Clone.Args[a.actualOffset] = a.asyncArg.fun.Fun
+ }
+ }
+}
+
+func (a *Actual) GetActualArg() ast.Expr {
+ return a.Clone.Args[a.actualOffset]
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/actualarg.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/actualarg.go
new file mode 100644
index 000000000..9d251c468
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/actualarg.go
@@ -0,0 +1,235 @@
+package actual
+
+import (
+ "go/ast"
+ "go/token"
+ gotypes "go/types"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/internal/expression/value"
+ "github.com/nunnatsa/ginkgolinter/internal/gomegainfo"
+ "github.com/nunnatsa/ginkgolinter/internal/reverseassertion"
+)
+
+type ArgType uint64
+
+const (
+ UnknownActualArgType ArgType = 1 << iota
+ ErrActualArgType
+ LenFuncActualArgType
+ CapFuncActualArgType
+ ComparisonActualArgType
+ LenComparisonActualArgType
+ CapComparisonActualArgType
+ NilComparisonActualArgType
+ BinaryComparisonActualArgType
+ FuncSigArgType
+ ErrFuncActualArgType
+ GomegaParamArgType
+ MultiRetsArgType
+
+ ErrorTypeArgType
+
+ EqualZero
+ GreaterThanZero
+)
+
+func (a ArgType) Is(val ArgType) bool {
+ return a&val != 0
+}
+
+func getActualArgPayload(origActualExpr, actualExprClone *ast.CallExpr, pass *analysis.Pass, actualMethodName string) (ArgPayload, int) {
+ origArgExpr, argExprClone, actualOffset, isGomegaExpr := getActualArg(origActualExpr, actualExprClone, actualMethodName, pass)
+ if !isGomegaExpr {
+ return nil, 0
+ }
+
+ var arg ArgPayload
+
+ if value.IsExprError(pass, origArgExpr) {
+ arg = newErrPayload(origArgExpr, argExprClone, pass)
+ } else {
+ switch expr := origArgExpr.(type) {
+ case *ast.CallExpr:
+ arg = newFuncCallArgPayload(expr, argExprClone.(*ast.CallExpr))
+
+ case *ast.BinaryExpr:
+ arg = parseBinaryExpr(expr, argExprClone.(*ast.BinaryExpr), pass)
+
+ default:
+ t := pass.TypesInfo.TypeOf(origArgExpr)
+ if sig, ok := t.(*gotypes.Signature); ok {
+ arg = getAsyncFuncArg(sig)
+ }
+ }
+
+ }
+
+ if arg != nil {
+ return arg, actualOffset
+ }
+
+ return newRegularArgPayload(origArgExpr, argExprClone, pass), actualOffset
+}
+
+func getActualArg(origActualExpr *ast.CallExpr, actualExprClone *ast.CallExpr, actualMethodName string, pass *analysis.Pass) (ast.Expr, ast.Expr, int, bool) {
+ var (
+ origArgExpr ast.Expr
+ argExprClone ast.Expr
+ )
+
+ funcOffset := gomegainfo.ActualArgOffset(actualMethodName)
+ if funcOffset < 0 {
+ return nil, nil, 0, false
+ }
+
+ if len(origActualExpr.Args) <= funcOffset {
+ return nil, nil, 0, false
+ }
+
+ origArgExpr = origActualExpr.Args[funcOffset]
+ argExprClone = actualExprClone.Args[funcOffset]
+
+ if gomegainfo.IsAsyncActualMethod(actualMethodName) {
+ if pass.TypesInfo.TypeOf(origArgExpr).String() == "context.Context" {
+ funcOffset++
+ if len(origActualExpr.Args) <= funcOffset {
+ return nil, nil, 0, false
+ }
+
+ origArgExpr = origActualExpr.Args[funcOffset]
+ argExprClone = actualExprClone.Args[funcOffset]
+ }
+ }
+
+ return origArgExpr, argExprClone, funcOffset, true
+}
+
+type ArgPayload interface {
+ ArgType() ArgType
+}
+
+type RegularArgPayload struct {
+ value.Value
+}
+
+func newRegularArgPayload(orig, clone ast.Expr, pass *analysis.Pass) *RegularArgPayload {
+ return &RegularArgPayload{
+ Value: value.New(orig, clone, pass),
+ }
+}
+
+func (*RegularArgPayload) ArgType() ArgType {
+ return UnknownActualArgType
+}
+
+type FuncCallArgPayload struct {
+ argType ArgType
+
+ origFunc *ast.CallExpr
+ cloneFunc *ast.CallExpr
+
+ origVal ast.Expr
+ cloneVal ast.Expr
+}
+
+func newFuncCallArgPayload(orig, clone *ast.CallExpr) ArgPayload {
+ funcName, ok := builtinFuncName(orig)
+ if !ok {
+ return nil
+ }
+
+ if len(orig.Args) != 1 {
+ return nil
+ }
+
+ var argType ArgType
+ switch funcName {
+ case "len":
+ argType = LenFuncActualArgType
+ case "cap":
+ argType = CapFuncActualArgType
+ default:
+ return nil
+ }
+
+ return &FuncCallArgPayload{
+ argType: argType,
+ origFunc: orig,
+ cloneFunc: clone,
+ origVal: orig.Args[0],
+ cloneVal: clone.Args[0],
+ }
+}
+
+func (f *FuncCallArgPayload) ArgType() ArgType {
+ return f.argType
+}
+
+type ErrPayload struct {
+ value.Valuer
+}
+
+func newErrPayload(orig, clone ast.Expr, pass *analysis.Pass) *ErrPayload {
+ return &ErrPayload{
+ Valuer: value.GetValuer(orig, clone, pass),
+ }
+}
+
+func (*ErrPayload) ArgType() ArgType {
+ return ErrActualArgType | ErrorTypeArgType
+}
+
+func parseBinaryExpr(origActualExpr, argExprClone *ast.BinaryExpr, pass *analysis.Pass) ArgPayload {
+ left, right, op := origActualExpr.X, origActualExpr.Y, origActualExpr.Op
+ replace := false
+ switch realFirst := left.(type) {
+ case *ast.Ident: // check if const
+ info, ok := pass.TypesInfo.Types[realFirst]
+ if ok {
+ if value.Is[*gotypes.Basic](info.Type) && (info.Value != nil || info.IsNil()) {
+ replace = true
+ }
+ }
+
+ case *ast.BasicLit:
+ replace = true
+ }
+
+ if replace {
+ left, right = right, left
+ }
+
+ switch op {
+ case token.EQL:
+ case token.NEQ:
+ case token.GTR, token.GEQ, token.LSS, token.LEQ:
+ if replace {
+ op = reverseassertion.ChangeCompareOperator(op)
+ }
+ default:
+ return nil
+ }
+
+ leftClone, rightClone := argExprClone.X, argExprClone.Y
+ if replace {
+ leftClone, rightClone = rightClone, leftClone
+ }
+
+ leftVal := value.GetValuer(left, leftClone, pass)
+ rightVal := value.GetValuer(right, rightClone, pass)
+
+ if value.IsNil(right, pass) {
+ return newNilComparisonPayload(leftVal, rightVal, op)
+ }
+
+ leftVal.IsFunc()
+ if firstFunc, ok := left.(*ast.CallExpr); ok {
+ if payload, ok := newFuncComparisonPayload(firstFunc, leftClone.(*ast.CallExpr), right, rightClone, op, pass); ok {
+ return payload
+ }
+ }
+
+ return newComparisonArgPayload(leftVal, rightVal, op)
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/asyncactual.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/asyncactual.go
new file mode 100644
index 000000000..7c5df2a34
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/asyncactual.go
@@ -0,0 +1,123 @@
+package actual
+
+import (
+ "go/ast"
+ gotypes "go/types"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/internal/intervals"
+)
+
+type AsyncArg struct {
+ valid bool
+ fun *ast.CallExpr
+
+ timeoutInterval intervals.DurationValue
+ pollingInterval intervals.DurationValue
+ tooManyTimeouts bool
+ tooManyPolling bool
+}
+
+func newAsyncArg(origExpr, cloneExpr, orig, clone *ast.CallExpr, argType gotypes.Type, pass *analysis.Pass, actualOffset int, timePkg string) *AsyncArg {
+ var (
+ fun *ast.CallExpr
+ valid = true
+ timeout intervals.DurationValue
+ polling intervals.DurationValue
+ )
+
+ if _, isActualFuncCall := orig.Args[actualOffset].(*ast.CallExpr); isActualFuncCall {
+ fun = clone.Args[actualOffset].(*ast.CallExpr)
+ valid = isValidAsyncValueType(argType)
+ }
+
+ timeoutOffset := actualOffset + 1
+ //var err error
+ tooManyTimeouts := false
+ tooManyPolling := false
+
+ if len(orig.Args) > timeoutOffset {
+ timeout = intervals.GetDuration(pass, timeoutOffset, orig.Args[timeoutOffset], clone.Args[timeoutOffset], timePkg)
+ pollingOffset := actualOffset + 2
+ if len(orig.Args) > pollingOffset {
+ polling = intervals.GetDuration(pass, pollingOffset, orig.Args[pollingOffset], clone.Args[pollingOffset], timePkg)
+ }
+ }
+ selOrig := origExpr.Fun.(*ast.SelectorExpr)
+ selClone := cloneExpr.Fun.(*ast.SelectorExpr)
+
+ for {
+ callOrig, ok := selOrig.X.(*ast.CallExpr)
+ if !ok {
+ break
+ }
+ callClone := selClone.X.(*ast.CallExpr)
+
+ funOrig, ok := callOrig.Fun.(*ast.SelectorExpr)
+ if !ok {
+ break
+ }
+ funClone := callClone.Fun.(*ast.SelectorExpr)
+
+ switch funOrig.Sel.Name {
+ case "WithTimeout", "Within":
+ if timeout != nil {
+ tooManyTimeouts = true
+ } else if len(callOrig.Args) == 1 {
+ timeout = intervals.GetDurationFromValue(pass, callOrig.Args[0], callClone.Args[0])
+ }
+
+ case "WithPolling", "ProbeEvery":
+ if polling != nil {
+ tooManyPolling = true
+ } else if len(callOrig.Args) == 1 {
+ polling = intervals.GetDurationFromValue(pass, callOrig.Args[0], callClone.Args[0])
+ }
+ }
+
+ selOrig = funOrig
+ selClone = funClone
+ }
+
+ return &AsyncArg{
+ valid: valid,
+ fun: fun,
+ timeoutInterval: timeout,
+ pollingInterval: polling,
+ tooManyTimeouts: tooManyTimeouts,
+ tooManyPolling: tooManyPolling,
+ }
+}
+
+func (a *AsyncArg) IsValid() bool {
+ return a.valid
+}
+
+func (a *AsyncArg) Timeout() intervals.DurationValue {
+ return a.timeoutInterval
+}
+
+func (a *AsyncArg) Polling() intervals.DurationValue {
+ return a.pollingInterval
+}
+
+func (a *AsyncArg) TooManyTimeouts() bool {
+ return a.tooManyTimeouts
+}
+
+func (a *AsyncArg) TooManyPolling() bool {
+ return a.tooManyPolling
+}
+
+func isValidAsyncValueType(t gotypes.Type) bool {
+ switch t.(type) {
+ // allow functions that return function or channel.
+ case *gotypes.Signature, *gotypes.Chan, *gotypes.Pointer:
+ return true
+ case *gotypes.Named:
+ return isValidAsyncValueType(t.Underlying())
+ }
+
+ return false
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/asyncfuncarg.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/asyncfuncarg.go
new file mode 100644
index 000000000..c777cd4a7
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/asyncfuncarg.go
@@ -0,0 +1,38 @@
+package actual
+
+import (
+ gotypes "go/types"
+
+ "github.com/nunnatsa/ginkgolinter/internal/gomegainfo"
+ "github.com/nunnatsa/ginkgolinter/internal/interfaces"
+)
+
+func getAsyncFuncArg(sig *gotypes.Signature) ArgPayload {
+ argType := FuncSigArgType
+ if sig.Results().Len() == 1 {
+ if interfaces.ImplementsError(sig.Results().At(0).Type().Underlying()) {
+ argType |= ErrFuncActualArgType | ErrorTypeArgType
+ }
+ }
+
+ if sig.Params().Len() > 0 {
+ arg := sig.Params().At(0).Type()
+ if gomegainfo.IsGomegaType(arg) && sig.Results().Len() == 0 {
+ argType |= FuncSigArgType | GomegaParamArgType
+ }
+ }
+
+ if sig.Results().Len() > 1 {
+ argType |= FuncSigArgType | MultiRetsArgType
+ }
+
+ return &FuncSigArgPayload{argType: argType}
+}
+
+type FuncSigArgPayload struct {
+ argType ArgType
+}
+
+func (f FuncSigArgPayload) ArgType() ArgType {
+ return f.argType
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/comparisonAsserion.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/comparisonAsserion.go
new file mode 100644
index 000000000..2b16402db
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/actual/comparisonAsserion.go
@@ -0,0 +1,260 @@
+package actual
+
+import (
+ "go/ast"
+ "go/constant"
+ "go/token"
+ gotypes "go/types"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/internal/expression/value"
+)
+
+type ComparisonActualPayload interface {
+ GetOp() token.Token
+ GetLeft() value.Valuer
+ GetRight() value.Valuer
+}
+
+type FuncComparisonPayload struct {
+ op token.Token
+ argType ArgType
+ val value.Valuer
+ left value.Valuer
+ arg ast.Expr
+}
+
+func newFuncComparisonPayload(origLeft, leftClone *ast.CallExpr, origRight, rightClone ast.Expr, op token.Token, pass *analysis.Pass) (*FuncComparisonPayload, bool) {
+
+ funcName, ok := builtinFuncName(origLeft)
+ if !ok {
+ return nil, false
+ }
+
+ if len(origLeft.Args) != 1 {
+ return nil, false
+ }
+
+ left := value.GetValuer(origLeft, leftClone, pass)
+ val := value.GetValuer(origRight, rightClone, pass)
+
+ argType := ComparisonActualArgType
+ switch funcName {
+ case "len":
+ argType |= LenComparisonActualArgType
+
+ if val.IsValueNumeric() {
+ if val.IsValueZero() {
+ switch op {
+ case token.EQL:
+ argType |= EqualZero
+
+ case token.NEQ, token.GTR:
+ argType |= GreaterThanZero
+ }
+ } else if val.GetValue().String() == "1" && op == token.GEQ {
+ argType |= GreaterThanZero
+ }
+ }
+
+ if !argType.Is(GreaterThanZero) && op != token.EQL && op != token.NEQ {
+ return nil, false
+ }
+
+ case "cap":
+ if op != token.EQL && op != token.NEQ {
+ return nil, false
+ }
+ argType |= CapComparisonActualArgType
+
+ default:
+ return nil, false
+ }
+
+ return &FuncComparisonPayload{
+ op: op,
+ argType: argType,
+ val: val,
+ left: left,
+ arg: leftClone.Args[0],
+ }, true
+}
+
+func (f *FuncComparisonPayload) GetLeft() value.Valuer {
+ return f.left
+}
+
+func (f *FuncComparisonPayload) GetRight() value.Valuer {
+ return f.val
+}
+
+func (f *FuncComparisonPayload) ArgType() ArgType {
+ return f.argType
+}
+
+func (f *FuncComparisonPayload) GetOp() token.Token {
+ return f.op
+}
+
+func (f *FuncComparisonPayload) GetValue() constant.Value {
+ return f.val.GetValue()
+}
+
+func (f *FuncComparisonPayload) GetType() gotypes.Type {
+ return f.val.GetType()
+}
+
+func (f *FuncComparisonPayload) GetValueExpr() ast.Expr {
+ return f.val.GetValueExpr()
+}
+
+func (f *FuncComparisonPayload) IsError() bool {
+ return f.val.IsError()
+}
+
+func (f *FuncComparisonPayload) IsValueZero() bool {
+ return f.val.IsValueZero()
+}
+
+func (f *FuncComparisonPayload) IsFunc() bool {
+ return true
+}
+
+func (f *FuncComparisonPayload) IsValueNumeric() bool {
+ return f.val.IsValueNumeric()
+}
+
+func (f *FuncComparisonPayload) IsValueInt() bool {
+ return f.val.IsValueInt()
+}
+
+func (f *FuncComparisonPayload) IsInterface() bool {
+ return f.val.IsInterface()
+}
+
+func (f *FuncComparisonPayload) IsPointer() bool {
+ return f.val.IsPointer()
+}
+
+func (f *FuncComparisonPayload) GetFuncArg() ast.Expr {
+ return f.arg
+}
+
+type ComparisonArgPayload struct {
+ left value.Valuer
+ right value.Valuer
+ op token.Token
+}
+
+func newComparisonArgPayload(left, right value.Valuer, op token.Token) *ComparisonArgPayload {
+ return &ComparisonArgPayload{
+ left: left,
+ right: right,
+ op: op,
+ }
+}
+
+func (*ComparisonArgPayload) ArgType() ArgType {
+ return BinaryComparisonActualArgType | ComparisonActualArgType
+}
+
+func (c *ComparisonArgPayload) GetOp() token.Token {
+ return c.op
+}
+
+func (c *ComparisonArgPayload) GetLeft() value.Valuer {
+ return c.left
+}
+
+func (c *ComparisonArgPayload) GetRight() value.Valuer {
+ return c.right
+}
+
+type NilComparisonPayload struct {
+ val value.Valuer
+ right value.Valuer
+ op token.Token
+}
+
+func newNilComparisonPayload(val, right value.Valuer, op token.Token) *NilComparisonPayload {
+ return &NilComparisonPayload{
+ val: val,
+ right: right,
+ op: op,
+ }
+}
+
+func (*NilComparisonPayload) ArgType() ArgType {
+ return NilComparisonActualArgType
+}
+
+func (n *NilComparisonPayload) GetLeft() value.Valuer {
+ return n.val
+}
+
+func (n *NilComparisonPayload) GetRight() value.Valuer {
+ return n.right
+}
+
+func (n *NilComparisonPayload) GetType() gotypes.Type {
+ return n.val.GetType()
+}
+
+func (n *NilComparisonPayload) GetValue() constant.Value {
+ return n.val.GetValue()
+}
+
+func (n *NilComparisonPayload) GetValueExpr() ast.Expr {
+ return n.val.GetValueExpr()
+}
+
+func (n *NilComparisonPayload) IsValueInt() bool {
+ return n.val.IsValueInt()
+}
+
+func (n *NilComparisonPayload) IsError() bool {
+ return n.val.IsError()
+}
+
+func (n *NilComparisonPayload) IsValueNumeric() bool {
+ return n.val.IsValueNumeric()
+}
+
+func (n *NilComparisonPayload) IsFunc() bool {
+ return n.val.IsFunc()
+}
+
+func (n *NilComparisonPayload) IsValueZero() bool {
+ return n.val.IsValueZero()
+}
+
+func (n *NilComparisonPayload) IsInterface() bool {
+ return n.val.IsInterface()
+}
+
+func (n *NilComparisonPayload) IsPointer() bool {
+ return n.val.IsPointer()
+}
+
+func (n *NilComparisonPayload) GetOp() token.Token {
+ return n.op
+}
+
+func builtinFuncName(callExpr *ast.CallExpr) (string, bool) {
+ argFunc, ok := callExpr.Fun.(*ast.Ident)
+ if !ok {
+ return "", false
+ }
+
+ if len(callExpr.Args) != 1 {
+ return "", false
+ }
+
+ switch name := argFunc.Name; name {
+ case "len", "cap", "min", "max":
+ return name, true
+ default:
+ return "", false
+ }
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/expression.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/expression.go
new file mode 100644
index 000000000..976e726fc
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/expression.go
@@ -0,0 +1,315 @@
+package expression
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ gotypes "go/types"
+
+ "github.com/nunnatsa/ginkgolinter/internal/formatter"
+
+ "github.com/go-toolsmith/astcopy"
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/internal/expression/actual"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/value"
+ "github.com/nunnatsa/ginkgolinter/internal/gomegahandler"
+ "github.com/nunnatsa/ginkgolinter/internal/gomegainfo"
+ "github.com/nunnatsa/ginkgolinter/internal/reverseassertion"
+)
+
+type GomegaExpression struct {
+ orig *ast.CallExpr
+ clone *ast.CallExpr
+
+ assertionFuncName string
+ origAssertionFuncName string
+ actualFuncName string
+
+ isAsync bool
+
+ actual *actual.Actual
+ matcher *matcher.Matcher
+
+ handler gomegahandler.Handler
+}
+
+func New(origExpr *ast.CallExpr, pass *analysis.Pass, handler gomegahandler.Handler, timePkg string) (*GomegaExpression, bool) {
+ actualMethodName, ok := handler.GetActualFuncName(origExpr)
+ if !ok || !gomegainfo.IsActualMethod(actualMethodName) {
+ return nil, false
+ }
+
+ origSel, ok := origExpr.Fun.(*ast.SelectorExpr)
+ if !ok || !gomegainfo.IsAssertionFunc(origSel.Sel.Name) {
+ return &GomegaExpression{
+ orig: origExpr,
+ actualFuncName: actualMethodName,
+ }, true
+ }
+
+ exprClone := astcopy.CallExpr(origExpr)
+ selClone := exprClone.Fun.(*ast.SelectorExpr)
+
+ origActual := handler.GetActualExpr(origSel)
+ if origActual == nil {
+ return nil, false
+ }
+
+ actualClone := handler.GetActualExprClone(origSel, selClone)
+ if actualClone == nil {
+ return nil, false
+ }
+
+ actl, ok := actual.New(origExpr, exprClone, origActual, actualClone, pass, handler, timePkg)
+ if !ok {
+ return nil, false
+ }
+
+ origMatcher, ok := origExpr.Args[0].(*ast.CallExpr)
+ if !ok {
+ return nil, false
+ }
+
+ matcherClone := exprClone.Args[0].(*ast.CallExpr)
+
+ mtchr, ok := matcher.New(origMatcher, matcherClone, pass, handler)
+ if !ok {
+ return nil, false
+ }
+
+ exprClone.Args[0] = mtchr.Clone
+
+ gexp := &GomegaExpression{
+ orig: origExpr,
+ clone: exprClone,
+
+ assertionFuncName: origSel.Sel.Name,
+ origAssertionFuncName: origSel.Sel.Name,
+ actualFuncName: actualMethodName,
+
+ isAsync: actl.IsAsync(),
+
+ actual: actl,
+ matcher: mtchr,
+
+ handler: handler,
+ }
+
+ if mtchr.ShouldReverseLogic() {
+ gexp.ReverseAssertionFuncLogic()
+ }
+
+ return gexp, true
+}
+
+func (e *GomegaExpression) IsMissingAssertion() bool {
+ return e.matcher == nil
+}
+
+func (e *GomegaExpression) GetActualFuncName() string {
+ if e == nil {
+ return ""
+ }
+ return e.actualFuncName
+}
+
+func (e *GomegaExpression) GetAssertFuncName() string {
+ if e == nil {
+ return ""
+ }
+ return e.assertionFuncName
+}
+
+func (e *GomegaExpression) GetOrigAssertFuncName() string {
+ if e == nil {
+ return ""
+ }
+ return e.origAssertionFuncName
+}
+
+func (e *GomegaExpression) IsAsync() bool {
+ return e.isAsync
+}
+
+func (e *GomegaExpression) ReverseAssertionFuncLogic() {
+ assertionFunc := e.clone.Fun.(*ast.SelectorExpr).Sel
+ newName := reverseassertion.ChangeAssertionLogic(assertionFunc.Name)
+ assertionFunc.Name = newName
+ e.assertionFuncName = newName
+}
+
+func (e *GomegaExpression) ReplaceAssertionMethod(name string) {
+ e.clone.Fun.(*ast.SelectorExpr).Sel.Name = name
+}
+
+func (e *GomegaExpression) ReplaceMatcherFuncName(name string) {
+ e.matcher.ReplaceMatcherFuncName(name)
+}
+
+func (e *GomegaExpression) ReplaceMatcherArgs(newArgs []ast.Expr) {
+ e.matcher.ReplaceMatcherArgs(newArgs)
+}
+
+func (e *GomegaExpression) RemoveMatcherArgs() {
+ e.matcher.ReplaceMatcherArgs(nil)
+}
+
+func (e *GomegaExpression) ReplaceActual(newArg ast.Expr) {
+ e.actual.ReplaceActual(newArg)
+}
+
+func (e *GomegaExpression) ReplaceActualWithItsFirstArg() {
+ e.actual.ReplaceActualWithItsFirstArg()
+}
+
+func (e *GomegaExpression) replaceMathcerFuncNoArgs(name string) {
+ e.matcher.ReplaceMatcherFuncName(name)
+ e.RemoveMatcherArgs()
+}
+
+func (e *GomegaExpression) SetMatcherBeZero() {
+ e.replaceMathcerFuncNoArgs("BeZero")
+}
+
+func (e *GomegaExpression) SetMatcherBeEmpty() {
+ e.replaceMathcerFuncNoArgs("BeEmpty")
+}
+
+func (e *GomegaExpression) SetLenNumericMatcher() {
+ if m, ok := e.matcher.GetMatcherInfo().(value.Valuer); ok && m.IsValueZero() {
+ e.SetMatcherBeEmpty()
+ } else {
+ e.ReplaceMatcherFuncName("HaveLen")
+ e.ReplaceMatcherArgs([]ast.Expr{m.GetValueExpr()})
+ }
+}
+
+func (e *GomegaExpression) SetLenNumericActual() {
+ if m, ok := e.matcher.GetMatcherInfo().(value.Valuer); ok && m.IsValueZero() {
+ e.SetMatcherBeEmpty()
+ } else {
+ e.ReplaceMatcherFuncName("HaveLen")
+ e.ReplaceMatcherArgs([]ast.Expr{m.GetValueExpr()})
+ }
+}
+
+func (e *GomegaExpression) SetMatcherLen(arg ast.Expr) {
+ e.ReplaceMatcherFuncName("HaveLen")
+ e.ReplaceMatcherArgs([]ast.Expr{arg})
+}
+
+func (e *GomegaExpression) SetMatcherCap(arg ast.Expr) {
+ e.ReplaceMatcherFuncName("HaveCap")
+ e.ReplaceMatcherArgs([]ast.Expr{arg})
+}
+
+func (e *GomegaExpression) SetMatcherCapZero() {
+ e.ReplaceMatcherFuncName("HaveCap")
+ e.ReplaceMatcherArgs([]ast.Expr{&ast.BasicLit{Kind: token.INT, Value: "0"}})
+}
+
+func (e *GomegaExpression) SetMatcherSucceed() {
+ e.replaceMathcerFuncNoArgs("Succeed")
+}
+
+func (e *GomegaExpression) SetMatcherHaveOccurred() {
+ e.replaceMathcerFuncNoArgs("HaveOccurred")
+}
+
+func (e *GomegaExpression) SetMatcherBeNil() {
+ e.replaceMathcerFuncNoArgs("BeNil")
+}
+
+func (e *GomegaExpression) SetMatcherBeTrue() {
+ e.replaceMathcerFuncNoArgs("BeTrue")
+}
+
+func (e *GomegaExpression) SetMatcherBeFalse() {
+ e.replaceMathcerFuncNoArgs("BeFalse")
+}
+
+func (e *GomegaExpression) SetMatcherHaveValue() {
+ newMatcherExp := e.handler.GetNewWrapperMatcher("HaveValue", e.matcher.Clone)
+ e.clone.Args[0] = newMatcherExp
+ e.matcher.Clone = newMatcherExp
+}
+
+func (e *GomegaExpression) SetMatcherEqual(arg ast.Expr) {
+ e.ReplaceMatcherFuncName("Equal")
+ e.ReplaceMatcherArgs([]ast.Expr{arg})
+}
+
+func (e *GomegaExpression) SetMatcherBeIdenticalTo(arg ast.Expr) {
+ e.ReplaceMatcherFuncName("BeIdenticalTo")
+ e.ReplaceMatcherArgs([]ast.Expr{arg})
+}
+
+func (e *GomegaExpression) SetMatcherBeNumerically(op token.Token, arg ast.Expr) {
+ e.ReplaceMatcherFuncName("BeNumerically")
+ e.ReplaceMatcherArgs([]ast.Expr{
+ &ast.BasicLit{Kind: token.STRING, Value: fmt.Sprintf("%q", op.String())},
+ arg,
+ })
+}
+
+func (e *GomegaExpression) IsNegativeAssertion() bool {
+ return reverseassertion.IsNegativeLogic(e.assertionFuncName)
+}
+
+func (e *GomegaExpression) GetClone() *ast.CallExpr {
+ return e.clone
+}
+
+// Actual proxies:
+
+func (e *GomegaExpression) GetActualClone() *ast.CallExpr {
+ return e.actual.Clone
+}
+
+func (e *GomegaExpression) AppendWithArgsToActual() {
+ e.actual.AppendWithArgsMethod()
+}
+
+func (e *GomegaExpression) GetAsyncActualArg() *actual.AsyncArg {
+ return e.actual.GetAsyncArg()
+}
+
+func (e *GomegaExpression) GetActualArg() actual.ArgPayload {
+ return e.actual.Arg
+}
+
+func (e *GomegaExpression) GetActualArgExpr() ast.Expr {
+ return e.actual.GetActualArg()
+}
+
+func (e *GomegaExpression) GetActualArgGOType() gotypes.Type {
+ return e.actual.ArgGOType()
+}
+
+func (e *GomegaExpression) ActualArgTypeIs(other actual.ArgType) bool {
+ return e.actual.Arg.ArgType().Is(other)
+}
+
+func (e *GomegaExpression) IsActualTuple() bool {
+ return e.actual.IsTuple()
+}
+
+// Matcher proxies
+
+func (e *GomegaExpression) GetMatcher() *matcher.Matcher {
+ return e.matcher
+}
+
+func (e *GomegaExpression) GetMatcherInfo() matcher.Info {
+ return e.matcher.GetMatcherInfo()
+}
+
+func (e *GomegaExpression) MatcherTypeIs(other matcher.Type) bool {
+ return e.matcher.GetMatcherInfo().Type().Is(other)
+}
+
+func (e *GomegaExpression) FormatOrig(frm *formatter.GoFmtFormatter) string {
+ return frm.Format(e.orig)
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/bematchers.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/bematchers.go
new file mode 100644
index 000000000..24272535d
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/bematchers.go
@@ -0,0 +1,77 @@
+package matcher
+
+import "github.com/nunnatsa/ginkgolinter/internal/expression/value"
+
+type BeIdenticalToMatcher struct {
+ value.Value
+}
+
+func (BeIdenticalToMatcher) Type() Type {
+ return BeIdenticalToMatcherType
+}
+
+func (BeIdenticalToMatcher) MatcherName() string {
+ return beIdenticalTo
+}
+
+type BeEquivalentToMatcher struct {
+ value.Value
+}
+
+func (BeEquivalentToMatcher) Type() Type {
+ return BeEquivalentToMatcherType
+}
+
+func (BeEquivalentToMatcher) MatcherName() string {
+ return beEquivalentTo
+}
+
+type BeZeroMatcher struct{}
+
+func (BeZeroMatcher) Type() Type {
+ return BeZeroMatcherType
+}
+
+func (BeZeroMatcher) MatcherName() string {
+ return beZero
+}
+
+type BeEmptyMatcher struct{}
+
+func (BeEmptyMatcher) Type() Type {
+ return BeEmptyMatcherType
+}
+
+func (BeEmptyMatcher) MatcherName() string {
+ return beEmpty
+}
+
+type BeTrueMatcher struct{}
+
+func (BeTrueMatcher) Type() Type {
+ return BeTrueMatcherType | BoolValueTrue
+}
+
+func (BeTrueMatcher) MatcherName() string {
+ return beTrue
+}
+
+type BeFalseMatcher struct{}
+
+func (BeFalseMatcher) Type() Type {
+ return BeFalseMatcherType | BoolValueFalse
+}
+
+func (BeFalseMatcher) MatcherName() string {
+ return beFalse
+}
+
+type BeNilMatcher struct{}
+
+func (BeNilMatcher) Type() Type {
+ return BeNilMatcherType
+}
+
+func (BeNilMatcher) MatcherName() string {
+ return beNil
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/benumericmatcher.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/benumericmatcher.go
new file mode 100644
index 000000000..8683f0291
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/benumericmatcher.go
@@ -0,0 +1,128 @@
+package matcher
+
+import (
+ "go/ast"
+ "go/constant"
+ "go/token"
+ gotypes "go/types"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/internal/expression/value"
+)
+
+type BeNumericallyMatcher struct {
+ op token.Token
+ value value.Valuer
+ argType Type
+}
+
+var compareOps = map[string]token.Token{
+ `"=="`: token.EQL,
+ `"<"`: token.LSS,
+ `">"`: token.GTR,
+ `"="`: token.ASSIGN,
+ `"!="`: token.NEQ,
+ `"<="`: token.LEQ,
+ `">="`: token.GEQ,
+}
+
+func getCompareOp(opExp ast.Expr) token.Token {
+ basic, ok := opExp.(*ast.BasicLit)
+ if !ok {
+ return token.ILLEGAL
+ }
+ if basic.Kind != token.STRING {
+ return token.ILLEGAL
+ }
+
+ if tk, ok := compareOps[basic.Value]; ok {
+ return tk
+ }
+
+ return token.ILLEGAL
+}
+
+func newBeNumericallyMatcher(opExp, orig, clone ast.Expr, pass *analysis.Pass) Info {
+ op := getCompareOp(opExp)
+ if op == token.ILLEGAL {
+ return &UnspecifiedMatcher{
+ matcherName: beNumerically,
+ }
+ }
+
+ val := value.GetValuer(orig, clone, pass)
+ argType := BeNumericallyMatcherType
+
+ if val.IsValueNumeric() {
+ if v := val.GetValue().String(); v == "0" {
+ switch op {
+ case token.EQL:
+ argType |= EqualZero
+
+ case token.NEQ, token.GTR:
+ argType |= GreaterThanZero
+ }
+ } else if v == "1" && op == token.GEQ {
+ argType |= GreaterThanZero
+ }
+ }
+
+ return &BeNumericallyMatcher{
+ op: op,
+ value: val,
+ argType: argType,
+ }
+}
+
+func (m BeNumericallyMatcher) Type() Type {
+ return m.argType
+}
+
+func (BeNumericallyMatcher) MatcherName() string {
+ return beNumerically
+}
+
+func (m BeNumericallyMatcher) GetValueExpr() ast.Expr {
+ return m.value.GetValueExpr()
+}
+
+func (m BeNumericallyMatcher) GetValue() constant.Value {
+ return m.value.GetValue()
+}
+
+func (m BeNumericallyMatcher) GetType() gotypes.Type {
+ return m.value.GetType()
+}
+
+func (m BeNumericallyMatcher) GetOp() token.Token {
+ return m.op
+}
+
+func (m BeNumericallyMatcher) IsValueZero() bool {
+ return m.value.IsValueZero()
+}
+
+func (m BeNumericallyMatcher) IsValueInt() bool {
+ return m.value.IsValueInt()
+}
+
+func (m BeNumericallyMatcher) IsValueNumeric() bool {
+ return m.value.IsValueNumeric()
+}
+
+func (m BeNumericallyMatcher) IsError() bool {
+ return m.value.IsError()
+}
+
+func (m BeNumericallyMatcher) IsFunc() bool {
+ return m.value.IsFunc()
+}
+
+func (m BeNumericallyMatcher) IsInterface() bool {
+ return m.value.IsInterface()
+}
+
+func (m BeNumericallyMatcher) IsPointer() bool {
+ return m.value.IsPointer()
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/equalmatcher.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/equalmatcher.go
new file mode 100644
index 000000000..8cee8e408
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/equalmatcher.go
@@ -0,0 +1,124 @@
+package matcher
+
+import (
+ "go/ast"
+ "go/constant"
+ gotypes "go/types"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/internal/expression/value"
+)
+
+func newEqualMatcher(orig, clone ast.Expr, pass *analysis.Pass) Info {
+ t := pass.TypesInfo.Types[orig]
+
+ if t.Value != nil {
+ if t.Value.Kind() == constant.Bool {
+ if t.Value.String() == "true" {
+ return &EqualTrueMatcher{}
+ }
+ return &EqualFalseMatcher{}
+ }
+ }
+
+ if value.IsNil(orig, pass) {
+ return &EqualNilMatcher{
+ gotype: pass.TypesInfo.TypeOf(orig),
+ }
+ }
+
+ val := value.GetValuer(orig, clone, pass)
+
+ return &EqualMatcher{
+ val: val,
+ }
+}
+
+type EqualMatcher struct {
+ val value.Valuer
+}
+
+func (EqualMatcher) Type() Type {
+ return EqualMatcherType
+}
+
+func (EqualMatcher) MatcherName() string {
+ return equal
+}
+
+func (m EqualMatcher) GetValue() constant.Value {
+ return m.val.GetValue()
+}
+
+func (m EqualMatcher) GetType() gotypes.Type {
+ return m.val.GetType()
+}
+
+func (m EqualMatcher) GetValueExpr() ast.Expr {
+ return m.val.GetValueExpr()
+}
+
+func (m EqualMatcher) IsValueZero() bool {
+ return m.val.IsValueZero()
+}
+
+func (m EqualMatcher) IsValueInt() bool {
+ return m.val.IsValueInt()
+}
+
+func (m EqualMatcher) IsValueNumeric() bool {
+ return m.val.IsValueNumeric()
+}
+
+func (m EqualMatcher) IsError() bool {
+ return m.val.IsError()
+}
+
+func (m EqualMatcher) IsFunc() bool {
+ return m.val.IsFunc()
+}
+
+func (m EqualMatcher) IsInterface() bool {
+ return m.val.IsInterface()
+}
+
+func (m EqualMatcher) IsPointer() bool {
+ return m.val.IsPointer()
+}
+
+type EqualNilMatcher struct {
+ gotype gotypes.Type
+}
+
+func (EqualNilMatcher) Type() Type {
+ return EqualNilMatcherType | EqualMatcherType | EqualValueMatcherType
+}
+
+func (EqualNilMatcher) MatcherName() string {
+ return equal
+}
+
+func (n EqualNilMatcher) GetType() gotypes.Type {
+ return n.gotype
+}
+
+type EqualTrueMatcher struct{}
+
+func (EqualTrueMatcher) Type() Type {
+ return EqualMatcherType | EqualBoolValueMatcherType | BoolValueTrue
+}
+
+func (EqualTrueMatcher) MatcherName() string {
+ return equal
+}
+
+type EqualFalseMatcher struct{}
+
+func (EqualFalseMatcher) Type() Type {
+ return EqualMatcherType | EqualBoolValueMatcherType | BoolValueFalse
+}
+
+func (EqualFalseMatcher) MatcherName() string {
+ return equal
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/errormatchers.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/errormatchers.go
new file mode 100644
index 000000000..a493287e0
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/errormatchers.go
@@ -0,0 +1,199 @@
+package matcher
+
+import (
+ "go/ast"
+ gotypes "go/types"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/internal/expression/value"
+ "github.com/nunnatsa/ginkgolinter/internal/interfaces"
+)
+
+type HaveOccurredMatcher struct{}
+
+func (m *HaveOccurredMatcher) Type() Type {
+ return HaveOccurredMatcherType
+}
+func (m *HaveOccurredMatcher) MatcherName() string {
+ return haveOccurred
+}
+
+type SucceedMatcher struct{}
+
+func (m *SucceedMatcher) Type() Type {
+ return SucceedMatcherType
+}
+func (m *SucceedMatcher) MatcherName() string {
+ return succeed
+}
+
+type MatchErrorMatcher interface {
+ Info
+ AllowedNumArgs() int
+ NumArgs() int
+}
+
+type InvalidMatchErrorMatcher struct {
+ firstAgr ast.Expr
+ numArgs int
+}
+
+func (m *InvalidMatchErrorMatcher) Type() Type {
+ return MatchErrorMatcherType
+}
+
+func (m *InvalidMatchErrorMatcher) MatcherName() string {
+ return matchError
+}
+
+func (m *InvalidMatchErrorMatcher) AllowedNumArgs() int {
+ return 1
+}
+
+func (m *InvalidMatchErrorMatcher) NumArgs() int {
+ return m.numArgs
+}
+
+func (m *InvalidMatchErrorMatcher) GetValueExpr() ast.Expr {
+ return m.firstAgr
+}
+
+type MatchErrorMatcherWithErr struct {
+ numArgs int
+}
+
+func (m *MatchErrorMatcherWithErr) Type() Type {
+ return MatchErrorMatcherType | ErrMatchWithErr
+}
+
+func (m *MatchErrorMatcherWithErr) MatcherName() string {
+ return matchError
+}
+
+func (m *MatchErrorMatcherWithErr) AllowedNumArgs() int {
+ return 1
+}
+
+func (m *MatchErrorMatcherWithErr) NumArgs() int {
+ return m.numArgs
+}
+
+type MatchErrorMatcherWithErrFunc struct {
+ numArgs int
+ secondArgIsString bool
+}
+
+func (m *MatchErrorMatcherWithErrFunc) Type() Type {
+ return MatchErrorMatcherType | ErrMatchWithErrFunc
+}
+
+func (m *MatchErrorMatcherWithErrFunc) MatcherName() string {
+ return matchError
+}
+
+func (m *MatchErrorMatcherWithErrFunc) AllowedNumArgs() int {
+ return 2
+}
+
+func (m *MatchErrorMatcherWithErrFunc) NumArgs() int {
+ return m.numArgs
+}
+
+func (m *MatchErrorMatcherWithErrFunc) IsSecondArgString() bool {
+ return m.secondArgIsString
+}
+
+type MatchErrorMatcherWithString struct {
+ numArgs int
+}
+
+func (m *MatchErrorMatcherWithString) Type() Type {
+ return MatchErrorMatcherType | ErrMatchWithString
+}
+
+func (m *MatchErrorMatcherWithString) MatcherName() string {
+ return matchError
+}
+
+func (m *MatchErrorMatcherWithString) AllowedNumArgs() int {
+ return 1
+}
+
+func (m *MatchErrorMatcherWithString) NumArgs() int {
+ return m.numArgs
+}
+
+type MatchErrorMatcherWithMatcher struct {
+ numArgs int
+}
+
+func (m *MatchErrorMatcherWithMatcher) Type() Type {
+ return MatchErrorMatcherType | ErrMatchWithMatcher
+}
+
+func (m *MatchErrorMatcherWithMatcher) MatcherName() string {
+ return matchError
+}
+
+func (m *MatchErrorMatcherWithMatcher) AllowedNumArgs() int {
+ return 1
+}
+
+func (m *MatchErrorMatcherWithMatcher) NumArgs() int {
+ return m.numArgs
+}
+
+func newMatchErrorMatcher(args []ast.Expr, pass *analysis.Pass) MatchErrorMatcher {
+ numArgs := len(args)
+ if value.IsExprError(pass, args[0]) {
+ return &MatchErrorMatcherWithErr{numArgs: numArgs}
+ }
+
+ t := pass.TypesInfo.TypeOf(args[0])
+ if isString(args[0], pass) {
+ return &MatchErrorMatcherWithString{numArgs: numArgs}
+ }
+
+ if interfaces.ImplementsGomegaMatcher(t) {
+ return &MatchErrorMatcherWithMatcher{numArgs: numArgs}
+ }
+
+ if isFuncErrBool(t) {
+ isString := false
+ if numArgs > 1 {
+ t2 := pass.TypesInfo.TypeOf(args[1])
+ isString = gotypes.Identical(t2, gotypes.Typ[gotypes.String])
+ }
+ return &MatchErrorMatcherWithErrFunc{numArgs: numArgs, secondArgIsString: isString}
+ }
+
+ return &InvalidMatchErrorMatcher{numArgs: numArgs}
+}
+
+func isString(exp ast.Expr, pass *analysis.Pass) bool {
+ t := pass.TypesInfo.TypeOf(exp)
+ return gotypes.Identical(t, gotypes.Typ[gotypes.String])
+}
+
+// isFuncErrBool checks if a function is with the signature `func(error) bool`
+func isFuncErrBool(t gotypes.Type) bool {
+ sig, ok := t.(*gotypes.Signature)
+ if !ok {
+ return false
+ }
+ if sig.Params().Len() != 1 || sig.Results().Len() != 1 {
+ return false
+ }
+
+ if !interfaces.ImplementsError(sig.Params().At(0).Type()) {
+ return false
+ }
+
+ b, ok := sig.Results().At(0).Type().(*gotypes.Basic)
+ if ok && b.Name() == "bool" && b.Info() == gotypes.IsBoolean && b.Kind() == gotypes.Bool {
+ return true
+ }
+
+ return false
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/lenmatchers.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/lenmatchers.go
new file mode 100644
index 000000000..8e4f438e8
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/lenmatchers.go
@@ -0,0 +1,11 @@
+package matcher
+
+type HaveLenZeroMatcher struct{}
+
+func (HaveLenZeroMatcher) Type() Type {
+ return HaveLenZeroMatcherType
+}
+
+func (HaveLenZeroMatcher) MatcherName() string {
+ return haveLen
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/matcher.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/matcher.go
new file mode 100644
index 000000000..0969b9551
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/matcher.go
@@ -0,0 +1,86 @@
+package matcher
+
+import (
+ "go/ast"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/internal/gomegahandler"
+)
+
+const ( // gomega matchers
+ beEmpty = "BeEmpty"
+ beEquivalentTo = "BeEquivalentTo"
+ beFalse = "BeFalse"
+ beIdenticalTo = "BeIdenticalTo"
+ beNil = "BeNil"
+ beNumerically = "BeNumerically"
+ beTrue = "BeTrue"
+ beZero = "BeZero"
+ equal = "Equal"
+ haveLen = "HaveLen"
+ haveValue = "HaveValue"
+ and = "And"
+ or = "Or"
+ withTransform = "WithTransform"
+ matchError = "MatchError"
+ haveOccurred = "HaveOccurred"
+ succeed = "Succeed"
+)
+
+type Matcher struct {
+ funcName string
+ Orig *ast.CallExpr
+ Clone *ast.CallExpr
+ info Info
+ reverseLogic bool
+ handler gomegahandler.Handler
+}
+
+func New(origMatcher, matcherClone *ast.CallExpr, pass *analysis.Pass, handler gomegahandler.Handler) (*Matcher, bool) {
+ reverse := false
+ var assertFuncName string
+ for {
+ ok := false
+ assertFuncName, ok = handler.GetActualFuncName(origMatcher)
+ if !ok {
+ return nil, false
+ }
+
+ if assertFuncName != "Not" {
+ break
+ }
+
+ reverse = !reverse
+ origMatcher, ok = origMatcher.Args[0].(*ast.CallExpr)
+ if !ok {
+ return nil, false
+ }
+ matcherClone = matcherClone.Args[0].(*ast.CallExpr)
+ }
+
+ return &Matcher{
+ funcName: assertFuncName,
+ Orig: origMatcher,
+ Clone: matcherClone,
+ info: getMatcherInfo(origMatcher, matcherClone, assertFuncName, pass, handler),
+ reverseLogic: reverse,
+ handler: handler,
+ }, true
+}
+
+func (m *Matcher) ShouldReverseLogic() bool {
+ return m.reverseLogic
+}
+
+func (m *Matcher) GetMatcherInfo() Info {
+ return m.info
+}
+
+func (m *Matcher) ReplaceMatcherFuncName(name string) {
+ m.handler.ReplaceFunction(m.Clone, ast.NewIdent(name))
+}
+
+func (m *Matcher) ReplaceMatcherArgs(newArgs []ast.Expr) {
+ m.Clone.Args = newArgs
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/matcherinfo.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/matcherinfo.go
new file mode 100644
index 000000000..084226bcc
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/matcherinfo.go
@@ -0,0 +1,148 @@
+package matcher
+
+import (
+ "go/ast"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/internal/expression/value"
+ "github.com/nunnatsa/ginkgolinter/internal/gomegahandler"
+)
+
+type Type uint64
+
+const (
+ Unspecified Type = 1 << iota
+ EqualMatcherType
+ BeZeroMatcherType
+ BeEmptyMatcherType
+ BeTrueMatcherType
+ BeFalseMatcherType
+ BeNumericallyMatcherType
+ HaveLenZeroMatcherType
+ BeEquivalentToMatcherType
+ BeIdenticalToMatcherType
+ BeNilMatcherType
+ MatchErrorMatcherType
+ MultipleMatcherMatherType
+ HaveValueMatherType
+ WithTransformMatherType
+ EqualBoolValueMatcherType
+ EqualValueMatcherType
+ HaveOccurredMatcherType
+ SucceedMatcherType
+ EqualNilMatcherType
+
+ BoolValueFalse
+ BoolValueTrue
+
+ OrMatherType
+ AndMatherType
+
+ ErrMatchWithErr
+ ErrMatchWithErrFunc
+ ErrMatchWithString
+ ErrMatchWithMatcher
+
+ EqualZero
+ GreaterThanZero
+)
+
+type Info interface {
+ Type() Type
+ MatcherName() string
+}
+
+func getMatcherInfo(orig, clone *ast.CallExpr, matcherName string, pass *analysis.Pass, handler gomegahandler.Handler) Info {
+ switch matcherName {
+ case equal:
+ return newEqualMatcher(orig.Args[0], clone.Args[0], pass)
+
+ case beZero:
+ return &BeZeroMatcher{}
+
+ case beEmpty:
+ return &BeEmptyMatcher{}
+
+ case beTrue:
+ return &BeTrueMatcher{}
+
+ case beFalse:
+ return &BeFalseMatcher{}
+
+ case beNil:
+ return &BeNilMatcher{}
+
+ case beNumerically:
+ if len(orig.Args) == 2 {
+ return newBeNumericallyMatcher(orig.Args[0], orig.Args[1], clone.Args[1], pass)
+ }
+
+ case haveLen:
+ if value.GetValuer(orig.Args[0], clone.Args[0], pass).IsValueZero() {
+ return &HaveLenZeroMatcher{}
+ }
+
+ case beEquivalentTo:
+ return &BeEquivalentToMatcher{
+ Value: value.New(orig.Args[0], clone.Args[0], pass),
+ }
+
+ case beIdenticalTo:
+ return &BeIdenticalToMatcher{
+ Value: value.New(orig.Args[0], clone.Args[0], pass),
+ }
+
+ case matchError:
+ return newMatchErrorMatcher(orig.Args, pass)
+
+ case haveValue:
+ if nestedMatcher, ok := getNestedMatcher(orig, clone, 0, pass, handler); ok {
+ return &HaveValueMatcher{
+ nested: nestedMatcher,
+ }
+ }
+
+ case withTransform:
+ if nestedMatcher, ok := getNestedMatcher(orig, clone, 1, pass, handler); ok {
+ return newWithTransformMatcher(orig.Args[0], nestedMatcher, pass)
+ }
+
+ case or, and:
+ matcherType := MultipleMatcherMatherType
+ if matcherName == or {
+ matcherType |= OrMatherType
+ } else {
+ matcherType |= AndMatherType
+ }
+
+ if m, ok := newMultipleMatchersMatcher(matcherType, orig.Args, clone.Args, pass, handler); ok {
+ return m
+ }
+
+ case succeed:
+ return &SucceedMatcher{}
+
+ case haveOccurred:
+ return &HaveOccurredMatcher{}
+
+ }
+
+ return &UnspecifiedMatcher{matcherName: matcherName}
+}
+
+type UnspecifiedMatcher struct {
+ matcherName string
+}
+
+func (UnspecifiedMatcher) Type() Type {
+ return Unspecified
+}
+
+func (u UnspecifiedMatcher) MatcherName() string {
+ return u.matcherName
+}
+
+func (t Type) Is(other Type) bool {
+ return t&other != 0
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/matcherwithnest.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/matcherwithnest.go
new file mode 100644
index 000000000..cc26e5ac2
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/matcherwithnest.go
@@ -0,0 +1,66 @@
+package matcher
+
+import (
+ "go/ast"
+ gotypes "go/types"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/internal/gomegahandler"
+)
+
+type HaveValueMatcher struct {
+ nested *Matcher
+}
+
+func (m *HaveValueMatcher) Type() Type {
+ return HaveValueMatherType
+}
+func (m *HaveValueMatcher) MatcherName() string {
+ return haveValue
+}
+
+func (m *HaveValueMatcher) GetNested() *Matcher {
+ return m.nested
+}
+
+type WithTransformMatcher struct {
+ funcType gotypes.Type
+ nested *Matcher
+}
+
+func (m *WithTransformMatcher) Type() Type {
+ return WithTransformMatherType
+}
+func (m *WithTransformMatcher) MatcherName() string {
+ return withTransform
+}
+
+func (m *WithTransformMatcher) GetNested() *Matcher {
+ return m.nested
+}
+
+func (m *WithTransformMatcher) GetFuncType() gotypes.Type {
+ return m.funcType
+}
+
+func getNestedMatcher(orig, clone *ast.CallExpr, offset int, pass *analysis.Pass, handler gomegahandler.Handler) (*Matcher, bool) {
+ if origNested, ok := orig.Args[offset].(*ast.CallExpr); ok {
+ cloneNested := clone.Args[offset].(*ast.CallExpr)
+
+ return New(origNested, cloneNested, pass, handler)
+ }
+
+ return nil, false
+}
+
+func newWithTransformMatcher(fun ast.Expr, nested *Matcher, pass *analysis.Pass) *WithTransformMatcher {
+ funcType := pass.TypesInfo.TypeOf(fun)
+ if sig, ok := funcType.(*gotypes.Signature); ok && sig.Results().Len() > 0 {
+ funcType = sig.Results().At(0).Type()
+ }
+ return &WithTransformMatcher{
+ funcType: funcType,
+ nested: nested,
+ }
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/multiplematchers.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/multiplematchers.go
new file mode 100644
index 000000000..9ce0cf5b8
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/matcher/multiplematchers.go
@@ -0,0 +1,62 @@
+package matcher
+
+import (
+ "go/ast"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/internal/gomegahandler"
+)
+
+type MultipleMatchersMatcher struct {
+ matherType Type
+ matchers []*Matcher
+}
+
+func (m *MultipleMatchersMatcher) Type() Type {
+ return m.matherType
+}
+
+func (m *MultipleMatchersMatcher) MatcherName() string {
+ if m.matherType.Is(OrMatherType) {
+ return or
+ }
+ return and
+}
+
+func newMultipleMatchersMatcher(matherType Type, orig, clone []ast.Expr, pass *analysis.Pass, handler gomegahandler.Handler) (*MultipleMatchersMatcher, bool) {
+ matchers := make([]*Matcher, len(orig))
+
+ for i := range orig {
+ nestedOrig, ok := orig[i].(*ast.CallExpr)
+ if !ok {
+ return nil, false
+ }
+
+ m, ok := New(nestedOrig, clone[i].(*ast.CallExpr), pass, handler)
+ if !ok {
+ return nil, false
+ }
+
+ m.reverseLogic = false
+
+ matchers[i] = m
+ }
+
+ return &MultipleMatchersMatcher{
+ matherType: matherType,
+ matchers: matchers,
+ }, true
+}
+
+func (m *MultipleMatchersMatcher) Len() int {
+ return len(m.matchers)
+}
+
+func (m *MultipleMatchersMatcher) At(i int) *Matcher {
+ if i >= len(m.matchers) {
+ panic("index out of range")
+ }
+
+ return m.matchers[i]
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/value/value.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/value/value.go
new file mode 100644
index 000000000..dda0dd73b
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/expression/value/value.go
@@ -0,0 +1,221 @@
+package value
+
+import (
+ "go/ast"
+ "go/constant"
+ gotypes "go/types"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/internal/interfaces"
+)
+
+type Valuer interface {
+ GetValue() constant.Value
+ GetType() gotypes.Type
+ GetValueExpr() ast.Expr
+ IsValueZero() bool
+ IsValueInt() bool
+ IsValueNumeric() bool
+ IsError() bool
+ IsFunc() bool
+ IsInterface() bool
+ IsPointer() bool
+}
+
+func GetValuer(orig, clone ast.Expr, pass *analysis.Pass) Valuer {
+ val := New(orig, clone, pass)
+ unspecified := UnspecifiedValue{
+ Value: val,
+ }
+
+ if orig == nil {
+ return unspecified
+ }
+
+ if IsExprError(pass, orig) {
+ return &ErrValue{
+ Value: val,
+ err: clone,
+ }
+ }
+
+ if val.GetValue() == nil || !val.tv.IsValue() {
+ return unspecified
+ }
+
+ if val.GetValue().Kind() == constant.Int {
+ num, ok := constant.Int64Val(val.GetValue())
+ if !ok {
+ return unspecified
+ }
+ return &IntValue{
+ Value: val,
+ val: num,
+ }
+ }
+
+ return unspecified
+}
+
+type Value struct {
+ expr ast.Expr
+ tv gotypes.TypeAndValue
+}
+
+func New(orig, clone ast.Expr, pass *analysis.Pass) Value {
+ tv := pass.TypesInfo.Types[orig]
+
+ return Value{
+ expr: clone,
+ tv: tv,
+ }
+}
+
+func (v Value) GetValueExpr() ast.Expr {
+ return v.expr
+}
+
+func (v Value) GetValue() constant.Value {
+ return v.tv.Value
+}
+
+func (v Value) GetType() gotypes.Type {
+ return v.tv.Type
+}
+
+func (v Value) IsInterface() bool {
+ return gotypes.IsInterface(v.tv.Type)
+}
+
+func (v Value) IsPointer() bool {
+ return Is[*gotypes.Pointer](v.tv.Type)
+}
+
+func (v Value) IsNil() bool {
+ return v.tv.IsNil()
+}
+
+type UnspecifiedValue struct {
+ Value
+}
+
+func (u UnspecifiedValue) IsValueZero() bool {
+ return false
+}
+
+func (u UnspecifiedValue) IsValueInt() bool {
+ return false
+}
+
+func (u UnspecifiedValue) IsValueNumeric() bool {
+ return false
+}
+
+func (u UnspecifiedValue) IsError() bool {
+ return false
+}
+
+func (u UnspecifiedValue) IsFunc() bool {
+ return isFunc(u.GetValueExpr())
+}
+
+type ErrValue struct {
+ Value
+ err ast.Expr
+}
+
+func (e ErrValue) IsValueZero() bool {
+ return false
+}
+
+func (e ErrValue) IsValueInt() bool {
+ return false
+}
+
+func (e ErrValue) IsValueNumeric() bool {
+ return false
+}
+
+func (e ErrValue) IsError() bool {
+ return true
+}
+
+func (e ErrValue) IsFunc() bool {
+ return isFunc(e.GetValueExpr())
+}
+
+type IntValuer interface {
+ GetIntValue() int64
+}
+
+type IntValue struct {
+ Value
+ val int64
+}
+
+func (i IntValue) IsValueZero() bool {
+ return i.val == 0
+}
+
+func (i IntValue) IsValueInt() bool {
+ return i.val == 0
+}
+
+func (i IntValue) IsValueNumeric() bool {
+ return true
+}
+
+func (i IntValue) IsError() bool {
+ return false
+}
+
+func (i IntValue) IsFunc() bool {
+ return false
+}
+
+func (i IntValue) GetIntValue() int64 {
+ return i.val
+}
+
+func isFunc(exp ast.Expr) bool {
+ return Is[*ast.CallExpr](exp)
+}
+
+func Is[T any](x any) bool {
+ _, matchType := x.(T)
+ return matchType
+}
+
+func IsExprError(pass *analysis.Pass, expr ast.Expr) bool {
+ actualArgType := pass.TypesInfo.TypeOf(expr)
+ switch t := actualArgType.(type) {
+ case *gotypes.Named:
+ return interfaces.ImplementsError(actualArgType)
+
+ case *gotypes.Pointer:
+ if tt, ok := t.Elem().(*gotypes.Named); ok {
+ return interfaces.ImplementsError(tt)
+ }
+
+ case *gotypes.Tuple:
+ if t.Len() > 0 {
+ switch t0 := t.At(0).Type().(type) {
+ case *gotypes.Named, *gotypes.Pointer:
+ if interfaces.ImplementsError(t0) {
+ return true
+ }
+ }
+ }
+ }
+ return false
+}
+
+func IsNil(exp ast.Expr, pass *analysis.Pass) bool {
+ id, ok := exp.(*ast.Ident)
+ if !ok {
+ return false
+ }
+
+ return pass.TypesInfo.Types[id].IsNil()
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/formatter/formatter.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/formatter/formatter.go
new file mode 100644
index 000000000..64f3d99ad
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/formatter/formatter.go
@@ -0,0 +1,22 @@
+package formatter
+
+import (
+ "bytes"
+ "go/ast"
+ "go/printer"
+ "go/token"
+)
+
+type GoFmtFormatter struct {
+ fset *token.FileSet
+}
+
+func NewGoFmtFormatter(fset *token.FileSet) *GoFmtFormatter {
+ return &GoFmtFormatter{fset: fset}
+}
+
+func (f GoFmtFormatter) Format(exp ast.Expr) string {
+ var buf bytes.Buffer
+ _ = printer.Fprint(&buf, f.fset, exp)
+ return buf.String()
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/dothandler.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/dothandler.go
new file mode 100644
index 000000000..9c54b4334
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/dothandler.go
@@ -0,0 +1,36 @@
+package ginkgohandler
+
+import (
+ "go/ast"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+// dotHandler is used when importing ginkgo with dot; i.e.
+// import . "github.com/onsi/ginkgo"
+type dotHandler struct{}
+
+func (h dotHandler) HandleGinkgoSpecs(expr ast.Expr, config types.Config, pass *analysis.Pass) bool {
+ return handleGinkgoSpecs(expr, config, pass, h)
+}
+
+func (h dotHandler) getFocusContainerName(exp *ast.CallExpr) (bool, *ast.Ident) {
+ if fun, ok := exp.Fun.(*ast.Ident); ok {
+ return isFocusContainer(fun.Name), fun
+ }
+ return false, nil
+}
+
+func (h dotHandler) isWrapContainer(exp *ast.CallExpr) bool {
+ if fun, ok := exp.Fun.(*ast.Ident); ok {
+ return isWrapContainer(fun.Name)
+ }
+ return false
+}
+
+func (h dotHandler) isFocusSpec(exp ast.Expr) bool {
+ id, ok := exp.(*ast.Ident)
+ return ok && id.Name == focusSpec
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/ginkgoinfo.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/ginkgoinfo.go
new file mode 100644
index 000000000..d8bb75399
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/ginkgoinfo.go
@@ -0,0 +1,63 @@
+package ginkgohandler
+
+const ( // container names
+ describe = "Describe"
+ pdescribe = "PDescribe"
+ xdescribe = "XDescribe"
+ fdescribe = "FDescribe"
+
+ when = "When"
+ pwhen = "PWhen"
+ xwhen = "XWhen"
+ fwhen = "FWhen"
+
+ contextContainer = "Context"
+ pcontext = "PContext"
+ xcontext = "XContext"
+ fcontext = "FContext"
+
+ it = "It"
+ pit = "PIt"
+ xit = "XIt"
+ fit = "FIt"
+
+ describeTable = "DescribeTable"
+ pdescribeTable = "PDescribeTable"
+ xdescribeTable = "XDescribeTable"
+ fdescribeTable = "FDescribeTable"
+
+ entry = "Entry"
+ pentry = "PEntry"
+ xentry = "XEntry"
+ fentry = "FEntry"
+)
+
+func isFocusContainer(name string) bool {
+ switch name {
+ case fdescribe, fcontext, fwhen, fit, fdescribeTable, fentry:
+ return true
+ }
+ return false
+}
+
+func isContainer(name string) bool {
+ switch name {
+ case it, when, contextContainer, describe, describeTable, entry,
+ pit, pwhen, pcontext, pdescribe, pdescribeTable, pentry,
+ xit, xwhen, xcontext, xdescribe, xdescribeTable, xentry:
+ return true
+ }
+ return isFocusContainer(name)
+}
+
+func isWrapContainer(name string) bool {
+ switch name {
+ case when, contextContainer, describe,
+ fwhen, fcontext, fdescribe,
+ pwhen, pcontext, pdescribe,
+ xwhen, xcontext, xdescribe:
+ return true
+ }
+
+ return false
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/handler.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/handler.go
index f10d83184..c44e3e8d8 100644
--- a/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/handler.go
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/handler.go
@@ -2,6 +2,10 @@ package ginkgohandler
import (
"go/ast"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/types"
)
const (
@@ -14,116 +18,31 @@ const (
// Handler provide different handling, depend on the way ginkgo was imported, whether
// in imported with "." name, custom name or without any name.
type Handler interface {
- GetFocusContainerName(*ast.CallExpr) (bool, *ast.Ident)
- IsWrapContainer(*ast.CallExpr) bool
- IsFocusSpec(ident ast.Expr) bool
+ HandleGinkgoSpecs(ast.Expr, types.Config, *analysis.Pass) bool
+ getFocusContainerName(*ast.CallExpr) (bool, *ast.Ident)
+ isWrapContainer(*ast.CallExpr) bool
+ isFocusSpec(ident ast.Expr) bool
}
// GetGinkgoHandler returns a ginkgor handler according to the way ginkgo was imported in the specific file
func GetGinkgoHandler(file *ast.File) Handler {
for _, imp := range file.Imports {
- if imp.Path.Value != importPath && imp.Path.Value != importPathV2 {
- continue
- }
+ switch imp.Path.Value {
+
+ case importPath, importPathV2:
+ switch name := imp.Name.String(); {
+ case name == ".":
+ return dotHandler{}
+ case name == "<nil>": // import with no local name
+ return nameHandler("ginkgo")
+ default:
+ return nameHandler(name)
+ }
- switch name := imp.Name.String(); {
- case name == ".":
- return dotHandler{}
- case name == "<nil>": // import with no local name
- return nameHandler("ginkgo")
default:
- return nameHandler(name)
- }
- }
-
- return nil // no ginkgo import; this file does not use ginkgo
-}
-
-// dotHandler is used when importing ginkgo with dot; i.e.
-// import . "github.com/onsi/ginkgo"
-type dotHandler struct{}
-
-func (h dotHandler) GetFocusContainerName(exp *ast.CallExpr) (bool, *ast.Ident) {
- if fun, ok := exp.Fun.(*ast.Ident); ok {
- return isFocusContainer(fun.Name), fun
- }
- return false, nil
-}
-
-func (h dotHandler) IsWrapContainer(exp *ast.CallExpr) bool {
- if fun, ok := exp.Fun.(*ast.Ident); ok {
- return IsWrapContainer(fun.Name)
- }
- return false
-}
-
-func (h dotHandler) IsFocusSpec(exp ast.Expr) bool {
- id, ok := exp.(*ast.Ident)
- return ok && id.Name == focusSpec
-}
-
-// nameHandler is used when importing ginkgo without name; i.e.
-// import "github.com/onsi/ginkgo"
-//
-// or with a custom name; e.g.
-// import customname "github.com/onsi/ginkgo"
-type nameHandler string
-
-func (h nameHandler) GetFocusContainerName(exp *ast.CallExpr) (bool, *ast.Ident) {
- if sel, ok := exp.Fun.(*ast.SelectorExpr); ok {
- if id, ok := sel.X.(*ast.Ident); ok && id.Name == string(h) {
- return isFocusContainer(sel.Sel.Name), sel.Sel
- }
- }
- return false, nil
-}
-
-func (h nameHandler) IsWrapContainer(exp *ast.CallExpr) bool {
- if sel, ok := exp.Fun.(*ast.SelectorExpr); ok {
- if id, ok := sel.X.(*ast.Ident); ok && id.Name == string(h) {
- return IsWrapContainer(sel.Sel.Name)
- }
- }
- return false
-
-}
-
-func (h nameHandler) IsFocusSpec(exp ast.Expr) bool {
- if selExp, ok := exp.(*ast.SelectorExpr); ok {
- if x, ok := selExp.X.(*ast.Ident); ok && x.Name == string(h) {
- return selExp.Sel.Name == focusSpec
+ continue
}
}
- return false
-}
-
-func isFocusContainer(name string) bool {
- switch name {
- case "FDescribe", "FContext", "FWhen", "FIt", "FDescribeTable", "FEntry":
- return true
- }
- return false
-}
-
-func IsContainer(name string) bool {
- switch name {
- case "It", "When", "Context", "Describe", "DescribeTable", "Entry",
- "PIt", "PWhen", "PContext", "PDescribe", "PDescribeTable", "PEntry",
- "XIt", "XWhen", "XContext", "XDescribe", "XDescribeTable", "XEntry":
- return true
- }
- return isFocusContainer(name)
-}
-
-func IsWrapContainer(name string) bool {
- switch name {
- case "When", "Context", "Describe",
- "FWhen", "FContext", "FDescribe",
- "PWhen", "PContext", "PDescribe",
- "XWhen", "XContext", "XDescribe":
- return true
- }
-
- return false
+ return nil
}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/handling.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/handling.go
new file mode 100644
index 000000000..4b6de5767
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/handling.go
@@ -0,0 +1,195 @@
+package ginkgohandler
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+const (
+ linterName = "ginkgo-linter"
+ focusContainerFound = linterName + ": Focus container found. This is used only for local debug and should not be part of the actual source code. Consider to replace with %q"
+ focusSpecFound = linterName + ": Focus spec found. This is used only for local debug and should not be part of the actual source code. Consider to remove it"
+ useBeforeEachTemplate = "use BeforeEach() to assign variable %s"
+)
+
+func handleGinkgoSpecs(expr ast.Expr, config types.Config, pass *analysis.Pass, ginkgoHndlr Handler) bool {
+ goDeeper := false
+ if exp, ok := expr.(*ast.CallExpr); ok {
+ if bool(config.ForbidFocus) && checkFocusContainer(pass, ginkgoHndlr, exp) {
+ goDeeper = true
+ }
+
+ if bool(config.ForbidSpecPollution) && checkAssignmentsInContainer(pass, ginkgoHndlr, exp) {
+ goDeeper = true
+ }
+ }
+ return goDeeper
+}
+
+func checkAssignmentsInContainer(pass *analysis.Pass, ginkgoHndlr Handler, exp *ast.CallExpr) bool {
+ foundSomething := false
+ if ginkgoHndlr.isWrapContainer(exp) {
+ for _, arg := range exp.Args {
+ if fn, ok := arg.(*ast.FuncLit); ok {
+ if fn.Body != nil {
+ if checkAssignments(pass, fn.Body.List) {
+ foundSomething = true
+ }
+ break
+ }
+ }
+ }
+ }
+
+ return foundSomething
+}
+
+func checkAssignments(pass *analysis.Pass, list []ast.Stmt) bool {
+ foundSomething := false
+ for _, stmt := range list {
+ switch st := stmt.(type) {
+ case *ast.DeclStmt:
+ if checkAssignmentDecl(pass, st) {
+ foundSomething = true
+ }
+
+ case *ast.AssignStmt:
+ if checkAssignmentAssign(pass, st) {
+ foundSomething = true
+ }
+
+ case *ast.IfStmt:
+ if checkAssignmentIf(pass, st) {
+ foundSomething = true
+ }
+ }
+ }
+
+ return foundSomething
+}
+
+func checkAssignmentsValues(pass *analysis.Pass, names []*ast.Ident, values []ast.Expr) bool {
+ foundSomething := false
+ for i, val := range values {
+ if !is[*ast.FuncLit](val) {
+ reportNoFix(pass, names[i].Pos(), useBeforeEachTemplate, names[i].Name)
+ foundSomething = true
+ }
+ }
+
+ return foundSomething
+}
+
+func checkAssignmentDecl(pass *analysis.Pass, ds *ast.DeclStmt) bool {
+ foundSomething := false
+ if gen, ok := ds.Decl.(*ast.GenDecl); ok {
+ if gen.Tok != token.VAR {
+ return false
+ }
+ for _, spec := range gen.Specs {
+ if valSpec, ok := spec.(*ast.ValueSpec); ok {
+ if checkAssignmentsValues(pass, valSpec.Names, valSpec.Values) {
+ foundSomething = true
+ }
+ }
+ }
+ }
+
+ return foundSomething
+}
+
+func checkAssignmentAssign(pass *analysis.Pass, as *ast.AssignStmt) bool {
+ foundSomething := false
+ for i, val := range as.Rhs {
+ if !is[*ast.FuncLit](val) {
+ if id, isIdent := as.Lhs[i].(*ast.Ident); isIdent && id.Name != "_" {
+ reportNoFix(pass, id.Pos(), useBeforeEachTemplate, id.Name)
+ foundSomething = true
+ }
+ }
+ }
+ return foundSomething
+}
+
+func checkAssignmentIf(pass *analysis.Pass, is *ast.IfStmt) bool {
+ foundSomething := false
+
+ if is.Body != nil {
+ if checkAssignments(pass, is.Body.List) {
+ foundSomething = true
+ }
+ }
+ if is.Else != nil {
+ if block, isBlock := is.Else.(*ast.BlockStmt); isBlock {
+ if checkAssignments(pass, block.List) {
+ foundSomething = true
+ }
+ }
+ }
+
+ return foundSomething
+}
+
+func checkFocusContainer(pass *analysis.Pass, handler Handler, exp *ast.CallExpr) bool {
+ foundFocus := false
+ isFocus, id := handler.getFocusContainerName(exp)
+ if isFocus {
+ reportNewName(pass, id, id.Name[1:], id.Name)
+ foundFocus = true
+ }
+
+ if id != nil && isContainer(id.Name) {
+ for _, arg := range exp.Args {
+ if handler.isFocusSpec(arg) {
+ reportNoFix(pass, arg.Pos(), focusSpecFound)
+ foundFocus = true
+ } else if callExp, ok := arg.(*ast.CallExpr); ok {
+ if checkFocusContainer(pass, handler, callExp) { // handle table entries
+ foundFocus = true
+ }
+ }
+ }
+ }
+
+ return foundFocus
+}
+
+func reportNewName(pass *analysis.Pass, id *ast.Ident, newName string, oldExpr string) {
+ pass.Report(analysis.Diagnostic{
+ Pos: id.Pos(),
+ Message: fmt.Sprintf(focusContainerFound, newName),
+ SuggestedFixes: []analysis.SuggestedFix{
+ {
+ Message: fmt.Sprintf("should replace %s with %s", oldExpr, newName),
+ TextEdits: []analysis.TextEdit{
+ {
+ Pos: id.Pos(),
+ End: id.End(),
+ NewText: []byte(newName),
+ },
+ },
+ },
+ },
+ })
+}
+
+func reportNoFix(pass *analysis.Pass, pos token.Pos, message string, args ...any) {
+ if len(args) > 0 {
+ message = fmt.Sprintf(message, args...)
+ }
+
+ pass.Report(analysis.Diagnostic{
+ Pos: pos,
+ Message: message,
+ })
+}
+
+func is[T any](x any) bool {
+ _, matchType := x.(T)
+ return matchType
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/namehandler.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/namehandler.go
new file mode 100644
index 000000000..2ef9fe703
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/ginkgohandler/namehandler.go
@@ -0,0 +1,49 @@
+package ginkgohandler
+
+import (
+ "go/ast"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+// nameHandler is used when importing ginkgo without name; i.e.
+// import "github.com/onsi/ginkgo"
+//
+// or with a custom name; e.g.
+// import customname "github.com/onsi/ginkgo"
+type nameHandler string
+
+func (h nameHandler) HandleGinkgoSpecs(expr ast.Expr, config types.Config, pass *analysis.Pass) bool {
+ return handleGinkgoSpecs(expr, config, pass, h)
+}
+
+func (h nameHandler) getFocusContainerName(exp *ast.CallExpr) (bool, *ast.Ident) {
+ if sel, ok := exp.Fun.(*ast.SelectorExpr); ok {
+ if id, ok := sel.X.(*ast.Ident); ok && id.Name == string(h) {
+ return isFocusContainer(sel.Sel.Name), sel.Sel
+ }
+ }
+ return false, nil
+}
+
+func (h nameHandler) isWrapContainer(exp *ast.CallExpr) bool {
+ if sel, ok := exp.Fun.(*ast.SelectorExpr); ok {
+ if id, ok := sel.X.(*ast.Ident); ok && id.Name == string(h) {
+ return isWrapContainer(sel.Sel.Name)
+ }
+ }
+ return false
+
+}
+
+func (h nameHandler) isFocusSpec(exp ast.Expr) bool {
+ if selExp, ok := exp.(*ast.SelectorExpr); ok {
+ if x, ok := selExp.X.(*ast.Ident); ok && x.Name == string(h) {
+ return selExp.Sel.Name == focusSpec
+ }
+ }
+
+ return false
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/dothandler.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/dothandler.go
new file mode 100644
index 000000000..bd3b93992
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/dothandler.go
@@ -0,0 +1,99 @@
+package gomegahandler
+
+import (
+ "go/ast"
+
+ "golang.org/x/tools/go/analysis"
+
+ "github.com/nunnatsa/ginkgolinter/internal/gomegainfo"
+)
+
+// dotHandler is used when importing gomega with dot; i.e.
+// import . "github.com/onsi/gomega"
+type dotHandler struct {
+ pass *analysis.Pass
+}
+
+// GetActualFuncName returns the name of the gomega function, e.g. `Expect`
+func (h dotHandler) GetActualFuncName(expr *ast.CallExpr) (string, bool) {
+ switch actualFunc := expr.Fun.(type) {
+ case *ast.Ident:
+ return actualFunc.Name, true
+ case *ast.SelectorExpr:
+ if h.isGomegaVar(actualFunc.X) {
+ return actualFunc.Sel.Name, true
+ }
+
+ if x, ok := actualFunc.X.(*ast.CallExpr); ok {
+ return h.GetActualFuncName(x)
+ }
+
+ case *ast.CallExpr:
+ return h.GetActualFuncName(actualFunc)
+ }
+ return "", false
+}
+
+// ReplaceFunction replaces the function with another one, for fix suggestions
+func (dotHandler) ReplaceFunction(caller *ast.CallExpr, newExpr *ast.Ident) {
+ switch f := caller.Fun.(type) {
+ case *ast.Ident:
+ caller.Fun = newExpr
+ case *ast.SelectorExpr:
+ f.Sel = newExpr
+ }
+}
+
+func (dotHandler) GetNewWrapperMatcher(name string, existing *ast.CallExpr) *ast.CallExpr {
+ return &ast.CallExpr{
+ Fun: ast.NewIdent(name),
+ Args: []ast.Expr{existing},
+ }
+}
+
+func (h dotHandler) GetActualExpr(assertionFunc *ast.SelectorExpr) *ast.CallExpr {
+ actualExpr, ok := assertionFunc.X.(*ast.CallExpr)
+ if !ok {
+ return nil
+ }
+
+ switch fun := actualExpr.Fun.(type) {
+ case *ast.Ident:
+ return actualExpr
+ case *ast.SelectorExpr:
+ if gomegainfo.IsActualMethod(fun.Sel.Name) {
+ if h.isGomegaVar(fun.X) {
+ return actualExpr
+ }
+ } else {
+ return h.GetActualExpr(fun)
+ }
+ }
+ return nil
+}
+
+func (h dotHandler) GetActualExprClone(origFunc, funcClone *ast.SelectorExpr) *ast.CallExpr {
+ actualExpr, ok := funcClone.X.(*ast.CallExpr)
+ if !ok {
+ return nil
+ }
+
+ switch funClone := actualExpr.Fun.(type) {
+ case *ast.Ident:
+ return actualExpr
+ case *ast.SelectorExpr:
+ origFun := origFunc.X.(*ast.CallExpr).Fun.(*ast.SelectorExpr)
+ if gomegainfo.IsActualMethod(funClone.Sel.Name) {
+ if h.isGomegaVar(origFun.X) {
+ return actualExpr
+ }
+ } else {
+ return h.GetActualExprClone(origFun, funClone)
+ }
+ }
+ return nil
+}
+
+func (h dotHandler) isGomegaVar(x ast.Expr) bool {
+ return gomegainfo.IsGomegaVar(x, h.pass)
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/handler.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/handler.go
index 4290e7373..4dba604a4 100644
--- a/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/handler.go
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/handler.go
@@ -2,7 +2,8 @@ package gomegahandler
import (
"go/ast"
- "go/token"
+
+ "golang.org/x/tools/go/analysis"
)
const (
@@ -17,15 +18,15 @@ type Handler interface {
// ReplaceFunction replaces the function with another one, for fix suggestions
ReplaceFunction(*ast.CallExpr, *ast.Ident)
- getDefFuncName(expr *ast.CallExpr) string
+ GetActualExpr(assertionFunc *ast.SelectorExpr) *ast.CallExpr
- getFieldType(field *ast.Field) string
+ GetActualExprClone(origFunc, funcClone *ast.SelectorExpr) *ast.CallExpr
- GetActualExpr(assertionFunc *ast.SelectorExpr) *ast.CallExpr
+ GetNewWrapperMatcher(name string, existing *ast.CallExpr) *ast.CallExpr
}
// GetGomegaHandler returns a gomegar handler according to the way gomega was imported in the specific file
-func GetGomegaHandler(file *ast.File) Handler {
+func GetGomegaHandler(file *ast.File, pass *analysis.Pass) Handler {
for _, imp := range file.Imports {
if imp.Path.Value != importPath {
continue
@@ -33,209 +34,15 @@ func GetGomegaHandler(file *ast.File) Handler {
switch name := imp.Name.String(); {
case name == ".":
- return dotHandler{}
+ return &dotHandler{
+ pass: pass,
+ }
case name == "<nil>": // import with no local name
- return nameHandler("gomega")
+ return &nameHandler{name: "gomega", pass: pass}
default:
- return nameHandler(name)
+ return &nameHandler{name: name, pass: pass}
}
}
return nil // no gomega import; this file does not use gomega
}
-
-// dotHandler is used when importing gomega with dot; i.e.
-// import . "github.com/onsi/gomega"
-type dotHandler struct{}
-
-// GetActualFuncName returns the name of the gomega function, e.g. `Expect`
-func (h dotHandler) GetActualFuncName(expr *ast.CallExpr) (string, bool) {
- switch actualFunc := expr.Fun.(type) {
- case *ast.Ident:
- return actualFunc.Name, true
- case *ast.SelectorExpr:
- if isGomegaVar(actualFunc.X, h) {
- return actualFunc.Sel.Name, true
- }
-
- if x, ok := actualFunc.X.(*ast.CallExpr); ok {
- return h.GetActualFuncName(x)
- }
-
- case *ast.CallExpr:
- return h.GetActualFuncName(actualFunc)
- }
- return "", false
-}
-
-// ReplaceFunction replaces the function with another one, for fix suggestions
-func (dotHandler) ReplaceFunction(caller *ast.CallExpr, newExpr *ast.Ident) {
- switch f := caller.Fun.(type) {
- case *ast.Ident:
- caller.Fun = newExpr
- case *ast.SelectorExpr:
- f.Sel = newExpr
- }
-}
-
-func (dotHandler) getDefFuncName(expr *ast.CallExpr) string {
- if f, ok := expr.Fun.(*ast.Ident); ok {
- return f.Name
- }
- return ""
-}
-
-func (dotHandler) getFieldType(field *ast.Field) string {
- switch t := field.Type.(type) {
- case *ast.Ident:
- return t.Name
- case *ast.StarExpr:
- if name, ok := t.X.(*ast.Ident); ok {
- return name.Name
- }
- }
- return ""
-}
-
-// nameHandler is used when importing gomega without name; i.e.
-// import "github.com/onsi/gomega"
-//
-// or with a custom name; e.g.
-// import customname "github.com/onsi/gomega"
-type nameHandler string
-
-// GetActualFuncName returns the name of the gomega function, e.g. `Expect`
-func (g nameHandler) GetActualFuncName(expr *ast.CallExpr) (string, bool) {
- selector, ok := expr.Fun.(*ast.SelectorExpr)
- if !ok {
- return "", false
- }
-
- switch x := selector.X.(type) {
- case *ast.Ident:
- if x.Name != string(g) {
- if !isGomegaVar(x, g) {
- return "", false
- }
- }
-
- return selector.Sel.Name, true
-
- case *ast.CallExpr:
- return g.GetActualFuncName(x)
- }
-
- return "", false
-}
-
-// ReplaceFunction replaces the function with another one, for fix suggestions
-func (nameHandler) ReplaceFunction(caller *ast.CallExpr, newExpr *ast.Ident) {
- caller.Fun.(*ast.SelectorExpr).Sel = newExpr
-}
-
-func (g nameHandler) getDefFuncName(expr *ast.CallExpr) string {
- if sel, ok := expr.Fun.(*ast.SelectorExpr); ok {
- if f, ok := sel.X.(*ast.Ident); ok && f.Name == string(g) {
- return sel.Sel.Name
- }
- }
- return ""
-}
-
-func (g nameHandler) getFieldType(field *ast.Field) string {
- switch t := field.Type.(type) {
- case *ast.SelectorExpr:
- if id, ok := t.X.(*ast.Ident); ok {
- if id.Name == string(g) {
- return t.Sel.Name
- }
- }
- case *ast.StarExpr:
- if sel, ok := t.X.(*ast.SelectorExpr); ok {
- if x, ok := sel.X.(*ast.Ident); ok && x.Name == string(g) {
- return sel.Sel.Name
- }
- }
-
- }
- return ""
-}
-
-func isGomegaVar(x ast.Expr, handler Handler) bool {
- if i, ok := x.(*ast.Ident); ok {
- if i.Obj != nil && i.Obj.Kind == ast.Var {
- switch decl := i.Obj.Decl.(type) {
- case *ast.AssignStmt:
- if decl.Tok == token.DEFINE {
- if defFunc, ok := decl.Rhs[0].(*ast.CallExpr); ok {
- fName := handler.getDefFuncName(defFunc)
- switch fName {
- case "NewGomega", "NewWithT", "NewGomegaWithT":
- return true
- }
- }
- }
- case *ast.Field:
- name := handler.getFieldType(decl)
- switch name {
- case "Gomega", "WithT", "GomegaWithT":
- return true
- }
- }
- }
- }
- return false
-}
-
-func (h dotHandler) GetActualExpr(assertionFunc *ast.SelectorExpr) *ast.CallExpr {
- actualExpr, ok := assertionFunc.X.(*ast.CallExpr)
- if !ok {
- return nil
- }
-
- switch fun := actualExpr.Fun.(type) {
- case *ast.Ident:
- return actualExpr
- case *ast.SelectorExpr:
- if isHelperMethods(fun.Sel.Name) {
- return h.GetActualExpr(fun)
- }
- if isGomegaVar(fun.X, h) {
- return actualExpr
- }
- }
- return nil
-}
-
-func (g nameHandler) GetActualExpr(assertionFunc *ast.SelectorExpr) *ast.CallExpr {
- actualExpr, ok := assertionFunc.X.(*ast.CallExpr)
- if !ok {
- return nil
- }
-
- switch fun := actualExpr.Fun.(type) {
- case *ast.Ident:
- return actualExpr
- case *ast.SelectorExpr:
- if x, ok := fun.X.(*ast.Ident); ok && x.Name == string(g) {
- return actualExpr
- }
- if isHelperMethods(fun.Sel.Name) {
- return g.GetActualExpr(fun)
- }
-
- if isGomegaVar(fun.X, g) {
- return actualExpr
- }
- }
- return nil
-}
-
-func isHelperMethods(funcName string) bool {
- switch funcName {
- case "WithOffset", "WithTimeout", "WithPolling", "Within", "ProbeEvery", "WithContext", "WithArguments", "MustPassRepeatedly":
- return true
- }
-
- return false
-}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/namedhandler.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/namedhandler.go
new file mode 100644
index 000000000..712442426
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegahandler/namedhandler.go
@@ -0,0 +1,112 @@
+package gomegahandler
+
+import (
+ "go/ast"
+
+ "github.com/nunnatsa/ginkgolinter/internal/gomegainfo"
+
+ "golang.org/x/tools/go/analysis"
+)
+
+// nameHandler is used when importing gomega without name; i.e.
+// import "github.com/onsi/gomega"
+//
+// or with a custom name; e.g.
+// import customname "github.com/onsi/gomega"
+type nameHandler struct {
+ name string
+ pass *analysis.Pass
+}
+
+// GetActualFuncName returns the name of the gomega function, e.g. `Expect`
+func (g nameHandler) GetActualFuncName(expr *ast.CallExpr) (string, bool) {
+ selector, ok := expr.Fun.(*ast.SelectorExpr)
+ if !ok {
+ return "", false
+ }
+
+ switch x := selector.X.(type) {
+ case *ast.Ident:
+ if x.Name != g.name {
+ if !g.isGomegaVar(x) {
+ return "", false
+ }
+ }
+
+ return selector.Sel.Name, true
+
+ case *ast.CallExpr:
+ return g.GetActualFuncName(x)
+ }
+
+ return "", false
+}
+
+// ReplaceFunction replaces the function with another one, for fix suggestions
+func (nameHandler) ReplaceFunction(caller *ast.CallExpr, newExpr *ast.Ident) {
+ caller.Fun.(*ast.SelectorExpr).Sel = newExpr
+}
+
+func (g nameHandler) isGomegaVar(x ast.Expr) bool {
+ return gomegainfo.IsGomegaVar(x, g.pass)
+}
+
+func (g nameHandler) GetActualExpr(assertionFunc *ast.SelectorExpr) *ast.CallExpr {
+ actualExpr, ok := assertionFunc.X.(*ast.CallExpr)
+ if !ok {
+ return nil
+ }
+
+ switch fun := actualExpr.Fun.(type) {
+ case *ast.Ident:
+ return actualExpr
+ case *ast.SelectorExpr:
+ if x, ok := fun.X.(*ast.Ident); ok && x.Name == g.name {
+ return actualExpr
+ }
+ if gomegainfo.IsActualMethod(fun.Sel.Name) {
+ if g.isGomegaVar(fun.X) {
+ return actualExpr
+ }
+ } else {
+ return g.GetActualExpr(fun)
+ }
+ }
+ return nil
+}
+
+func (g nameHandler) GetActualExprClone(origFunc, funcClone *ast.SelectorExpr) *ast.CallExpr {
+ actualExpr, ok := funcClone.X.(*ast.CallExpr)
+ if !ok {
+ return nil
+ }
+
+ switch funClone := actualExpr.Fun.(type) {
+ case *ast.Ident:
+ return actualExpr
+ case *ast.SelectorExpr:
+ if x, ok := funClone.X.(*ast.Ident); ok && x.Name == g.name {
+ return actualExpr
+ }
+ origFun := origFunc.X.(*ast.CallExpr).Fun.(*ast.SelectorExpr)
+ if gomegainfo.IsActualMethod(funClone.Sel.Name) {
+ if g.isGomegaVar(origFun.X) {
+ return actualExpr
+ }
+ } else {
+ return g.GetActualExprClone(origFun, funClone)
+ }
+
+ }
+ return nil
+}
+
+func (g nameHandler) GetNewWrapperMatcher(name string, existing *ast.CallExpr) *ast.CallExpr {
+ return &ast.CallExpr{
+ Fun: &ast.SelectorExpr{
+ X: ast.NewIdent(g.name),
+ Sel: ast.NewIdent(name),
+ },
+ Args: []ast.Expr{existing},
+ }
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegainfo/gomegainfo.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegainfo/gomegainfo.go
new file mode 100644
index 000000000..ca45a34b2
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/gomegainfo/gomegainfo.go
@@ -0,0 +1,113 @@
+package gomegainfo
+
+import (
+ "go/ast"
+ gotypes "go/types"
+ "regexp"
+
+ "golang.org/x/tools/go/analysis"
+)
+
+const ( // gomega actual method names
+ expect = "Expect"
+ expectWithOffset = "ExpectWithOffset"
+ omega = "Ω"
+ eventually = "Eventually"
+ eventuallyWithOffset = "EventuallyWithOffset"
+ consistently = "Consistently"
+ consistentlyWithOffset = "ConsistentlyWithOffset"
+)
+
+const ( // assertion methods
+ to = "To"
+ toNot = "ToNot"
+ notTo = "NotTo"
+ should = "Should"
+ shouldNot = "ShouldNot"
+)
+
+var funcOffsetMap = map[string]int{
+ expect: 0,
+ expectWithOffset: 1,
+ omega: 0,
+ eventually: 0,
+ eventuallyWithOffset: 1,
+ consistently: 0,
+ consistentlyWithOffset: 1,
+}
+
+func IsActualMethod(name string) bool {
+ _, found := funcOffsetMap[name]
+ return found
+}
+
+func ActualArgOffset(methodName string) int {
+ funcOffset, ok := funcOffsetMap[methodName]
+ if !ok {
+ return -1
+ }
+ return funcOffset
+}
+
+func GetAllowedAssertionMethods(actualMethodName string) string {
+ switch actualMethodName {
+ case expect, expectWithOffset:
+ return `"To()", "ToNot()" or "NotTo()"`
+
+ case eventually, eventuallyWithOffset, consistently, consistentlyWithOffset:
+ return `"Should()" or "ShouldNot()"`
+
+ case omega:
+ return `"Should()", "To()", "ShouldNot()", "ToNot()" or "NotTo()"`
+
+ default:
+ return ""
+ }
+}
+
+var asyncFuncSet = map[string]struct{}{
+ eventually: {},
+ eventuallyWithOffset: {},
+ consistently: {},
+ consistentlyWithOffset: {},
+}
+
+func IsAsyncActualMethod(name string) bool {
+ _, ok := asyncFuncSet[name]
+ return ok
+}
+
+func IsAssertionFunc(name string) bool {
+ switch name {
+ case to, toNot, notTo, should, shouldNot:
+ return true
+ }
+ return false
+}
+
+var gomegaTypeRegex = regexp.MustCompile(`github\.com/onsi/gomega/(?:internal|types)\.Gomega`)
+
+func IsGomegaVar(x ast.Expr, pass *analysis.Pass) bool {
+ if tx, ok := pass.TypesInfo.Types[x]; ok {
+ return IsGomegaType(tx.Type)
+ }
+
+ return false
+}
+
+func IsGomegaType(t gotypes.Type) bool {
+ var typeStr string
+ switch ttx := t.(type) {
+ case *gotypes.Pointer:
+ tp := ttx.Elem()
+ typeStr = tp.String()
+
+ case *gotypes.Named:
+ typeStr = ttx.String()
+
+ default:
+ return false
+ }
+
+ return gomegaTypeRegex.MatchString(typeStr)
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/interfaces/interfaces.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/interfaces/interfaces.go
index dafeacd4f..91849ca56 100644
--- a/vendor/github.com/nunnatsa/ginkgolinter/internal/interfaces/interfaces.go
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/interfaces/interfaces.go
@@ -72,5 +72,5 @@ func ImplementsError(t gotypes.Type) bool {
}
func ImplementsGomegaMatcher(t gotypes.Type) bool {
- return gotypes.Implements(t, gomegaMatcherType)
+ return t != nil && gotypes.Implements(t, gomegaMatcherType)
}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/intervals/intervals.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/intervals/intervals.go
index b8166bdb2..51d55166d 100644
--- a/vendor/github.com/nunnatsa/ginkgolinter/internal/intervals/intervals.go
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/intervals/intervals.go
@@ -1,285 +1,166 @@
package intervals
import (
- "errors"
"go/ast"
"go/constant"
"go/token"
gotypes "go/types"
- "strconv"
"time"
"golang.org/x/tools/go/analysis"
-
- "github.com/nunnatsa/ginkgolinter/internal/gomegahandler"
- "github.com/nunnatsa/ginkgolinter/internal/reports"
)
-type noDurationIntervalErr struct {
- value string
-}
-
-func (err noDurationIntervalErr) Error() string {
- return "only use time.Duration for timeout and polling in Eventually() or Consistently()"
-}
-
-func CheckIntervals(pass *analysis.Pass, expr *ast.CallExpr, actualExpr *ast.CallExpr, reportBuilder *reports.Builder, handler gomegahandler.Handler, timePkg string, funcIndex int) {
- var (
- timeout time.Duration
- polling time.Duration
- err error
- )
-
- timeoutOffset := funcIndex + 1
- if len(actualExpr.Args) > timeoutOffset {
- timeout, err = getDuration(pass, actualExpr.Args[timeoutOffset], timePkg)
- if err != nil {
- suggestFix := false
- if tryFixIntDuration(expr, err, handler, timePkg, timeoutOffset) {
- suggestFix = true
- }
- reportBuilder.AddIssue(suggestFix, err.Error())
- }
- pollingOffset := funcIndex + 2
- if len(actualExpr.Args) > pollingOffset {
- polling, err = getDuration(pass, actualExpr.Args[pollingOffset], timePkg)
- if err != nil {
- suggestFix := false
- if tryFixIntDuration(expr, err, handler, timePkg, pollingOffset) {
- suggestFix = true
+func GetDuration(pass *analysis.Pass, argOffset int, origInterval, intervalClone ast.Expr, timePkg string) DurationValue {
+ tv := pass.TypesInfo.Types[origInterval]
+ argType := tv.Type
+ if durType, ok := argType.(*gotypes.Named); ok {
+ if durType.String() == "time.Duration" {
+ if tv.Value != nil {
+ if val, ok := constant.Int64Val(tv.Value); ok {
+ return &RealDurationValue{
+ dur: time.Duration(val),
+ expr: intervalClone,
+ }
}
- reportBuilder.AddIssue(suggestFix, err.Error())
+ }
+ return &UnknownDurationTypeValue{
+ expr: intervalClone,
}
}
}
- selExp := expr.Fun.(*ast.SelectorExpr)
- for {
- call, ok := selExp.X.(*ast.CallExpr)
- if !ok {
- break
- }
-
- fun, ok := call.Fun.(*ast.SelectorExpr)
- if !ok {
- break
- }
-
- switch fun.Sel.Name {
- case "WithTimeout", "Within":
- if timeout != 0 {
- reportBuilder.AddIssue(false, "timeout defined more than once")
- } else if len(call.Args) == 1 {
- timeout, err = getDurationFromValue(pass, call.Args[0], timePkg)
- if err != nil {
- reportBuilder.AddIssue(false, err.Error())
+ if basic, ok := argType.(*gotypes.Basic); ok && tv.Value != nil {
+ if basic.Info()&gotypes.IsInteger != 0 {
+ if num, ok := constant.Int64Val(tv.Value); ok {
+ return &NumericDurationValue{
+ timePkg: timePkg,
+ numSeconds: num,
+ offset: argOffset,
+ dur: time.Duration(num) * time.Second,
+ expr: intervalClone,
}
}
+ }
- case "WithPolling", "ProbeEvery":
- if polling != 0 {
- reportBuilder.AddIssue(false, "polling defined more than once")
- } else if len(call.Args) == 1 {
- polling, err = getDurationFromValue(pass, call.Args[0], timePkg)
- if err != nil {
- reportBuilder.AddIssue(false, err.Error())
+ if basic.Info()&gotypes.IsFloat != 0 {
+ if num, ok := constant.Float64Val(tv.Value); ok {
+ return &NumericDurationValue{
+ timePkg: timePkg,
+ numSeconds: int64(num),
+ offset: argOffset,
+ dur: time.Duration(num) * time.Second,
+ expr: intervalClone,
}
}
}
-
- selExp = fun
}
- if timeout != 0 && polling != 0 && timeout < polling {
- reportBuilder.AddIssue(false, "timeout must not be shorter than the polling interval")
- }
+ return &UnknownDurationValue{expr: intervalClone}
}
-func tryFixIntDuration(expr *ast.CallExpr, err error, handler gomegahandler.Handler, timePkg string, offset int) bool {
- suggestFix := false
- var durErr noDurationIntervalErr
- if errors.As(err, &durErr) {
- if len(durErr.value) > 0 {
- actualExpr := handler.GetActualExpr(expr.Fun.(*ast.SelectorExpr))
- var newArg ast.Expr
- second := &ast.SelectorExpr{
- Sel: ast.NewIdent("Second"),
- X: ast.NewIdent(timePkg),
+func GetDurationFromValue(pass *analysis.Pass, orig, clone ast.Expr) DurationValue {
+ tv := pass.TypesInfo.Types[orig]
+ interval := tv.Value
+ if interval != nil {
+ if val, ok := constant.Int64Val(interval); ok {
+ return RealDurationValue{
+ dur: time.Duration(val),
+ expr: orig,
}
- if durErr.value == "1" {
- newArg = second
- } else {
- newArg = &ast.BinaryExpr{
- X: second,
- Op: token.MUL,
- Y: actualExpr.Args[offset],
- }
- }
- actualExpr.Args[offset] = newArg
- suggestFix = true
}
}
-
- return suggestFix
+ return UnknownDurationTypeValue{expr: clone}
}
-func getDuration(pass *analysis.Pass, interval ast.Expr, timePkg string) (time.Duration, error) {
- argType := pass.TypesInfo.TypeOf(interval)
- if durType, ok := argType.(*gotypes.Named); ok {
- if durType.Obj().Name() == "Duration" && durType.Obj().Pkg().Name() == "time" {
- return getDurationFromValue(pass, interval, timePkg)
- }
- }
+type DurationValue interface {
+ Duration() time.Duration
+}
- value := ""
- switch val := interval.(type) {
- case *ast.BasicLit:
- if val.Kind == token.INT {
- value = val.Value
- }
- case *ast.Ident:
- i, err := getConstDuration(pass, val, timePkg)
- if err != nil || i == 0 {
- return 0, nil
- }
- value = val.Name
- }
+type NumericValue interface {
+ GetOffset() int
+ GetDurationExpr() ast.Expr
+}
+type RealDurationValue struct {
+ dur time.Duration
+ expr ast.Expr
+}
- return 0, noDurationIntervalErr{value: value}
+func (r RealDurationValue) Duration() time.Duration {
+ return r.dur
}
-func getDurationFromValue(pass *analysis.Pass, interval ast.Expr, timePkg string) (time.Duration, error) {
- switch dur := interval.(type) {
- case *ast.SelectorExpr:
- ident, ok := dur.X.(*ast.Ident)
- if ok {
- if ident.Name == timePkg {
- return getTimeDurationValue(dur)
- }
- return getDurationFromValue(pass, dur.Sel, timePkg)
- }
- case *ast.BinaryExpr:
- return getBinaryExprDuration(pass, dur, timePkg)
+type NumericDurationValue struct {
+ timePkg string
+ numSeconds int64
+ offset int
+ dur time.Duration
+ expr ast.Expr
+}
- case *ast.Ident:
- return getConstDuration(pass, dur, timePkg)
- }
+func (r *NumericDurationValue) Duration() time.Duration {
+ return r.dur
+}
- return 0, nil
+func (r *NumericDurationValue) GetOffset() int {
+ return r.offset
}
-func getConstDuration(pass *analysis.Pass, ident *ast.Ident, timePkg string) (time.Duration, error) {
- o := pass.TypesInfo.ObjectOf(ident)
- if o != nil {
- if c, ok := o.(*gotypes.Const); ok {
- if c.Val().Kind() == constant.Int {
- i, err := strconv.Atoi(c.Val().String())
- if err != nil {
- return 0, nil
- }
- return time.Duration(i), nil
- }
- }
+func (r *NumericDurationValue) GetDurationExpr() ast.Expr {
+ var newArg ast.Expr
+ second := &ast.SelectorExpr{
+ Sel: ast.NewIdent("Second"),
+ X: ast.NewIdent(r.timePkg),
}
- if ident.Obj != nil && ident.Obj.Kind == ast.Con && ident.Obj.Decl != nil {
- if vals, ok := ident.Obj.Decl.(*ast.ValueSpec); ok {
- if len(vals.Values) == 1 {
- switch val := vals.Values[0].(type) {
- case *ast.BasicLit:
- if val.Kind == token.INT {
- i, err := strconv.Atoi(val.Value)
- if err != nil {
- return 0, nil
- }
- return time.Duration(i), nil
- }
- return 0, nil
- case *ast.BinaryExpr:
- return getBinaryExprDuration(pass, val, timePkg)
- }
- }
+ if r.numSeconds == 1 {
+ newArg = second
+ } else {
+ newArg = &ast.BinaryExpr{
+ X: second,
+ Op: token.MUL,
+ Y: r.expr,
}
}
- return 0, nil
+ return newArg
}
-func getTimeDurationValue(dur *ast.SelectorExpr) (time.Duration, error) {
- switch dur.Sel.Name {
- case "Nanosecond":
- return time.Nanosecond, nil
- case "Microsecond":
- return time.Microsecond, nil
- case "Millisecond":
- return time.Millisecond, nil
- case "Second":
- return time.Second, nil
- case "Minute":
- return time.Minute, nil
- case "Hour":
- return time.Hour, nil
- default:
- return 0, errors.New("unknown duration value") // should never happen
- }
+type UnknownDurationValue struct {
+ expr ast.Expr
}
-func getBinaryExprDuration(pass *analysis.Pass, expr *ast.BinaryExpr, timePkg string) (time.Duration, error) {
- x, err := getBinaryDurValue(pass, expr.X, timePkg)
- if err != nil || x == 0 {
- return 0, nil
- }
- y, err := getBinaryDurValue(pass, expr.Y, timePkg)
- if err != nil || y == 0 {
- return 0, nil
- }
+func (r UnknownDurationValue) Duration() time.Duration {
+ return 0
+}
- switch expr.Op {
- case token.ADD:
- return x + y, nil
- case token.SUB:
- val := x - y
- if val > 0 {
- return val, nil
- }
- return 0, nil
- case token.MUL:
- return x * y, nil
- case token.QUO:
- if y == 0 {
- return 0, nil
- }
- return x / y, nil
- case token.REM:
- if y == 0 {
- return 0, nil
- }
- return x % y, nil
- default:
- return 0, nil
- }
+type UnknownNumericValue struct {
+ expr ast.Expr
+ offset int
}
-func getBinaryDurValue(pass *analysis.Pass, expr ast.Expr, timePkg string) (time.Duration, error) {
- switch x := expr.(type) {
- case *ast.SelectorExpr:
- return getDurationFromValue(pass, x, timePkg)
- case *ast.BinaryExpr:
- return getBinaryExprDuration(pass, x, timePkg)
- case *ast.BasicLit:
- if x.Kind == token.INT {
- val, err := strconv.Atoi(x.Value)
- if err != nil {
- return 0, err
- }
- return time.Duration(val), nil
- }
- case *ast.ParenExpr:
- return getBinaryDurValue(pass, x.X, timePkg)
+func (r UnknownNumericValue) Duration() time.Duration {
+ return 0
+}
- case *ast.Ident:
- return getConstDuration(pass, x, timePkg)
+func (r UnknownNumericValue) GetDurationExpr() ast.Expr {
+ return &ast.BinaryExpr{
+ X: &ast.SelectorExpr{
+ Sel: ast.NewIdent("Second"),
+ X: ast.NewIdent("time"),
+ },
+ Op: token.MUL,
+ Y: r.expr,
}
+}
+
+func (r UnknownNumericValue) GetOffset() int {
+ return r.offset
+}
+
+type UnknownDurationTypeValue struct {
+ expr ast.Expr
+}
- return 0, nil
+func (r UnknownDurationTypeValue) Duration() time.Duration {
+ return 0
}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/reports/report-builder.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/reports/report-builder.go
index c7f931ca7..dee88bd2c 100644
--- a/vendor/github.com/nunnatsa/ginkgolinter/internal/reports/report-builder.go
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/reports/report-builder.go
@@ -1,13 +1,13 @@
package reports
import (
- "bytes"
"fmt"
"go/ast"
- "go/printer"
"go/token"
"strings"
+ "github.com/nunnatsa/ginkgolinter/internal/formatter"
+
"golang.org/x/tools/go/analysis"
)
@@ -18,19 +18,25 @@ type Builder struct {
issues []string
fixOffer string
suggestFix bool
+ formatter *formatter.GoFmtFormatter
}
-func NewBuilder(fset *token.FileSet, oldExpr ast.Expr) *Builder {
+func NewBuilder(oldExpr ast.Expr, expFormatter *formatter.GoFmtFormatter) *Builder {
b := &Builder{
pos: oldExpr.Pos(),
end: oldExpr.End(),
- oldExpr: goFmt(fset, oldExpr),
+ oldExpr: expFormatter.Format(oldExpr),
suggestFix: false,
+ formatter: expFormatter,
}
return b
}
+func (b *Builder) OldExp() string {
+ return b.oldExpr
+}
+
func (b *Builder) AddIssue(suggestFix bool, issue string, args ...any) {
if len(args) > 0 {
issue = fmt.Sprintf(issue, args...)
@@ -42,9 +48,11 @@ func (b *Builder) AddIssue(suggestFix bool, issue string, args ...any) {
}
}
-func (b *Builder) SetFixOffer(fset *token.FileSet, fixOffer ast.Expr) {
- if offer := goFmt(fset, fixOffer); offer != b.oldExpr {
- b.fixOffer = offer
+func (b *Builder) SetFixOffer(fixOffer ast.Expr) {
+ if b.suggestFix {
+ if offer := b.formatter.Format(fixOffer); offer != b.oldExpr {
+ b.fixOffer = offer
+ }
}
}
@@ -76,10 +84,8 @@ func (b *Builder) Build() analysis.Diagnostic {
return diagnostic
}
-func goFmt(fset *token.FileSet, x ast.Expr) string {
- var b bytes.Buffer
- _ = printer.Fprint(&b, fset, x)
- return b.String()
+func (b *Builder) FormatExpr(expr ast.Expr) string {
+ return b.formatter.Format(expr)
}
func (b *Builder) getMessage() string {
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/asyncfunccallrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/asyncfunccallrule.go
new file mode 100644
index 000000000..e4eda7f6c
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/asyncfunccallrule.go
@@ -0,0 +1,41 @@
+package rules
+
+import (
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+const valueInEventually = "use a function call in %[1]s. This actually checks nothing, because %[1]s receives the function returned value, instead of function itself, and this value is never changed"
+
+// AsyncFuncCallRule checks that there is no function call actual parameter,
+// in an async actual method (e.g. Eventually).
+//
+// Async actual methods should get the function itself, not a function call, because
+// then there is no async operation at all, and we're waiting for the function to be
+// returned before calling the assertion.
+//
+// We do allow functions that return a function, a channel or a pointer.
+type AsyncFuncCallRule struct{}
+
+func (r AsyncFuncCallRule) isApplied(gexp *expression.GomegaExpression, config types.Config) bool {
+ if bool(config.SuppressAsync) || !gexp.IsAsync() {
+ return false
+ }
+
+ if asyncArg := gexp.GetAsyncActualArg(); asyncRules != nil {
+ return !asyncArg.IsValid()
+ }
+
+ return false
+}
+
+func (r AsyncFuncCallRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool {
+ if r.isApplied(gexp, config) {
+
+ gexp.AppendWithArgsToActual()
+
+ reportBuilder.AddIssue(true, valueInEventually, gexp.GetActualFuncName())
+ }
+ return false
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/asyncsucceedrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/asyncsucceedrule.go
new file mode 100644
index 000000000..803c705de
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/asyncsucceedrule.go
@@ -0,0 +1,30 @@
+package rules
+
+import (
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/actual"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+type AsyncSucceedRule struct{}
+
+func (AsyncSucceedRule) isApply(gexp *expression.GomegaExpression) bool {
+ return gexp.IsAsync() &&
+ gexp.MatcherTypeIs(matcher.SucceedMatcherType) &&
+ gexp.ActualArgTypeIs(actual.FuncSigArgType) &&
+ !gexp.ActualArgTypeIs(actual.ErrorTypeArgType|actual.GomegaParamArgType)
+}
+
+func (r AsyncSucceedRule) Apply(gexp *expression.GomegaExpression, _ types.Config, reportBuilder *reports.Builder) bool {
+ if r.isApply(gexp) {
+ if gexp.ActualArgTypeIs(actual.MultiRetsArgType) {
+ reportBuilder.AddIssue(false, "Success matcher does not support multiple values")
+ } else {
+ reportBuilder.AddIssue(false, "Success matcher only support a single error value, or function with Gomega as its first parameter")
+ }
+ }
+
+ return false
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/asynctimeintervalsrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/asynctimeintervalsrule.go
new file mode 100644
index 000000000..45953ec01
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/asynctimeintervalsrule.go
@@ -0,0 +1,79 @@
+package rules
+
+import (
+ "go/ast"
+ "time"
+
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/intervals"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+const (
+ multipleTimeouts = "timeout defined more than once"
+ multiplePolling = "polling defined more than once"
+ onlyUseTimeDurationForInterval = "only use time.Duration for timeout and polling in Eventually() or Consistently()"
+ pollingGreaterThanTimeout = "timeout must not be shorter than the polling interval"
+)
+
+type AsyncTimeIntervalsRule struct{}
+
+func (r AsyncTimeIntervalsRule) isApplied(gexp *expression.GomegaExpression, config types.Config) bool {
+ return !bool(config.SuppressAsync) && bool(config.ValidateAsyncIntervals) && gexp.IsAsync()
+}
+
+func (r AsyncTimeIntervalsRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool {
+ if r.isApplied(gexp, config) {
+ asyncArg := gexp.GetAsyncActualArg()
+ if asyncArg.TooManyTimeouts() {
+ reportBuilder.AddIssue(false, multipleTimeouts)
+ }
+
+ if asyncArg.TooManyPolling() {
+ reportBuilder.AddIssue(false, multiplePolling)
+ }
+
+ timeoutDuration := checkInterval(gexp, asyncArg.Timeout(), reportBuilder)
+ pollingDuration := checkInterval(gexp, asyncArg.Polling(), reportBuilder)
+
+ if timeoutDuration > 0 && pollingDuration > 0 && pollingDuration > timeoutDuration {
+ reportBuilder.AddIssue(false, pollingGreaterThanTimeout)
+ }
+ }
+
+ return false
+}
+
+func checkInterval(gexp *expression.GomegaExpression, durVal intervals.DurationValue, reportBuilder *reports.Builder) time.Duration {
+ if durVal != nil {
+ switch to := durVal.(type) {
+ case *intervals.RealDurationValue, *intervals.UnknownDurationTypeValue:
+
+ case *intervals.NumericDurationValue:
+ if checkNumericInterval(gexp.GetActualClone(), to) {
+ reportBuilder.AddIssue(true, onlyUseTimeDurationForInterval)
+ }
+
+ case *intervals.UnknownDurationValue:
+ reportBuilder.AddIssue(true, onlyUseTimeDurationForInterval)
+ }
+
+ return durVal.Duration()
+ }
+
+ return 0
+}
+
+func checkNumericInterval(intervalMethod *ast.CallExpr, interval intervals.DurationValue) bool {
+ if interval != nil {
+ if numVal, ok := interval.(intervals.NumericValue); ok {
+ if offset := numVal.GetOffset(); offset > 0 {
+ intervalMethod.Args[offset] = numVal.GetDurationExpr()
+ return true
+ }
+ }
+ }
+
+ return false
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/caprule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/caprule.go
new file mode 100644
index 000000000..e3ad45d96
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/caprule.go
@@ -0,0 +1,128 @@
+package rules
+
+import (
+ "go/token"
+
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/actual"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+const wrongCapWarningTemplate = "wrong cap assertion"
+
+// CapRule does not allow using the cap() function in actual with numeric comparison.
+// it suggests to use the HaveLen matcher, instead.
+type CapRule struct{}
+
+func (r *CapRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool {
+
+ if !r.isApplied(gexp, config) {
+ return false
+ }
+
+ if r.fixExpression(gexp) {
+ reportBuilder.AddIssue(true, wrongCapWarningTemplate)
+ return true
+ }
+ return false
+}
+
+func (r *CapRule) isApplied(gexp *expression.GomegaExpression, config types.Config) bool {
+ if config.SuppressLen {
+ return false
+ }
+
+ //matcherType := gexp.matcher.GetMatcherInfo().Type()
+ if gexp.ActualArgTypeIs(actual.CapFuncActualArgType) {
+ if gexp.MatcherTypeIs(matcher.EqualMatcherType | matcher.BeZeroMatcherType) {
+ return true
+ }
+
+ if gexp.MatcherTypeIs(matcher.BeNumericallyMatcherType) {
+ mtchr := gexp.GetMatcherInfo().(*matcher.BeNumericallyMatcher)
+ return mtchr.GetOp() == token.EQL || mtchr.GetOp() == token.NEQ || gexp.MatcherTypeIs(matcher.EqualZero|matcher.GreaterThanZero)
+ }
+ }
+
+ if gexp.ActualArgTypeIs(actual.CapComparisonActualArgType) && gexp.MatcherTypeIs(matcher.BeTrueMatcherType|matcher.BeFalseMatcherType|matcher.EqualBoolValueMatcherType) {
+ return true
+ }
+
+ return false
+}
+
+func (r *CapRule) fixExpression(gexp *expression.GomegaExpression) bool {
+ if gexp.ActualArgTypeIs(actual.CapFuncActualArgType) {
+ return r.fixEqual(gexp)
+ }
+
+ if gexp.ActualArgTypeIs(actual.CapComparisonActualArgType) {
+ return r.fixComparison(gexp)
+ }
+
+ return false
+}
+
+func (r *CapRule) fixEqual(gexp *expression.GomegaExpression) bool {
+ matcherInfo := gexp.GetMatcherInfo()
+ switch mtchr := matcherInfo.(type) {
+ case *matcher.EqualMatcher:
+ gexp.SetMatcherCap(mtchr.GetValueExpr())
+
+ case *matcher.BeZeroMatcher:
+ gexp.SetMatcherCapZero()
+
+ case *matcher.BeNumericallyMatcher:
+ if !r.handleBeNumerically(gexp, mtchr) {
+ return false
+ }
+
+ default:
+ return false
+ }
+
+ gexp.ReplaceActualWithItsFirstArg()
+
+ return true
+}
+
+func (r *CapRule) fixComparison(gexp *expression.GomegaExpression) bool {
+ actl := gexp.GetActualArg().(*actual.FuncComparisonPayload)
+ if op := actl.GetOp(); op == token.NEQ {
+ gexp.ReverseAssertionFuncLogic()
+ } else if op != token.EQL {
+ return false
+ }
+
+ gexp.SetMatcherCap(actl.GetValueExpr())
+ gexp.ReplaceActual(actl.GetFuncArg())
+
+ if gexp.MatcherTypeIs(matcher.BoolValueFalse) {
+ gexp.ReverseAssertionFuncLogic()
+ }
+
+ return true
+}
+
+func (r *CapRule) handleBeNumerically(gexp *expression.GomegaExpression, matcher *matcher.BeNumericallyMatcher) bool {
+ op := matcher.GetOp()
+ val := matcher.GetValue()
+ isValZero := val.String() == "0"
+ isValOne := val.String() == "1"
+
+ if (op == token.GTR && isValZero) || (op == token.GEQ && isValOne) {
+ gexp.ReverseAssertionFuncLogic()
+ gexp.SetMatcherCapZero()
+ } else if op == token.EQL {
+ gexp.SetMatcherCap(matcher.GetValueExpr())
+ } else if op == token.NEQ {
+ gexp.ReverseAssertionFuncLogic()
+ gexp.SetMatcherCap(matcher.GetValueExpr())
+ } else {
+ return false
+ }
+
+ return true
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/comparepointerrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/comparepointerrule.go
new file mode 100644
index 000000000..dcbea1bc9
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/comparepointerrule.go
@@ -0,0 +1,64 @@
+package rules
+
+import (
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/actual"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+const comparePointerToValue = "comparing a pointer to a value will always fail"
+
+type ComparePointRule struct{}
+
+func (r ComparePointRule) isApplied(gexp *expression.GomegaExpression) bool {
+ actl, ok := gexp.GetActualArg().(*actual.RegularArgPayload)
+ if !ok {
+ return false
+ }
+
+ return actl.IsPointer()
+}
+
+func (r ComparePointRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool {
+ if !r.isApplied(gexp) {
+ return false
+ }
+
+ switch mtchr := gexp.GetMatcherInfo().(type) {
+ case *matcher.EqualMatcher:
+ if mtchr.IsPointer() || mtchr.IsInterface() {
+ return false
+ }
+
+ case *matcher.BeEquivalentToMatcher:
+ if mtchr.IsPointer() || mtchr.IsInterface() || mtchr.IsNil() {
+ return false
+ }
+
+ case *matcher.BeIdenticalToMatcher:
+ if mtchr.IsPointer() || mtchr.IsInterface() || mtchr.IsNil() {
+ return false
+ }
+
+ case *matcher.EqualNilMatcher:
+ return false
+
+ case *matcher.BeTrueMatcher,
+ *matcher.BeFalseMatcher,
+ *matcher.BeNumericallyMatcher,
+ *matcher.EqualTrueMatcher,
+ *matcher.EqualFalseMatcher:
+
+ default:
+ return false
+ }
+
+ getMatcherOnlyRules().Apply(gexp, config, reportBuilder)
+
+ gexp.SetMatcherHaveValue()
+ reportBuilder.AddIssue(true, comparePointerToValue)
+
+ return true
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/comparisonrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/comparisonrule.go
new file mode 100644
index 000000000..fb38529e0
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/comparisonrule.go
@@ -0,0 +1,75 @@
+package rules
+
+import (
+ "go/token"
+
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/actual"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+const wrongCompareWarningTemplate = "wrong comparison assertion"
+
+type ComparisonRule struct{}
+
+func (r ComparisonRule) isApplied(gexp *expression.GomegaExpression, config types.Config) bool {
+ if config.SuppressCompare {
+ return false
+ }
+
+ return gexp.ActualArgTypeIs(actual.ComparisonActualArgType)
+}
+
+func (r ComparisonRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool {
+ if !r.isApplied(gexp, config) {
+ return false
+ }
+
+ actl, ok := gexp.GetActualArg().(actual.ComparisonActualPayload)
+ if !ok {
+ return false
+ }
+
+ switch actl.GetOp() {
+ case token.EQL:
+ r.handleEqualComparison(gexp, actl)
+
+ case token.NEQ:
+ gexp.ReverseAssertionFuncLogic()
+ r.handleEqualComparison(gexp, actl)
+ case token.GTR, token.GEQ, token.LSS, token.LEQ:
+ if !actl.GetRight().IsValueNumeric() {
+ return false
+ }
+
+ gexp.SetMatcherBeNumerically(actl.GetOp(), actl.GetRight().GetValueExpr())
+
+ default:
+ return false
+ }
+
+ if gexp.MatcherTypeIs(matcher.BoolValueFalse) {
+ gexp.ReverseAssertionFuncLogic()
+ }
+
+ gexp.ReplaceActual(actl.GetLeft().GetValueExpr())
+
+ reportBuilder.AddIssue(true, wrongCompareWarningTemplate)
+ return true
+}
+
+func (r ComparisonRule) handleEqualComparison(gexp *expression.GomegaExpression, actual actual.ComparisonActualPayload) {
+ if actual.GetRight().IsValueZero() {
+ gexp.SetMatcherBeZero()
+ } else {
+ left := actual.GetLeft()
+ arg := actual.GetRight().GetValueExpr()
+ if left.IsInterface() || left.IsPointer() {
+ gexp.SetMatcherBeIdenticalTo(arg)
+ } else {
+ gexp.SetMatcherEqual(arg)
+ }
+ }
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/doublenegativerule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/doublenegativerule.go
new file mode 100644
index 000000000..6ce7be5a5
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/doublenegativerule.go
@@ -0,0 +1,30 @@
+package rules
+
+import (
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+const doubleNegativeWarningTemplate = "avoid double negative assertion"
+
+type DoubleNegativeRule struct{}
+
+func (DoubleNegativeRule) isApplied(gexp *expression.GomegaExpression) bool {
+ return gexp.MatcherTypeIs(matcher.BeFalseMatcherType) &&
+ gexp.IsNegativeAssertion()
+}
+
+func (r DoubleNegativeRule) Apply(gexp *expression.GomegaExpression, _ types.Config, reportBuilder *reports.Builder) bool {
+ if !r.isApplied(gexp) {
+ return false
+ }
+
+ gexp.ReverseAssertionFuncLogic()
+ gexp.SetMatcherBeTrue()
+
+ reportBuilder.AddIssue(true, doubleNegativeWarningTemplate)
+
+ return true
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/equalboolrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/equalboolrule.go
new file mode 100644
index 000000000..e9eaa1b80
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/equalboolrule.go
@@ -0,0 +1,36 @@
+package rules
+
+import (
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+const wrongBoolWarningTemplate = "wrong boolean assertion"
+
+type EqualBoolRule struct{}
+
+func (r EqualBoolRule) isApplied(gexp *expression.GomegaExpression) bool {
+ return gexp.MatcherTypeIs(matcher.EqualBoolValueMatcherType)
+}
+
+func (r EqualBoolRule) Apply(gexp *expression.GomegaExpression, _ types.Config, reportBuilder *reports.Builder) bool {
+ if !r.isApplied(gexp) {
+ return false
+ }
+
+ if gexp.MatcherTypeIs(matcher.BoolValueTrue) {
+ gexp.SetMatcherBeTrue()
+ } else {
+ if gexp.IsNegativeAssertion() {
+ gexp.ReverseAssertionFuncLogic()
+ gexp.SetMatcherBeTrue()
+ } else {
+ gexp.SetMatcherBeFalse()
+ }
+ }
+
+ reportBuilder.AddIssue(true, wrongBoolWarningTemplate)
+ return true
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/equaldifferenttypesrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/equaldifferenttypesrule.go
new file mode 100644
index 000000000..81d703bb8
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/equaldifferenttypesrule.go
@@ -0,0 +1,119 @@
+package rules
+
+import (
+ gotypes "go/types"
+
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+const compareDifferentTypes = "use %[1]s with different types: Comparing %[2]s with %[3]s; either change the expected value type if possible, or use the BeEquivalentTo() matcher, instead of %[1]s()"
+
+type EqualDifferentTypesRule struct{}
+
+func (r EqualDifferentTypesRule) isApplied(config types.Config) bool {
+ return !bool(config.SuppressTypeCompare)
+}
+
+func (r EqualDifferentTypesRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool {
+ if !r.isApplied(config) {
+ return false
+ }
+
+ return r.checkEqualDifferentTypes(gexp, gexp.GetMatcher(), false, reportBuilder)
+}
+
+func (r EqualDifferentTypesRule) checkEqualDifferentTypes(gexp *expression.GomegaExpression, mtchr *matcher.Matcher, parentPointer bool, reportBuilder *reports.Builder) bool {
+ actualType := gexp.GetActualArgGOType()
+
+ if parentPointer {
+ if t, ok := actualType.(*gotypes.Pointer); ok {
+ actualType = t.Elem()
+ }
+ }
+
+ var (
+ matcherType gotypes.Type
+ matcherName string
+ )
+
+ switch specificMatcher := mtchr.GetMatcherInfo().(type) {
+ case *matcher.EqualMatcher:
+ matcherType = specificMatcher.GetType()
+ matcherName = specificMatcher.MatcherName()
+
+ case *matcher.BeIdenticalToMatcher:
+ matcherType = specificMatcher.GetType()
+ matcherName = specificMatcher.MatcherName()
+
+ case *matcher.HaveValueMatcher:
+ return r.checkEqualDifferentTypes(gexp, specificMatcher.GetNested(), true, reportBuilder)
+
+ case *matcher.MultipleMatchersMatcher:
+ foundIssue := false
+ for i := range specificMatcher.Len() {
+ if r.checkEqualDifferentTypes(gexp, specificMatcher.At(i), parentPointer, reportBuilder) {
+ foundIssue = true
+ }
+
+ }
+ return foundIssue
+
+ case *matcher.EqualNilMatcher:
+ matcherType = specificMatcher.GetType()
+ matcherName = specificMatcher.MatcherName()
+
+ case *matcher.WithTransformMatcher:
+ nested := specificMatcher.GetNested()
+ switch specificNested := nested.GetMatcherInfo().(type) {
+ case *matcher.EqualMatcher:
+ matcherType = specificNested.GetType()
+ matcherName = specificNested.MatcherName()
+
+ case *matcher.BeIdenticalToMatcher:
+ matcherType = specificNested.GetType()
+ matcherName = specificNested.MatcherName()
+
+ default:
+ return false
+ }
+
+ actualType = specificMatcher.GetFuncType()
+ default:
+ return false
+ }
+
+ if !gotypes.Identical(matcherType, actualType) {
+ if r.isImplementing(matcherType, actualType) || r.isImplementing(actualType, matcherType) {
+ return false
+ }
+
+ reportBuilder.AddIssue(false, compareDifferentTypes, matcherName, actualType, matcherType)
+ return true
+ }
+
+ return false
+}
+
+func (r EqualDifferentTypesRule) isImplementing(ifs, impl gotypes.Type) bool {
+ if gotypes.IsInterface(ifs) {
+
+ var (
+ theIfs *gotypes.Interface
+ ok bool
+ )
+
+ for {
+ theIfs, ok = ifs.(*gotypes.Interface)
+ if ok {
+ break
+ }
+ ifs = ifs.Underlying()
+ }
+
+ return gotypes.Implements(impl, theIfs)
+ }
+ return false
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/equalnilrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/equalnilrule.go
new file mode 100644
index 000000000..5b28e7d9b
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/equalnilrule.go
@@ -0,0 +1,29 @@
+package rules
+
+import (
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+// EqualNilRule validate that there is no use of Equal(nil) in the code
+// It is part of assertion only rules
+type EqualNilRule struct{}
+
+func (r EqualNilRule) isApplied(gexp *expression.GomegaExpression, config types.Config) bool {
+ return !bool(config.SuppressNil) &&
+ gexp.MatcherTypeIs(matcher.EqualValueMatcherType)
+}
+
+func (r EqualNilRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool {
+ if !r.isApplied(gexp, config) {
+ return false
+ }
+
+ gexp.SetMatcherBeNil()
+
+ reportBuilder.AddIssue(true, wrongNilWarningTemplate)
+
+ return true
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/errorequalnilrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/errorequalnilrule.go
new file mode 100644
index 000000000..7aaf7631b
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/errorequalnilrule.go
@@ -0,0 +1,35 @@
+package rules
+
+import (
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/actual"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/value"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+type ErrorEqualNilRule struct{}
+
+func (ErrorEqualNilRule) isApplied(gexp *expression.GomegaExpression, config types.Config) bool {
+ return !bool(config.SuppressErr) &&
+ gexp.ActualArgTypeIs(actual.ErrorTypeArgType) &&
+ gexp.MatcherTypeIs(matcher.BeNilMatcherType|matcher.EqualNilMatcherType)
+}
+
+func (r ErrorEqualNilRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool {
+ if !r.isApplied(gexp, config) {
+ return false
+ }
+
+ if v, ok := gexp.GetActualArg().(value.Valuer); ok && v.IsFunc() || gexp.ActualArgTypeIs(actual.ErrFuncActualArgType) {
+ gexp.SetMatcherSucceed()
+ } else {
+ gexp.ReverseAssertionFuncLogic()
+ gexp.SetMatcherHaveOccurred()
+ }
+
+ reportBuilder.AddIssue(true, wrongErrWarningTemplate)
+
+ return true
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/forceexpecttorule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/forceexpecttorule.go
new file mode 100644
index 000000000..391d1d449
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/forceexpecttorule.go
@@ -0,0 +1,43 @@
+package rules
+
+import (
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+const forceExpectToTemplate = "must not use %s with %s"
+
+type ForceExpectToRule struct{}
+
+func (ForceExpectToRule) isApplied(gexp *expression.GomegaExpression, config types.Config) bool {
+ if !config.ForceExpectTo {
+ return false
+ }
+
+ actlName := gexp.GetActualFuncName()
+ return actlName == "Expect" || actlName == "ExpectWithOffset"
+}
+
+func (r ForceExpectToRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool {
+ if !r.isApplied(gexp, config) {
+ return false
+ }
+
+ var newName string
+
+ switch gexp.GetAssertFuncName() {
+ case "Should":
+ newName = "To"
+ case "ShouldNot":
+ newName = "ToNot"
+ default:
+ return false
+ }
+
+ gexp.ReplaceAssertionMethod(newName)
+ reportBuilder.AddIssue(true, forceExpectToTemplate, gexp.GetActualFuncName(), gexp.GetOrigAssertFuncName())
+
+ // always return false, to keep checking another rules.
+ return false
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/havelen0.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/havelen0.go
new file mode 100644
index 000000000..20bcb7211
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/havelen0.go
@@ -0,0 +1,23 @@
+package rules
+
+import (
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+type HaveLen0 struct{}
+
+func (r *HaveLen0) isApplied(gexp *expression.GomegaExpression, config types.Config) bool {
+ return gexp.MatcherTypeIs(matcher.HaveLenZeroMatcherType) && !bool(config.AllowHaveLen0)
+}
+
+func (r *HaveLen0) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool {
+ if !r.isApplied(gexp, config) {
+ return false
+ }
+ gexp.SetMatcherBeEmpty()
+ reportBuilder.AddIssue(true, wrongLengthWarningTemplate)
+ return true
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/haveoccurredrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/haveoccurredrule.go
new file mode 100644
index 000000000..437d3ee23
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/haveoccurredrule.go
@@ -0,0 +1,35 @@
+package rules
+
+import (
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/actual"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+type HaveOccurredRule struct{}
+
+func (r HaveOccurredRule) isApplied(gexp *expression.GomegaExpression) bool {
+ return gexp.MatcherTypeIs(matcher.HaveOccurredMatcherType)
+}
+
+func (r HaveOccurredRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool {
+ if !r.isApplied(gexp) {
+ return false
+ }
+
+ if !gexp.ActualArgTypeIs(actual.ErrorTypeArgType) {
+ reportBuilder.AddIssue(false, "asserting a non-error type with HaveOccurred matcher")
+ return true
+ }
+
+ if bool(config.ForceSucceedForFuncs) && gexp.GetActualArg().(*actual.ErrPayload).IsFunc() {
+ gexp.ReverseAssertionFuncLogic()
+ gexp.SetMatcherSucceed()
+ reportBuilder.AddIssue(true, "prefer using the Succeed matcher for error function, instead of HaveOccurred")
+ return true
+ }
+
+ return false
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/lenrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/lenrule.go
new file mode 100644
index 000000000..06d6f2c68
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/lenrule.go
@@ -0,0 +1,119 @@
+package rules
+
+import (
+ "go/token"
+
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/actual"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+const wrongLengthWarningTemplate = "wrong length assertion"
+
+// LenRule does not allow using the len() function in actual with numeric comparison. Instead,
+// it suggests to use the HaveLen matcher, or the BeEmpty matcher, if comparing to zero.
+type LenRule struct{}
+
+func (r *LenRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool {
+
+ if !r.isApplied(gexp, config) {
+ return false
+ }
+
+ if r.fixExpression(gexp) {
+ reportBuilder.AddIssue(true, wrongLengthWarningTemplate)
+ return true
+ }
+ return false
+}
+
+func (r *LenRule) isApplied(gexp *expression.GomegaExpression, config types.Config) bool {
+ if config.SuppressLen {
+ return false
+ }
+
+ if gexp.ActualArgTypeIs(actual.LenFuncActualArgType) {
+ if gexp.MatcherTypeIs(matcher.EqualMatcherType | matcher.BeZeroMatcherType) {
+ return true
+ }
+
+ if gexp.MatcherTypeIs(matcher.BeNumericallyMatcherType) {
+ mtchr := gexp.GetMatcherInfo().(*matcher.BeNumericallyMatcher)
+ return mtchr.GetOp() == token.EQL || mtchr.GetOp() == token.NEQ || gexp.MatcherTypeIs(matcher.EqualZero|matcher.GreaterThanZero)
+ }
+ }
+
+ if gexp.ActualArgTypeIs(actual.LenComparisonActualArgType) && gexp.MatcherTypeIs(matcher.BeTrueMatcherType|matcher.BeFalseMatcherType|matcher.EqualBoolValueMatcherType) {
+ return true
+ }
+
+ return false
+}
+
+func (r *LenRule) fixExpression(gexp *expression.GomegaExpression) bool {
+ if gexp.ActualArgTypeIs(actual.LenFuncActualArgType) {
+ return r.fixEqual(gexp)
+ }
+
+ if gexp.ActualArgTypeIs(actual.LenComparisonActualArgType) {
+ return r.fixComparison(gexp)
+ }
+
+ return false
+}
+
+func (r *LenRule) fixEqual(gexp *expression.GomegaExpression) bool {
+
+ if gexp.MatcherTypeIs(matcher.EqualMatcherType) {
+ gexp.SetLenNumericMatcher()
+
+ } else if gexp.MatcherTypeIs(matcher.BeZeroMatcherType) {
+ gexp.SetMatcherBeEmpty()
+
+ } else if gexp.MatcherTypeIs(matcher.BeNumericallyMatcherType) {
+ mtchr := gexp.GetMatcherInfo().(*matcher.BeNumericallyMatcher)
+ op := mtchr.GetOp()
+
+ if op == token.EQL {
+ gexp.SetLenNumericMatcher()
+ } else if op == token.NEQ {
+ gexp.ReverseAssertionFuncLogic()
+ gexp.SetLenNumericMatcher()
+ } else if gexp.MatcherTypeIs(matcher.GreaterThanZero) {
+ gexp.ReverseAssertionFuncLogic()
+ gexp.SetMatcherBeEmpty()
+ } else {
+ return false
+ }
+ } else {
+ return false
+ }
+
+ gexp.ReplaceActualWithItsFirstArg()
+ return true
+}
+
+func (r *LenRule) fixComparison(gexp *expression.GomegaExpression) bool {
+ actl := gexp.GetActualArg().(*actual.FuncComparisonPayload)
+ if op := actl.GetOp(); op == token.NEQ {
+ gexp.ReverseAssertionFuncLogic()
+ } else if op != token.EQL {
+ return false
+ }
+
+ if gexp.MatcherTypeIs(matcher.BoolValueFalse) {
+ gexp.ReverseAssertionFuncLogic()
+ }
+
+ if actl.IsValueZero() {
+ gexp.SetMatcherBeEmpty()
+ } else {
+ gexp.SetMatcherLen(actl.GetValueExpr())
+ }
+
+ gexp.ReplaceActual(actl.GetFuncArg())
+
+ return true
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/matcheronlyrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/matcheronlyrule.go
new file mode 100644
index 000000000..1174393c6
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/matcheronlyrule.go
@@ -0,0 +1,12 @@
+package rules
+
+var matcherOnlyRules = Rules{
+ &HaveLen0{},
+ &EqualBoolRule{},
+ &EqualNilRule{},
+ &DoubleNegativeRule{},
+}
+
+func getMatcherOnlyRules() Rules {
+ return matcherOnlyRules
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/matcherrorrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/matcherrorrule.go
new file mode 100644
index 000000000..767b4b621
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/matcherrorrule.go
@@ -0,0 +1,110 @@
+package rules
+
+import (
+ "go/ast"
+
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/actual"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+const (
+ matchErrorArgWrongType = "the MatchError matcher used to assert a non error type (%s)"
+ matchErrorWrongTypeAssertion = "MatchError first parameter (%s) must be error, string, GomegaMatcher or func(error)bool are allowed"
+ matchErrorMissingDescription = "missing function description as second parameter of MatchError"
+ matchErrorRedundantArg = "redundant MatchError arguments; consider removing them"
+ matchErrorNoFuncDescription = "The second parameter of MatchError must be the function description (string)"
+)
+
+// MatchErrorRule validates the usage of the MatchError matcher.
+//
+// # First, it checks that the actual value is actually an error
+//
+// Then, it checks the matcher itself: this matcher can be used in 3 different ways:
+// 1. With error type variable
+// 2. With another gomega matcher, to check the actual err.Error() value
+// 3. With function with a signature of func(error) bool. In this case, additional description
+// string variable is required.
+type MatchErrorRule struct{}
+
+func (r MatchErrorRule) isApplied(gexp *expression.GomegaExpression) bool {
+ return gexp.MatcherTypeIs(matcher.MatchErrorMatcherType | matcher.MultipleMatcherMatherType)
+}
+
+func (r MatchErrorRule) Apply(gexp *expression.GomegaExpression, _ types.Config, reportBuilder *reports.Builder) bool {
+ if !r.isApplied(gexp) {
+ return false
+ }
+
+ return checkMatchError(gexp, reportBuilder)
+}
+
+func checkMatchError(gexp *expression.GomegaExpression, reportBuilder *reports.Builder) bool {
+ mtchr := gexp.GetMatcherInfo()
+ switch m := mtchr.(type) {
+ case matcher.MatchErrorMatcher:
+ return checkMatchErrorMatcher(gexp, gexp.GetMatcher(), m, reportBuilder)
+
+ case *matcher.MultipleMatchersMatcher:
+ res := false
+ for i := range m.Len() {
+ nested := m.At(i)
+ if specific, ok := nested.GetMatcherInfo().(matcher.MatchErrorMatcher); ok {
+ if valid := checkMatchErrorMatcher(gexp, gexp.GetMatcher(), specific, reportBuilder); valid {
+ res = true
+ }
+ }
+ }
+ return res
+ default:
+ return false
+ }
+}
+
+func checkMatchErrorMatcher(gexp *expression.GomegaExpression, mtchr *matcher.Matcher, mtchrInfo matcher.MatchErrorMatcher, reportBuilder *reports.Builder) bool {
+ if !gexp.ActualArgTypeIs(actual.ErrorTypeArgType) {
+ reportBuilder.AddIssue(false, matchErrorArgWrongType, reportBuilder.FormatExpr(gexp.GetActualArgExpr()))
+ }
+
+ switch m := mtchrInfo.(type) {
+ case *matcher.InvalidMatchErrorMatcher:
+ reportBuilder.AddIssue(false, matchErrorWrongTypeAssertion, reportBuilder.FormatExpr(mtchr.Clone.Args[0]))
+
+ case *matcher.MatchErrorMatcherWithErrFunc:
+ if m.NumArgs() == m.AllowedNumArgs() {
+ if !m.IsSecondArgString() {
+ reportBuilder.AddIssue(false, matchErrorNoFuncDescription)
+ }
+ return true
+ }
+
+ if m.NumArgs() == 1 {
+ reportBuilder.AddIssue(false, matchErrorMissingDescription)
+ return true
+ }
+
+ case *matcher.MatchErrorMatcherWithErr,
+ *matcher.MatchErrorMatcherWithMatcher,
+ *matcher.MatchErrorMatcherWithString:
+ // continue
+ default:
+ return false
+ }
+
+ if mtchrInfo.NumArgs() == mtchrInfo.AllowedNumArgs() {
+ return true
+ }
+
+ if mtchrInfo.NumArgs() > mtchrInfo.AllowedNumArgs() {
+ var newArgsSuggestion []ast.Expr
+ for i := 0; i < mtchrInfo.AllowedNumArgs(); i++ {
+ newArgsSuggestion = append(newArgsSuggestion, mtchr.Clone.Args[i])
+ }
+ mtchr.Clone.Args = newArgsSuggestion
+ reportBuilder.AddIssue(false, matchErrorRedundantArg)
+ return true
+ }
+ return false
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/missingassertionrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/missingassertionrule.go
new file mode 100644
index 000000000..43fc58bf6
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/missingassertionrule.go
@@ -0,0 +1,27 @@
+package rules
+
+import (
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/gomegainfo"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+const missingAssertionMessage = `%q: missing assertion method. Expected %s`
+
+type MissingAssertionRule struct{}
+
+func (r MissingAssertionRule) isApplied(gexp *expression.GomegaExpression) bool {
+ return gexp.IsMissingAssertion()
+}
+
+func (r MissingAssertionRule) Apply(gexp *expression.GomegaExpression, _ types.Config, reportBuilder *reports.Builder) bool {
+ if !r.isApplied(gexp) {
+ return false
+ }
+
+ actualMethodName := gexp.GetActualFuncName()
+ reportBuilder.AddIssue(false, missingAssertionMessage, actualMethodName, gomegainfo.GetAllowedAssertionMethods(actualMethodName))
+
+ return true
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/nilcomparerule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/nilcomparerule.go
new file mode 100644
index 000000000..fc3cd49e5
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/nilcomparerule.go
@@ -0,0 +1,75 @@
+package rules
+
+import (
+ "go/token"
+
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/actual"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+const (
+ wrongNilWarningTemplate = "wrong nil assertion"
+ wrongErrWarningTemplate = "wrong error assertion"
+)
+
+type NilCompareRule struct{}
+
+func (r NilCompareRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool {
+ isErr, ruleApplied := r.isApplied(gexp, config)
+ if !ruleApplied {
+ return false
+ }
+
+ if gexp.MatcherTypeIs(matcher.BoolValueFalse) {
+ gexp.ReverseAssertionFuncLogic()
+ }
+
+ r.handleNilBeBoolMatcher(gexp, gexp.GetActualArg().(*actual.NilComparisonPayload), reportBuilder, isErr)
+
+ return true
+}
+
+func (r NilCompareRule) isApplied(gexp *expression.GomegaExpression, config types.Config) (bool, bool) {
+ if !gexp.MatcherTypeIs(matcher.EqualBoolValueMatcherType | matcher.BeTrueMatcherType | matcher.BeFalseMatcherType) {
+ return false, false
+ }
+
+ actl, ok := gexp.GetActualArg().(*actual.NilComparisonPayload)
+ if !ok {
+ return false, false
+ }
+
+ isErr := actl.IsError() && !bool(config.SuppressErr)
+
+ if !isErr && bool(config.SuppressNil) {
+ return isErr, false
+ }
+
+ return isErr, true
+}
+
+func (r NilCompareRule) handleNilBeBoolMatcher(gexp *expression.GomegaExpression, actl *actual.NilComparisonPayload, reportBuilder *reports.Builder, isErr bool) {
+ template := wrongNilWarningTemplate
+ if isErr {
+ template = wrongErrWarningTemplate
+ if actl.IsFunc() {
+ gexp.SetMatcherSucceed()
+ } else {
+ gexp.ReverseAssertionFuncLogic()
+ gexp.SetMatcherHaveOccurred()
+ }
+ } else {
+ gexp.SetMatcherBeNil()
+ }
+
+ gexp.ReplaceActual(actl.GetValueExpr())
+
+ if actl.GetOp() == token.NEQ {
+ gexp.ReverseAssertionFuncLogic()
+ }
+
+ reportBuilder.AddIssue(true, template)
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/rule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/rule.go
new file mode 100644
index 000000000..cf331c21c
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/rule.go
@@ -0,0 +1,61 @@
+package rules
+
+import (
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+type Rule interface {
+ Apply(*expression.GomegaExpression, types.Config, *reports.Builder) bool
+}
+
+var rules = Rules{
+ &ForceExpectToRule{},
+ &LenRule{},
+ &CapRule{},
+ &ComparisonRule{},
+ &NilCompareRule{},
+ &ComparePointRule{},
+ &ErrorEqualNilRule{},
+ &MatchErrorRule{},
+ getMatcherOnlyRules(),
+ &EqualDifferentTypesRule{},
+ &HaveOccurredRule{},
+ &SucceedRule{},
+}
+
+var asyncRules = Rules{
+ &AsyncFuncCallRule{},
+ &AsyncTimeIntervalsRule{},
+ &ErrorEqualNilRule{},
+ &MatchErrorRule{},
+ &AsyncSucceedRule{},
+ getMatcherOnlyRules(),
+}
+
+func GetRules() Rules {
+ return rules
+}
+
+func GetAsyncRules() Rules {
+ return asyncRules
+}
+
+type Rules []Rule
+
+func (r Rules) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool {
+ for _, rule := range r {
+ if rule.Apply(gexp, config, reportBuilder) {
+ return true
+ }
+ }
+
+ return false
+}
+
+var missingAssertionRule = MissingAssertionRule{}
+
+func GetMissingAssertionRule() Rule {
+ return missingAssertionRule
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/succeedrule.go b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/succeedrule.go
new file mode 100644
index 000000000..6a5167a8a
--- /dev/null
+++ b/vendor/github.com/nunnatsa/ginkgolinter/internal/rules/succeedrule.go
@@ -0,0 +1,41 @@
+package rules
+
+import (
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/actual"
+ "github.com/nunnatsa/ginkgolinter/internal/expression/matcher"
+ "github.com/nunnatsa/ginkgolinter/internal/reports"
+ "github.com/nunnatsa/ginkgolinter/types"
+)
+
+type SucceedRule struct{}
+
+func (r SucceedRule) isApplied(gexp *expression.GomegaExpression) bool {
+ return !gexp.IsAsync() && gexp.MatcherTypeIs(matcher.SucceedMatcherType)
+}
+
+func (r SucceedRule) Apply(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder) bool {
+ if !r.isApplied(gexp) {
+ return false
+ }
+
+ if !gexp.ActualArgTypeIs(actual.ErrorTypeArgType) {
+ if gexp.IsActualTuple() {
+ reportBuilder.AddIssue(false, "the Success matcher does not support multiple values")
+ } else {
+ reportBuilder.AddIssue(false, "asserting a non-error type with Succeed matcher")
+ }
+ return true
+ }
+
+ if bool(config.ForceSucceedForFuncs) && !gexp.GetActualArg().(*actual.ErrPayload).IsFunc() {
+ gexp.ReverseAssertionFuncLogic()
+ gexp.SetMatcherHaveOccurred()
+
+ reportBuilder.AddIssue(true, "prefer using the HaveOccurred matcher for non-function error value, instead of Succeed")
+
+ return true
+ }
+
+ return false
+}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/linter/ginkgo_linter.go b/vendor/github.com/nunnatsa/ginkgolinter/linter/ginkgo_linter.go
index 574fdfadf..188b2b5f9 100644
--- a/vendor/github.com/nunnatsa/ginkgolinter/linter/ginkgo_linter.go
+++ b/vendor/github.com/nunnatsa/ginkgolinter/linter/ginkgo_linter.go
@@ -1,24 +1,16 @@
package linter
import (
- "bytes"
- "fmt"
"go/ast"
- "go/constant"
- "go/printer"
- "go/token"
- gotypes "go/types"
- "reflect"
- "github.com/go-toolsmith/astcopy"
"golang.org/x/tools/go/analysis"
+ "github.com/nunnatsa/ginkgolinter/internal/expression"
+ "github.com/nunnatsa/ginkgolinter/internal/formatter"
"github.com/nunnatsa/ginkgolinter/internal/ginkgohandler"
"github.com/nunnatsa/ginkgolinter/internal/gomegahandler"
- "github.com/nunnatsa/ginkgolinter/internal/interfaces"
- "github.com/nunnatsa/ginkgolinter/internal/intervals"
"github.com/nunnatsa/ginkgolinter/internal/reports"
- "github.com/nunnatsa/ginkgolinter/internal/reverseassertion"
+ "github.com/nunnatsa/ginkgolinter/internal/rules"
"github.com/nunnatsa/ginkgolinter/types"
)
@@ -26,62 +18,6 @@ import (
//
// For more details, look at the README.md file
-const (
- linterName = "ginkgo-linter"
- wrongLengthWarningTemplate = "wrong length assertion"
- wrongCapWarningTemplate = "wrong cap assertion"
- wrongNilWarningTemplate = "wrong nil assertion"
- wrongBoolWarningTemplate = "wrong boolean assertion"
- wrongErrWarningTemplate = "wrong error assertion"
- wrongCompareWarningTemplate = "wrong comparison assertion"
- doubleNegativeWarningTemplate = "avoid double negative assertion"
- valueInEventually = "use a function call in %s. This actually checks nothing, because %s receives the function returned value, instead of function itself, and this value is never changed"
- comparePointerToValue = "comparing a pointer to a value will always fail"
- missingAssertionMessage = linterName + `: %q: missing assertion method. Expected %s`
- focusContainerFound = linterName + ": Focus container found. This is used only for local debug and should not be part of the actual source code. Consider to replace with %q"
- focusSpecFound = linterName + ": Focus spec found. This is used only for local debug and should not be part of the actual source code. Consider to remove it"
- compareDifferentTypes = "use %[1]s with different types: Comparing %[2]s with %[3]s; either change the expected value type if possible, or use the BeEquivalentTo() matcher, instead of %[1]s()"
- matchErrorArgWrongType = "the MatchError matcher used to assert a non error type (%s)"
- matchErrorWrongTypeAssertion = "MatchError first parameter (%s) must be error, string, GomegaMatcher or func(error)bool are allowed"
- matchErrorMissingDescription = "missing function description as second parameter of MatchError"
- matchErrorRedundantArg = "redundant MatchError arguments; consider removing them"
- matchErrorNoFuncDescription = "The second parameter of MatchError must be the function description (string)"
- forceExpectToTemplate = "must not use Expect with %s"
- useBeforeEachTemplate = "use BeforeEach() to assign variable %s"
-)
-
-const ( // gomega matchers
- beEmpty = "BeEmpty"
- beEquivalentTo = "BeEquivalentTo"
- beFalse = "BeFalse"
- beIdenticalTo = "BeIdenticalTo"
- beNil = "BeNil"
- beNumerically = "BeNumerically"
- beTrue = "BeTrue"
- beZero = "BeZero"
- equal = "Equal"
- haveLen = "HaveLen"
- haveCap = "HaveCap"
- haveOccurred = "HaveOccurred"
- haveValue = "HaveValue"
- not = "Not"
- omega = "Ω"
- succeed = "Succeed"
- and = "And"
- or = "Or"
- withTransform = "WithTransform"
- matchError = "MatchError"
-)
-
-const ( // gomega actuals
- expect = "Expect"
- expectWithOffset = "ExpectWithOffset"
- eventually = "Eventually"
- eventuallyWithOffset = "EventuallyWithOffset"
- consistently = "Consistently"
- consistentlyWithOffset = "ConsistentlyWithOffset"
-)
-
type GinkgoLinter struct {
config *types.Config
}
@@ -94,7 +30,7 @@ func NewGinkgoLinter(config *types.Config) *GinkgoLinter {
}
// Run is the main assertion function
-func (l *GinkgoLinter) Run(pass *analysis.Pass) (interface{}, error) {
+func (l *GinkgoLinter) Run(pass *analysis.Pass) (any, error) {
for _, file := range pass.Files {
fileConfig := l.config.Clone()
@@ -102,39 +38,20 @@ func (l *GinkgoLinter) Run(pass *analysis.Pass) (interface{}, error) {
fileConfig.UpdateFromFile(cm)
- gomegaHndlr := gomegahandler.GetGomegaHandler(file)
+ gomegaHndlr := gomegahandler.GetGomegaHandler(file, pass)
ginkgoHndlr := ginkgohandler.GetGinkgoHandler(file)
if gomegaHndlr == nil && ginkgoHndlr == nil { // no gomega or ginkgo imports => no use in gomega in this file; nothing to do here
continue
}
- timePks := ""
- for _, imp := range file.Imports {
- if imp.Path.Value == `"time"` {
- if imp.Name == nil {
- timePks = "time"
- } else {
- timePks = imp.Name.Name
- }
- }
- }
-
ast.Inspect(file, func(n ast.Node) bool {
if ginkgoHndlr != nil {
goDeeper := false
spec, ok := n.(*ast.ValueSpec)
if ok {
for _, val := range spec.Values {
- if exp, ok := val.(*ast.CallExpr); ok {
- if bool(fileConfig.ForbidFocus) && checkFocusContainer(pass, ginkgoHndlr, exp) {
- goDeeper = true
- }
-
- if bool(fileConfig.ForbidSpecPollution) && checkAssignmentsInContainer(pass, ginkgoHndlr, exp) {
- goDeeper = true
- }
- }
+ goDeeper = ginkgoHndlr.HandleGinkgoSpecs(val, fileConfig, pass) || goDeeper
}
}
if goDeeper {
@@ -147,1527 +64,68 @@ func (l *GinkgoLinter) Run(pass *analysis.Pass) (interface{}, error) {
return true
}
- config := fileConfig.Clone()
-
- if comments, ok := cm[stmt]; ok {
- config.UpdateFromComment(comments)
- }
-
// search for function calls
assertionExp, ok := stmt.X.(*ast.CallExpr)
if !ok {
return true
}
+ config := fileConfig.Clone()
+ if comments, ok := cm[stmt]; ok {
+ config.UpdateFromComment(comments)
+ }
+
if ginkgoHndlr != nil {
- goDeeper := false
- if bool(config.ForbidFocus) && checkFocusContainer(pass, ginkgoHndlr, assertionExp) {
- goDeeper = true
- }
- if bool(config.ForbidSpecPollution) && checkAssignmentsInContainer(pass, ginkgoHndlr, assertionExp) {
- goDeeper = true
- }
- if goDeeper {
+ if ginkgoHndlr.HandleGinkgoSpecs(assertionExp, config, pass) {
return true
}
}
- // no more ginkgo checks. From here it's only gomega. So if there is no gomega handler, exit here. This is
- // mostly to prevent nil pointer error.
+ // no more ginkgo checks. From here it's only gomega. So if there is no gomega handler, exit here.
if gomegaHndlr == nil {
return true
}
- assertionFunc, ok := assertionExp.Fun.(*ast.SelectorExpr)
- if !ok {
- checkNoAssertion(pass, assertionExp, gomegaHndlr)
- return true
- }
-
- if !isAssertionFunc(assertionFunc.Sel.Name) {
- checkNoAssertion(pass, assertionExp, gomegaHndlr)
+ gexp, ok := expression.New(assertionExp, pass, gomegaHndlr, getTimePkg(file))
+ if !ok || gexp == nil {
return true
}
- actualExpr := gomegaHndlr.GetActualExpr(assertionFunc)
- if actualExpr == nil {
- return true
- }
-
- return checkExpression(pass, config, assertionExp, actualExpr, gomegaHndlr, timePks)
+ reportBuilder := reports.NewBuilder(assertionExp, formatter.NewGoFmtFormatter(pass.Fset))
+ return checkGomegaExpression(gexp, config, reportBuilder, pass)
})
}
return nil, nil
}
-func checkAssignmentsInContainer(pass *analysis.Pass, ginkgoHndlr ginkgohandler.Handler, exp *ast.CallExpr) bool {
- foundSomething := false
- if ginkgoHndlr.IsWrapContainer(exp) {
- for _, arg := range exp.Args {
- if fn, ok := arg.(*ast.FuncLit); ok {
- if fn.Body != nil {
- if checkAssignments(pass, fn.Body.List) {
- foundSomething = true
- }
- break
- }
- }
- }
- }
-
- return foundSomething
-}
-
-func checkAssignments(pass *analysis.Pass, list []ast.Stmt) bool {
- foundSomething := false
- for _, stmt := range list {
- switch st := stmt.(type) {
- case *ast.DeclStmt:
- if gen, ok := st.Decl.(*ast.GenDecl); ok {
- if gen.Tok != token.VAR {
- continue
- }
- for _, spec := range gen.Specs {
- if valSpec, ok := spec.(*ast.ValueSpec); ok {
- if checkAssignmentsValues(pass, valSpec.Names, valSpec.Values) {
- foundSomething = true
- }
- }
- }
- }
-
- case *ast.AssignStmt:
- for i, val := range st.Rhs {
- if !is[*ast.FuncLit](val) {
- if id, isIdent := st.Lhs[i].(*ast.Ident); isIdent && id.Name != "_" {
- reportNoFix(pass, id.Pos(), useBeforeEachTemplate, id.Name)
- foundSomething = true
- }
- }
- }
-
- case *ast.IfStmt:
- if st.Body != nil {
- if checkAssignments(pass, st.Body.List) {
- foundSomething = true
- }
- }
- if st.Else != nil {
- if block, isBlock := st.Else.(*ast.BlockStmt); isBlock {
- if checkAssignments(pass, block.List) {
- foundSomething = true
- }
- }
- }
- }
- }
-
- return foundSomething
-}
-
-func checkAssignmentsValues(pass *analysis.Pass, names []*ast.Ident, values []ast.Expr) bool {
- foundSomething := false
- for i, val := range values {
- if !is[*ast.FuncLit](val) {
- reportNoFix(pass, names[i].Pos(), useBeforeEachTemplate, names[i].Name)
- foundSomething = true
- }
- }
-
- return foundSomething
-}
-
-func checkFocusContainer(pass *analysis.Pass, ginkgoHndlr ginkgohandler.Handler, exp *ast.CallExpr) bool {
- foundFocus := false
- isFocus, id := ginkgoHndlr.GetFocusContainerName(exp)
- if isFocus {
- reportNewName(pass, id, id.Name[1:], focusContainerFound, id.Name)
- foundFocus = true
- }
-
- if id != nil && ginkgohandler.IsContainer(id.Name) {
- for _, arg := range exp.Args {
- if ginkgoHndlr.IsFocusSpec(arg) {
- reportNoFix(pass, arg.Pos(), focusSpecFound)
- foundFocus = true
- } else if callExp, ok := arg.(*ast.CallExpr); ok {
- if checkFocusContainer(pass, ginkgoHndlr, callExp) { // handle table entries
- foundFocus = true
- }
- }
- }
- }
-
- return foundFocus
-}
-
-func checkExpression(pass *analysis.Pass, config types.Config, assertionExp *ast.CallExpr, actualExpr *ast.CallExpr, handler gomegahandler.Handler, timePkg string) bool {
- expr := astcopy.CallExpr(assertionExp)
-
- reportBuilder := reports.NewBuilder(pass.Fset, expr)
-
+func checkGomegaExpression(gexp *expression.GomegaExpression, config types.Config, reportBuilder *reports.Builder, pass *analysis.Pass) bool {
goNested := false
- if checkAsyncAssertion(pass, config, expr, actualExpr, handler, reportBuilder, timePkg) {
+ if rules.GetMissingAssertionRule().Apply(gexp, config, reportBuilder) {
goNested = true
} else {
-
- actualArg := getActualArg(actualExpr, handler)
- if actualArg == nil {
- return true
- }
-
- if config.ForceExpectTo {
- goNested = forceExpectTo(expr, handler, reportBuilder) || goNested
+ if gexp.IsAsync() {
+ rules.GetAsyncRules().Apply(gexp, config, reportBuilder)
+ goNested = true
+ } else {
+ rules.GetRules().Apply(gexp, config, reportBuilder)
}
-
- goNested = doCheckExpression(pass, config, assertionExp, actualArg, expr, handler, reportBuilder) || goNested
}
if reportBuilder.HasReport() {
- reportBuilder.SetFixOffer(pass.Fset, expr)
+ reportBuilder.SetFixOffer(gexp.GetClone())
pass.Report(reportBuilder.Build())
}
return goNested
}
-func forceExpectTo(expr *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
- if asrtFun, ok := expr.Fun.(*ast.SelectorExpr); ok {
- if actualFuncName, ok := handler.GetActualFuncName(expr); ok && actualFuncName == expect {
- var (
- name string
- newIdent *ast.Ident
- )
-
- switch name = asrtFun.Sel.Name; name {
- case "Should":
- newIdent = ast.NewIdent("To")
- case "ShouldNot":
- newIdent = ast.NewIdent("ToNot")
- default:
- return false
- }
-
- handler.ReplaceFunction(expr, newIdent)
- reportBuilder.AddIssue(true, fmt.Sprintf(forceExpectToTemplate, name))
- return true
- }
- }
-
- return false
-}
-
-func doCheckExpression(pass *analysis.Pass, config types.Config, assertionExp *ast.CallExpr, actualArg ast.Expr, expr *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
- if !bool(config.SuppressLen) && isActualIsLenFunc(actualArg) {
- return checkLengthMatcher(expr, pass, handler, reportBuilder)
-
- } else if !bool(config.SuppressLen) && isActualIsCapFunc(actualArg) {
- return checkCapMatcher(expr, handler, reportBuilder)
-
- } else if nilable, compOp := getNilableFromComparison(actualArg); nilable != nil {
- if isExprError(pass, nilable) {
- if config.SuppressErr {
- return true
- }
- } else if config.SuppressNil {
- return true
- }
-
- return checkNilMatcher(expr, pass, nilable, handler, compOp == token.NEQ, reportBuilder)
-
- } else if first, second, op, ok := isComparison(pass, actualArg); ok {
- matcher, shouldContinue := startCheckComparison(expr, handler)
- if !shouldContinue {
- return false
- }
- if !config.SuppressLen {
- if isActualIsLenFunc(first) {
- if handleLenComparison(pass, expr, matcher, first, second, op, handler, reportBuilder) {
- return false
- }
- }
- if isActualIsCapFunc(first) {
- if handleCapComparison(expr, matcher, first, second, op, handler, reportBuilder) {
- return false
- }
- }
- }
- return bool(config.SuppressCompare) || checkComparison(expr, pass, matcher, handler, first, second, op, reportBuilder)
-
- } else if checkMatchError(pass, assertionExp, actualArg, handler, reportBuilder) {
- return false
- } else if isExprError(pass, actualArg) {
- return bool(config.SuppressErr) || checkNilError(pass, expr, handler, actualArg, reportBuilder)
-
- } else if checkPointerComparison(pass, config, assertionExp, expr, actualArg, handler, reportBuilder) {
- return false
- } else if !handleAssertionOnly(pass, config, expr, handler, actualArg, reportBuilder) {
- return false
- } else if !config.SuppressTypeCompare {
- return !checkEqualWrongType(pass, assertionExp, actualArg, handler, reportBuilder)
- }
-
- return true
-}
-
-func checkMatchError(pass *analysis.Pass, origExp *ast.CallExpr, actualArg ast.Expr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
- matcher, ok := origExp.Args[0].(*ast.CallExpr)
- if !ok {
- return false
- }
-
- return doCheckMatchError(pass, origExp, matcher, actualArg, handler, reportBuilder)
-}
-
-func doCheckMatchError(pass *analysis.Pass, origExp *ast.CallExpr, matcher *ast.CallExpr, actualArg ast.Expr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
- name, ok := handler.GetActualFuncName(matcher)
- if !ok {
- return false
- }
- switch name {
- case matchError:
- case not:
- nested, ok := matcher.Args[0].(*ast.CallExpr)
- if !ok {
- return false
- }
-
- return doCheckMatchError(pass, origExp, nested, actualArg, handler, reportBuilder)
- case and, or:
- res := false
- for _, arg := range matcher.Args {
- if nested, ok := arg.(*ast.CallExpr); ok {
- if valid := doCheckMatchError(pass, origExp, nested, actualArg, handler, reportBuilder); valid {
- res = true
- }
- }
- }
- return res
- default:
- return false
- }
-
- if !isExprError(pass, actualArg) {
- reportBuilder.AddIssue(false, matchErrorArgWrongType, goFmt(pass.Fset, actualArg))
- }
-
- expr := astcopy.CallExpr(matcher)
-
- validAssertion, requiredParams := checkMatchErrorAssertion(pass, matcher)
- if !validAssertion {
- reportBuilder.AddIssue(false, matchErrorWrongTypeAssertion, goFmt(pass.Fset, matcher.Args[0]))
- }
-
- numParams := len(matcher.Args)
- if numParams == requiredParams {
- if numParams == 2 {
- t := pass.TypesInfo.TypeOf(matcher.Args[1])
- if !gotypes.Identical(t, gotypes.Typ[gotypes.String]) {
- reportBuilder.AddIssue(false, matchErrorNoFuncDescription)
- return true
- }
+func getTimePkg(file *ast.File) string {
+ timePkg := "time"
+ for _, imp := range file.Imports {
+ if imp.Path.Value == `"time"` && imp.Name != nil {
+ timePkg = imp.Name.Name
}
- return true
}
- if requiredParams == 2 && numParams == 1 {
- reportBuilder.AddIssue(false, matchErrorMissingDescription)
- return true
- }
-
- var newArgsSuggestion = []ast.Expr{expr.Args[0]}
- if requiredParams == 2 {
- newArgsSuggestion = append(newArgsSuggestion, expr.Args[1])
- }
- expr.Args = newArgsSuggestion
-
- reportBuilder.AddIssue(true, matchErrorRedundantArg)
- return true
-}
-
-func checkMatchErrorAssertion(pass *analysis.Pass, matcher *ast.CallExpr) (bool, int) {
- if isErrorMatcherValidArg(pass, matcher.Args[0]) {
- return true, 1
- }
-
- t1 := pass.TypesInfo.TypeOf(matcher.Args[0])
- if isFuncErrBool(t1) {
- return true, 2
- }
-
- return false, 0
-}
-
-// isFuncErrBool checks if a function is with the signature `func(error) bool`
-func isFuncErrBool(t gotypes.Type) bool {
- sig, ok := t.(*gotypes.Signature)
- if !ok {
- return false
- }
- if sig.Params().Len() != 1 || sig.Results().Len() != 1 {
- return false
- }
-
- if !interfaces.ImplementsError(sig.Params().At(0).Type()) {
- return false
- }
-
- b, ok := sig.Results().At(0).Type().(*gotypes.Basic)
- if ok && b.Name() == "bool" && b.Info() == gotypes.IsBoolean && b.Kind() == gotypes.Bool {
- return true
- }
-
- return false
-}
-
-func isErrorMatcherValidArg(pass *analysis.Pass, arg ast.Expr) bool {
- if isExprError(pass, arg) {
- return true
- }
-
- if t, ok := pass.TypesInfo.TypeOf(arg).(*gotypes.Basic); ok && t.Kind() == gotypes.String {
- return true
- }
-
- t := pass.TypesInfo.TypeOf(arg)
-
- return interfaces.ImplementsGomegaMatcher(t)
-}
-
-func checkEqualWrongType(pass *analysis.Pass, origExp *ast.CallExpr, actualArg ast.Expr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
- matcher, ok := origExp.Args[0].(*ast.CallExpr)
- if !ok {
- return false
- }
-
- return checkEqualDifferentTypes(pass, matcher, actualArg, handler, false, reportBuilder)
-}
-
-func checkEqualDifferentTypes(pass *analysis.Pass, matcher *ast.CallExpr, actualArg ast.Expr, handler gomegahandler.Handler, parentPointer bool, reportBuilder *reports.Builder) bool {
- matcherFuncName, ok := handler.GetActualFuncName(matcher)
- if !ok {
- return false
- }
-
- actualType := pass.TypesInfo.TypeOf(actualArg)
-
- switch matcherFuncName {
- case equal, beIdenticalTo: // continue
- case and, or:
- foundIssue := false
- for _, nestedExp := range matcher.Args {
- nested, ok := nestedExp.(*ast.CallExpr)
- if !ok {
- continue
- }
- if checkEqualDifferentTypes(pass, nested, actualArg, handler, parentPointer, reportBuilder) {
- foundIssue = true
- }
- }
-
- return foundIssue
- case withTransform:
- nested, ok := matcher.Args[1].(*ast.CallExpr)
- if !ok {
- return false
- }
-
- matcherFuncName, ok = handler.GetActualFuncName(nested)
- switch matcherFuncName {
- case equal, beIdenticalTo:
- case not:
- return checkEqualDifferentTypes(pass, nested, actualArg, handler, parentPointer, reportBuilder)
- default:
- return false
- }
-
- if t := getFuncType(pass, matcher.Args[0]); t != nil {
- actualType = t
- matcher = nested
-
- if !ok {
- return false
- }
- } else {
- return checkEqualDifferentTypes(pass, nested, actualArg, handler, parentPointer, reportBuilder)
- }
-
- case not:
- nested, ok := matcher.Args[0].(*ast.CallExpr)
- if !ok {
- return false
- }
-
- return checkEqualDifferentTypes(pass, nested, actualArg, handler, parentPointer, reportBuilder)
-
- case haveValue:
- nested, ok := matcher.Args[0].(*ast.CallExpr)
- if !ok {
- return false
- }
-
- return checkEqualDifferentTypes(pass, nested, actualArg, handler, true, reportBuilder)
- default:
- return false
- }
-
- matcherValue := matcher.Args[0]
-
- switch act := actualType.(type) {
- case *gotypes.Tuple:
- actualType = act.At(0).Type()
- case *gotypes.Pointer:
- if parentPointer {
- actualType = act.Elem()
- }
- }
-
- matcherType := pass.TypesInfo.TypeOf(matcherValue)
-
- if !reflect.DeepEqual(matcherType, actualType) {
- // Equal can handle comparison of interface and a value that implements it
- if isImplementing(matcherType, actualType) || isImplementing(actualType, matcherType) {
- return false
- }
-
- reportBuilder.AddIssue(false, compareDifferentTypes, matcherFuncName, actualType, matcherType)
- return true
- }
-
- return false
-}
-
-func getFuncType(pass *analysis.Pass, expr ast.Expr) gotypes.Type {
- switch f := expr.(type) {
- case *ast.FuncLit:
- if f.Type != nil && f.Type.Results != nil && len(f.Type.Results.List) > 0 {
- return pass.TypesInfo.TypeOf(f.Type.Results.List[0].Type)
- }
- case *ast.Ident:
- a := pass.TypesInfo.TypeOf(f)
- if sig, ok := a.(*gotypes.Signature); ok && sig.Results().Len() > 0 {
- return sig.Results().At(0).Type()
- }
- }
-
- return nil
-}
-
-func isImplementing(ifs, impl gotypes.Type) bool {
- if gotypes.IsInterface(ifs) {
-
- var (
- theIfs *gotypes.Interface
- ok bool
- )
-
- for {
- theIfs, ok = ifs.(*gotypes.Interface)
- if ok {
- break
- }
- ifs = ifs.Underlying()
- }
-
- return gotypes.Implements(impl, theIfs)
- }
- return false
-}
-
-// be careful - never change origExp!!! only modify its clone, expr!!!
-func checkPointerComparison(pass *analysis.Pass, config types.Config, origExp *ast.CallExpr, expr *ast.CallExpr, actualArg ast.Expr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
- if !isPointer(pass, actualArg) {
- return false
- }
- matcher, ok := origExp.Args[0].(*ast.CallExpr)
- if !ok {
- return false
- }
-
- matcherFuncName, ok := handler.GetActualFuncName(matcher)
- if !ok {
- return false
- }
-
- // not using recurse here, since we need the original expression, in order to get the TypeInfo, while we should not
- // modify it.
- for matcherFuncName == not {
- reverseAssertionFuncLogic(expr)
- expr.Args[0] = expr.Args[0].(*ast.CallExpr).Args[0]
- matcher, ok = matcher.Args[0].(*ast.CallExpr)
- if !ok {
- return false
- }
-
- matcherFuncName, ok = handler.GetActualFuncName(matcher)
- if !ok {
- return false
- }
- }
-
- switch matcherFuncName {
- case equal, beIdenticalTo, beEquivalentTo:
- arg := matcher.Args[0]
- if isPointer(pass, arg) {
- return false
- }
- if isNil(arg) {
- return false
- }
- if isInterface(pass, arg) {
- return false
- }
- case beFalse, beTrue, beNumerically:
- default:
- return false
- }
-
- handleAssertionOnly(pass, config, expr, handler, actualArg, reportBuilder)
-
- args := []ast.Expr{astcopy.CallExpr(expr.Args[0].(*ast.CallExpr))}
- handler.ReplaceFunction(expr.Args[0].(*ast.CallExpr), ast.NewIdent(haveValue))
- expr.Args[0].(*ast.CallExpr).Args = args
-
- reportBuilder.AddIssue(true, comparePointerToValue)
- return true
-}
-
-// check async assertion does not assert function call. This is a real bug in the test. In this case, the assertion is
-// done on the returned value, instead of polling the result of a function, for instance.
-func checkAsyncAssertion(pass *analysis.Pass, config types.Config, expr *ast.CallExpr, actualExpr *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder, timePkg string) bool {
- funcName, ok := handler.GetActualFuncName(actualExpr)
- if !ok {
- return false
- }
-
- var funcIndex int
- switch funcName {
- case eventually, consistently:
- funcIndex = 0
- case eventuallyWithOffset, consistentlyWithOffset:
- funcIndex = 1
- default:
- return false
- }
-
- if !config.SuppressAsync && len(actualExpr.Args) > funcIndex {
- t := pass.TypesInfo.TypeOf(actualExpr.Args[funcIndex])
-
- // skip context variable, if used as first argument
- if "context.Context" == t.String() {
- funcIndex++
- }
-
- if len(actualExpr.Args) > funcIndex {
- if fun, funcCall := actualExpr.Args[funcIndex].(*ast.CallExpr); funcCall {
- t = pass.TypesInfo.TypeOf(fun)
- if !isValidAsyncValueType(t) {
- actualExpr = handler.GetActualExpr(expr.Fun.(*ast.SelectorExpr))
-
- if len(fun.Args) > 0 {
- origArgs := actualExpr.Args
- origFunc := actualExpr.Fun
- actualExpr.Args = fun.Args
-
- origArgs[funcIndex] = fun.Fun
- call := &ast.SelectorExpr{
- Sel: ast.NewIdent("WithArguments"),
- X: &ast.CallExpr{
- Fun: origFunc,
- Args: origArgs,
- },
- }
-
- actualExpr.Fun = call
- actualExpr.Args = fun.Args
- actualExpr = actualExpr.Fun.(*ast.SelectorExpr).X.(*ast.CallExpr)
- } else {
- actualExpr.Args[funcIndex] = fun.Fun
- }
-
- reportBuilder.AddIssue(true, valueInEventually, funcName, funcName)
- }
- }
- }
-
- if config.ValidateAsyncIntervals {
- intervals.CheckIntervals(pass, expr, actualExpr, reportBuilder, handler, timePkg, funcIndex)
- }
- }
-
- handleAssertionOnly(pass, config, expr, handler, actualExpr, reportBuilder)
- return true
-}
-
-func isValidAsyncValueType(t gotypes.Type) bool {
- switch t.(type) {
- // allow functions that return function or channel.
- case *gotypes.Signature, *gotypes.Chan, *gotypes.Pointer:
- return true
- case *gotypes.Named:
- return isValidAsyncValueType(t.Underlying())
- }
-
- return false
-}
-
-func startCheckComparison(exp *ast.CallExpr, handler gomegahandler.Handler) (*ast.CallExpr, bool) {
- matcher, ok := exp.Args[0].(*ast.CallExpr)
- if !ok {
- return nil, false
- }
-
- matcherFuncName, ok := handler.GetActualFuncName(matcher)
- if !ok {
- return nil, false
- }
-
- switch matcherFuncName {
- case beTrue:
- case beFalse:
- reverseAssertionFuncLogic(exp)
- case equal:
- boolean, found := matcher.Args[0].(*ast.Ident)
- if !found {
- return nil, false
- }
-
- if boolean.Name == "false" {
- reverseAssertionFuncLogic(exp)
- } else if boolean.Name != "true" {
- return nil, false
- }
-
- case not:
- reverseAssertionFuncLogic(exp)
- exp.Args[0] = exp.Args[0].(*ast.CallExpr).Args[0]
- return startCheckComparison(exp, handler)
-
- default:
- return nil, false
- }
-
- return matcher, true
-}
-
-func checkComparison(exp *ast.CallExpr, pass *analysis.Pass, matcher *ast.CallExpr, handler gomegahandler.Handler, first ast.Expr, second ast.Expr, op token.Token, reportBuilder *reports.Builder) bool {
- fun, ok := exp.Fun.(*ast.SelectorExpr)
- if !ok {
- return true
- }
-
- call := handler.GetActualExpr(fun)
- if call == nil {
- return true
- }
-
- switch op {
- case token.EQL:
- handleEqualComparison(pass, matcher, first, second, handler)
-
- case token.NEQ:
- reverseAssertionFuncLogic(exp)
- handleEqualComparison(pass, matcher, first, second, handler)
- case token.GTR, token.GEQ, token.LSS, token.LEQ:
- if !isNumeric(pass, first) {
- return true
- }
- handler.ReplaceFunction(matcher, ast.NewIdent(beNumerically))
- matcher.Args = []ast.Expr{
- &ast.BasicLit{Kind: token.STRING, Value: fmt.Sprintf(`"%s"`, op.String())},
- second,
- }
- default:
- return true
- }
-
- call.Args = []ast.Expr{first}
- reportBuilder.AddIssue(true, wrongCompareWarningTemplate)
- return false
-}
-
-func handleEqualComparison(pass *analysis.Pass, matcher *ast.CallExpr, first ast.Expr, second ast.Expr, handler gomegahandler.Handler) {
- if isZero(pass, second) {
- handler.ReplaceFunction(matcher, ast.NewIdent(beZero))
- matcher.Args = nil
- } else {
- t := pass.TypesInfo.TypeOf(first)
- if gotypes.IsInterface(t) {
- handler.ReplaceFunction(matcher, ast.NewIdent(beIdenticalTo))
- } else if is[*gotypes.Pointer](t) {
- handler.ReplaceFunction(matcher, ast.NewIdent(beIdenticalTo))
- } else {
- handler.ReplaceFunction(matcher, ast.NewIdent(equal))
- }
-
- matcher.Args = []ast.Expr{second}
- }
-}
-
-func handleLenComparison(pass *analysis.Pass, exp *ast.CallExpr, matcher *ast.CallExpr, first ast.Expr, second ast.Expr, op token.Token, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
- switch op {
- case token.EQL:
- case token.NEQ:
- reverseAssertionFuncLogic(exp)
- default:
- return false
- }
-
- var eql *ast.Ident
- if isZero(pass, second) {
- eql = ast.NewIdent(beEmpty)
- } else {
- eql = ast.NewIdent(haveLen)
- matcher.Args = []ast.Expr{second}
- }
-
- handler.ReplaceFunction(matcher, eql)
- firstLen, ok := first.(*ast.CallExpr) // assuming it's len()
- if !ok {
- return false // should never happen
- }
-
- val := firstLen.Args[0]
- fun := handler.GetActualExpr(exp.Fun.(*ast.SelectorExpr))
- fun.Args = []ast.Expr{val}
-
- reportBuilder.AddIssue(true, wrongLengthWarningTemplate)
- return true
-}
-
-func handleCapComparison(exp *ast.CallExpr, matcher *ast.CallExpr, first ast.Expr, second ast.Expr, op token.Token, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
- switch op {
- case token.EQL:
- case token.NEQ:
- reverseAssertionFuncLogic(exp)
- default:
- return false
- }
-
- eql := ast.NewIdent(haveCap)
- matcher.Args = []ast.Expr{second}
-
- handler.ReplaceFunction(matcher, eql)
- firstLen, ok := first.(*ast.CallExpr) // assuming it's len()
- if !ok {
- return false // should never happen
- }
-
- val := firstLen.Args[0]
- fun := handler.GetActualExpr(exp.Fun.(*ast.SelectorExpr))
- fun.Args = []ast.Expr{val}
-
- reportBuilder.AddIssue(true, wrongCapWarningTemplate)
- return true
-}
-
-// Check if the "actual" argument is a call to the golang built-in len() function
-func isActualIsLenFunc(actualArg ast.Expr) bool {
- return checkActualFuncName(actualArg, "len")
-}
-
-// Check if the "actual" argument is a call to the golang built-in len() function
-func isActualIsCapFunc(actualArg ast.Expr) bool {
- return checkActualFuncName(actualArg, "cap")
-}
-
-func checkActualFuncName(actualArg ast.Expr, name string) bool {
- lenArgExp, ok := actualArg.(*ast.CallExpr)
- if !ok {
- return false
- }
-
- lenFunc, ok := lenArgExp.Fun.(*ast.Ident)
- return ok && lenFunc.Name == name
-}
-
-// Check if matcher function is in one of the patterns we want to avoid
-func checkLengthMatcher(exp *ast.CallExpr, pass *analysis.Pass, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
- matcher, ok := exp.Args[0].(*ast.CallExpr)
- if !ok {
- return true
- }
-
- matcherFuncName, ok := handler.GetActualFuncName(matcher)
- if !ok {
- return true
- }
-
- switch matcherFuncName {
- case equal:
- handleEqualLenMatcher(matcher, pass, exp, handler, reportBuilder)
- return false
-
- case beZero:
- handleBeZero(exp, handler, reportBuilder)
- return false
-
- case beNumerically:
- return handleBeNumerically(matcher, pass, exp, handler, reportBuilder)
-
- case not:
- reverseAssertionFuncLogic(exp)
- exp.Args[0] = exp.Args[0].(*ast.CallExpr).Args[0]
- return checkLengthMatcher(exp, pass, handler, reportBuilder)
-
- default:
- return true
- }
-}
-
-// Check if matcher function is in one of the patterns we want to avoid
-func checkCapMatcher(exp *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
- matcher, ok := exp.Args[0].(*ast.CallExpr)
- if !ok {
- return true
- }
-
- matcherFuncName, ok := handler.GetActualFuncName(matcher)
- if !ok {
- return true
- }
-
- switch matcherFuncName {
- case equal:
- handleEqualCapMatcher(matcher, exp, handler, reportBuilder)
- return false
-
- case beZero:
- handleCapBeZero(exp, handler, reportBuilder)
- return false
-
- case beNumerically:
- return handleCapBeNumerically(matcher, exp, handler, reportBuilder)
-
- case not:
- reverseAssertionFuncLogic(exp)
- exp.Args[0] = exp.Args[0].(*ast.CallExpr).Args[0]
- return checkCapMatcher(exp, handler, reportBuilder)
-
- default:
- return true
- }
-}
-
-// Check if matcher function is in one of the patterns we want to avoid
-func checkNilMatcher(exp *ast.CallExpr, pass *analysis.Pass, nilable ast.Expr, handler gomegahandler.Handler, notEqual bool, reportBuilder *reports.Builder) bool {
- matcher, ok := exp.Args[0].(*ast.CallExpr)
- if !ok {
- return true
- }
-
- matcherFuncName, ok := handler.GetActualFuncName(matcher)
- if !ok {
- return true
- }
-
- switch matcherFuncName {
- case equal:
- handleEqualNilMatcher(matcher, pass, exp, handler, nilable, notEqual, reportBuilder)
-
- case beTrue:
- handleNilBeBoolMatcher(pass, exp, handler, nilable, notEqual, reportBuilder)
-
- case beFalse:
- reverseAssertionFuncLogic(exp)
- handleNilBeBoolMatcher(pass, exp, handler, nilable, notEqual, reportBuilder)
-
- case not:
- reverseAssertionFuncLogic(exp)
- exp.Args[0] = exp.Args[0].(*ast.CallExpr).Args[0]
- return checkNilMatcher(exp, pass, nilable, handler, notEqual, reportBuilder)
-
- default:
- return true
- }
- return false
-}
-
-func checkNilError(pass *analysis.Pass, assertionExp *ast.CallExpr, handler gomegahandler.Handler, actualArg ast.Expr, reportBuilder *reports.Builder) bool {
- if len(assertionExp.Args) == 0 {
- return true
- }
-
- equalFuncExpr, ok := assertionExp.Args[0].(*ast.CallExpr)
- if !ok {
- return true
- }
-
- funcName, ok := handler.GetActualFuncName(equalFuncExpr)
- if !ok {
- return true
- }
-
- switch funcName {
- case beNil: // no additional processing needed.
- case equal:
-
- if len(equalFuncExpr.Args) == 0 {
- return true
- }
-
- nilable, ok := equalFuncExpr.Args[0].(*ast.Ident)
- if !ok || nilable.Name != "nil" {
- return true
- }
-
- case not:
- reverseAssertionFuncLogic(assertionExp)
- assertionExp.Args[0] = assertionExp.Args[0].(*ast.CallExpr).Args[0]
- return checkNilError(pass, assertionExp, handler, actualArg, reportBuilder)
- default:
- return true
- }
-
- var newFuncName string
- if is[*ast.CallExpr](actualArg) {
- newFuncName = succeed
- } else {
- reverseAssertionFuncLogic(assertionExp)
- newFuncName = haveOccurred
- }
-
- handler.ReplaceFunction(equalFuncExpr, ast.NewIdent(newFuncName))
- equalFuncExpr.Args = nil
-
- reportBuilder.AddIssue(true, wrongErrWarningTemplate)
- return false
-}
-
-// handleAssertionOnly checks use-cases when the actual value is valid, but only the assertion should be fixed
-// it handles:
-//
-// Equal(nil) => BeNil()
-// Equal(true) => BeTrue()
-// Equal(false) => BeFalse()
-// HaveLen(0) => BeEmpty()
-func handleAssertionOnly(pass *analysis.Pass, config types.Config, expr *ast.CallExpr, handler gomegahandler.Handler, actualArg ast.Expr, reportBuilder *reports.Builder) bool {
- if len(expr.Args) == 0 {
- return true
- }
-
- equalFuncExpr, ok := expr.Args[0].(*ast.CallExpr)
- if !ok {
- return true
- }
-
- funcName, ok := handler.GetActualFuncName(equalFuncExpr)
- if !ok {
- return true
- }
-
- switch funcName {
- case equal:
- if len(equalFuncExpr.Args) == 0 {
- return true
- }
-
- tkn, ok := equalFuncExpr.Args[0].(*ast.Ident)
- if !ok {
- return true
- }
-
- var replacement string
- var template string
- switch tkn.Name {
- case "nil":
- if config.SuppressNil {
- return true
- }
- replacement = beNil
- template = wrongNilWarningTemplate
- case "true":
- replacement = beTrue
- template = wrongBoolWarningTemplate
- case "false":
- if isNegativeAssertion(expr) {
- reverseAssertionFuncLogic(expr)
- replacement = beTrue
- } else {
- replacement = beFalse
- }
- template = wrongBoolWarningTemplate
- default:
- return true
- }
-
- handler.ReplaceFunction(equalFuncExpr, ast.NewIdent(replacement))
- equalFuncExpr.Args = nil
-
- reportBuilder.AddIssue(true, template)
- return false
-
- case beFalse:
- if isNegativeAssertion(expr) {
- reverseAssertionFuncLogic(expr)
- handler.ReplaceFunction(equalFuncExpr, ast.NewIdent(beTrue))
- reportBuilder.AddIssue(true, doubleNegativeWarningTemplate)
- return false
- }
- return false
-
- case haveLen:
- if config.AllowHaveLen0 {
- return true
- }
-
- if len(equalFuncExpr.Args) > 0 {
- if isZero(pass, equalFuncExpr.Args[0]) {
- handler.ReplaceFunction(equalFuncExpr, ast.NewIdent(beEmpty))
- equalFuncExpr.Args = nil
- reportBuilder.AddIssue(true, wrongLengthWarningTemplate)
- return false
- }
- }
-
- return true
-
- case not:
- reverseAssertionFuncLogic(expr)
- expr.Args[0] = expr.Args[0].(*ast.CallExpr).Args[0]
- return handleAssertionOnly(pass, config, expr, handler, actualArg, reportBuilder)
- default:
- return true
- }
-}
-
-func isZero(pass *analysis.Pass, arg ast.Expr) bool {
- if val, ok := arg.(*ast.BasicLit); ok && val.Kind == token.INT && val.Value == "0" {
- return true
- }
- info, ok := pass.TypesInfo.Types[arg]
- if ok {
- if t, ok := info.Type.(*gotypes.Basic); ok && t.Kind() == gotypes.Int && info.Value != nil {
- if i, ok := constant.Int64Val(info.Value); ok && i == 0 {
- return true
- }
- }
- } else if val, ok := arg.(*ast.Ident); ok && val.Obj != nil && val.Obj.Kind == ast.Con {
- if spec, ok := val.Obj.Decl.(*ast.ValueSpec); ok {
- if len(spec.Values) == 1 {
- if value, ok := spec.Values[0].(*ast.BasicLit); ok && value.Kind == token.INT && value.Value == "0" {
- return true
- }
- }
- }
- }
-
- return false
-}
-
-// getActualArg checks that the function is an assertion's actual function and return the "actual" parameter. If the
-// function is not assertion's actual function, return nil.
-func getActualArg(actualExpr *ast.CallExpr, handler gomegahandler.Handler) ast.Expr {
- funcName, ok := handler.GetActualFuncName(actualExpr)
- if !ok {
- return nil
- }
-
- switch funcName {
- case expect, omega:
- return actualExpr.Args[0]
- case expectWithOffset:
- return actualExpr.Args[1]
- default:
- return nil
- }
-}
-
-// Replace the len function call by its parameter, to create a fix suggestion
-func replaceLenActualArg(actualExpr *ast.CallExpr, handler gomegahandler.Handler) {
- name, ok := handler.GetActualFuncName(actualExpr)
- if !ok {
- return
- }
-
- switch name {
- case expect, omega:
- arg := actualExpr.Args[0]
- if isActualIsLenFunc(arg) || isActualIsCapFunc(arg) {
- // replace the len function call by its parameter, to create a fix suggestion
- actualExpr.Args[0] = arg.(*ast.CallExpr).Args[0]
- }
- case expectWithOffset:
- arg := actualExpr.Args[1]
- if isActualIsLenFunc(arg) || isActualIsCapFunc(arg) {
- // replace the len function call by its parameter, to create a fix suggestion
- actualExpr.Args[1] = arg.(*ast.CallExpr).Args[0]
- }
- }
-}
-
-// Replace the nil comparison with the compared object, to create a fix suggestion
-func replaceNilActualArg(actualExpr *ast.CallExpr, handler gomegahandler.Handler, nilable ast.Expr) bool {
- actualFuncName, ok := handler.GetActualFuncName(actualExpr)
- if !ok {
- return false
- }
-
- switch actualFuncName {
- case expect, omega:
- actualExpr.Args[0] = nilable
- return true
-
- case expectWithOffset:
- actualExpr.Args[1] = nilable
- return true
-
- default:
- return false
- }
-}
-
-// For the BeNumerically matcher, we want to avoid the assertion of length to be > 0 or >= 1, or just == number
-func handleBeNumerically(matcher *ast.CallExpr, pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
- opExp, ok1 := matcher.Args[0].(*ast.BasicLit)
- valExp, ok2 := matcher.Args[1].(*ast.BasicLit)
-
- if ok1 && ok2 {
- op := opExp.Value
- val := valExp.Value
-
- if (op == `">"` && val == "0") || (op == `">="` && val == "1") {
- reverseAssertionFuncLogic(exp)
- handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(beEmpty))
- exp.Args[0].(*ast.CallExpr).Args = nil
- } else if op == `"=="` {
- chooseNumericMatcher(pass, exp, handler, valExp)
- } else if op == `"!="` {
- reverseAssertionFuncLogic(exp)
- chooseNumericMatcher(pass, exp, handler, valExp)
- } else {
- return true
- }
-
- reportLengthAssertion(exp, handler, reportBuilder)
- return false
- }
- return true
-}
-
-// For the BeNumerically matcher, we want to avoid the assertion of length to be > 0 or >= 1, or just == number
-func handleCapBeNumerically(matcher *ast.CallExpr, exp *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) bool {
- opExp, ok1 := matcher.Args[0].(*ast.BasicLit)
- valExp, ok2 := matcher.Args[1].(*ast.BasicLit)
-
- if ok1 && ok2 {
- op := opExp.Value
- val := valExp.Value
-
- if (op == `">"` && val == "0") || (op == `">="` && val == "1") {
- reverseAssertionFuncLogic(exp)
- handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(haveCap))
- exp.Args[0].(*ast.CallExpr).Args = []ast.Expr{&ast.BasicLit{Kind: token.INT, Value: "0"}}
- } else if op == `"=="` {
- replaceNumericCapMatcher(exp, handler, valExp)
- } else if op == `"!="` {
- reverseAssertionFuncLogic(exp)
- replaceNumericCapMatcher(exp, handler, valExp)
- } else {
- return true
- }
-
- reportCapAssertion(exp, handler, reportBuilder)
- return false
- }
- return true
-}
-
-func chooseNumericMatcher(pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, valExp ast.Expr) {
- caller := exp.Args[0].(*ast.CallExpr)
- if isZero(pass, valExp) {
- handler.ReplaceFunction(caller, ast.NewIdent(beEmpty))
- exp.Args[0].(*ast.CallExpr).Args = nil
- } else {
- handler.ReplaceFunction(caller, ast.NewIdent(haveLen))
- exp.Args[0].(*ast.CallExpr).Args = []ast.Expr{valExp}
- }
-}
-
-func replaceNumericCapMatcher(exp *ast.CallExpr, handler gomegahandler.Handler, valExp ast.Expr) {
- caller := exp.Args[0].(*ast.CallExpr)
- handler.ReplaceFunction(caller, ast.NewIdent(haveCap))
- exp.Args[0].(*ast.CallExpr).Args = []ast.Expr{valExp}
-}
-
-func reverseAssertionFuncLogic(exp *ast.CallExpr) {
- assertionFunc := exp.Fun.(*ast.SelectorExpr).Sel
- assertionFunc.Name = reverseassertion.ChangeAssertionLogic(assertionFunc.Name)
-}
-
-func isNegativeAssertion(exp *ast.CallExpr) bool {
- assertionFunc := exp.Fun.(*ast.SelectorExpr).Sel
- return reverseassertion.IsNegativeLogic(assertionFunc.Name)
-}
-
-func handleEqualLenMatcher(matcher *ast.CallExpr, pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) {
- equalTo, ok := matcher.Args[0].(*ast.BasicLit)
- if ok {
- chooseNumericMatcher(pass, exp, handler, equalTo)
- } else {
- handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(haveLen))
- exp.Args[0].(*ast.CallExpr).Args = []ast.Expr{matcher.Args[0]}
- }
- reportLengthAssertion(exp, handler, reportBuilder)
-}
-
-func handleEqualCapMatcher(matcher *ast.CallExpr, exp *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) {
- handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(haveCap))
- exp.Args[0].(*ast.CallExpr).Args = []ast.Expr{matcher.Args[0]}
- reportCapAssertion(exp, handler, reportBuilder)
-}
-
-func handleBeZero(exp *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) {
- exp.Args[0].(*ast.CallExpr).Args = nil
- handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(beEmpty))
- reportLengthAssertion(exp, handler, reportBuilder)
-}
-
-func handleCapBeZero(exp *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) {
- exp.Args[0].(*ast.CallExpr).Args = nil
- handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(haveCap))
- exp.Args[0].(*ast.CallExpr).Args = []ast.Expr{&ast.BasicLit{Kind: token.INT, Value: "0"}}
- reportCapAssertion(exp, handler, reportBuilder)
-}
-
-func handleEqualNilMatcher(matcher *ast.CallExpr, pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, nilable ast.Expr, notEqual bool, reportBuilder *reports.Builder) {
- equalTo, ok := matcher.Args[0].(*ast.Ident)
- if !ok {
- return
- }
-
- if equalTo.Name == "false" {
- reverseAssertionFuncLogic(exp)
- } else if equalTo.Name != "true" {
- return
- }
-
- newFuncName, isItError := handleNilComparisonErr(pass, exp, nilable)
-
- handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(newFuncName))
- exp.Args[0].(*ast.CallExpr).Args = nil
-
- reportNilAssertion(exp, handler, nilable, notEqual, isItError, reportBuilder)
-}
-
-func handleNilBeBoolMatcher(pass *analysis.Pass, exp *ast.CallExpr, handler gomegahandler.Handler, nilable ast.Expr, notEqual bool, reportBuilder *reports.Builder) {
- newFuncName, isItError := handleNilComparisonErr(pass, exp, nilable)
- handler.ReplaceFunction(exp.Args[0].(*ast.CallExpr), ast.NewIdent(newFuncName))
- exp.Args[0].(*ast.CallExpr).Args = nil
-
- reportNilAssertion(exp, handler, nilable, notEqual, isItError, reportBuilder)
-}
-
-func handleNilComparisonErr(pass *analysis.Pass, exp *ast.CallExpr, nilable ast.Expr) (string, bool) {
- newFuncName := beNil
- isItError := isExprError(pass, nilable)
- if isItError {
- if is[*ast.CallExpr](nilable) {
- newFuncName = succeed
- } else {
- reverseAssertionFuncLogic(exp)
- newFuncName = haveOccurred
- }
- }
-
- return newFuncName, isItError
-}
-
-func isAssertionFunc(name string) bool {
- switch name {
- case "To", "ToNot", "NotTo", "Should", "ShouldNot":
- return true
- }
- return false
-}
-
-func reportLengthAssertion(expr *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) {
- actualExpr := handler.GetActualExpr(expr.Fun.(*ast.SelectorExpr))
- replaceLenActualArg(actualExpr, handler)
-
- reportBuilder.AddIssue(true, wrongLengthWarningTemplate)
-}
-
-func reportCapAssertion(expr *ast.CallExpr, handler gomegahandler.Handler, reportBuilder *reports.Builder) {
- actualExpr := handler.GetActualExpr(expr.Fun.(*ast.SelectorExpr))
- replaceLenActualArg(actualExpr, handler)
-
- reportBuilder.AddIssue(true, wrongCapWarningTemplate)
-}
-
-func reportNilAssertion(expr *ast.CallExpr, handler gomegahandler.Handler, nilable ast.Expr, notEqual bool, isItError bool, reportBuilder *reports.Builder) {
- actualExpr := handler.GetActualExpr(expr.Fun.(*ast.SelectorExpr))
- changed := replaceNilActualArg(actualExpr, handler, nilable)
- if !changed {
- return
- }
-
- if notEqual {
- reverseAssertionFuncLogic(expr)
- }
- template := wrongNilWarningTemplate
- if isItError {
- template = wrongErrWarningTemplate
- }
-
- reportBuilder.AddIssue(true, template)
-}
-
-func reportNewName(pass *analysis.Pass, id *ast.Ident, newName string, messageTemplate, oldExpr string) {
- pass.Report(analysis.Diagnostic{
- Pos: id.Pos(),
- Message: fmt.Sprintf(messageTemplate, newName),
- SuggestedFixes: []analysis.SuggestedFix{
- {
- Message: fmt.Sprintf("should replace %s with %s", oldExpr, newName),
- TextEdits: []analysis.TextEdit{
- {
- Pos: id.Pos(),
- End: id.End(),
- NewText: []byte(newName),
- },
- },
- },
- },
- })
-}
-
-func reportNoFix(pass *analysis.Pass, pos token.Pos, message string, args ...any) {
- if len(args) > 0 {
- message = fmt.Sprintf(message, args...)
- }
-
- pass.Report(analysis.Diagnostic{
- Pos: pos,
- Message: message,
- })
-}
-
-func getNilableFromComparison(actualArg ast.Expr) (ast.Expr, token.Token) {
- bin, ok := actualArg.(*ast.BinaryExpr)
- if !ok {
- return nil, token.ILLEGAL
- }
-
- if bin.Op == token.EQL || bin.Op == token.NEQ {
- if isNil(bin.Y) {
- return bin.X, bin.Op
- } else if isNil(bin.X) {
- return bin.Y, bin.Op
- }
- }
-
- return nil, token.ILLEGAL
-}
-
-func isNil(expr ast.Expr) bool {
- nilObject, ok := expr.(*ast.Ident)
- return ok && nilObject.Name == "nil" && nilObject.Obj == nil
-}
-
-func isComparison(pass *analysis.Pass, actualArg ast.Expr) (ast.Expr, ast.Expr, token.Token, bool) {
- bin, ok := actualArg.(*ast.BinaryExpr)
- if !ok {
- return nil, nil, token.ILLEGAL, false
- }
-
- first, second, op := bin.X, bin.Y, bin.Op
- replace := false
- switch realFirst := first.(type) {
- case *ast.Ident: // check if const
- info, ok := pass.TypesInfo.Types[realFirst]
- if ok {
- if is[*gotypes.Basic](info.Type) && info.Value != nil {
- replace = true
- }
- }
-
- case *ast.BasicLit:
- replace = true
- }
-
- if replace {
- first, second = second, first
- }
-
- switch op {
- case token.EQL:
- case token.NEQ:
- case token.GTR, token.GEQ, token.LSS, token.LEQ:
- if replace {
- op = reverseassertion.ChangeCompareOperator(op)
- }
- default:
- return nil, nil, token.ILLEGAL, false
- }
- return first, second, op, true
-}
-
-func goFmt(fset *token.FileSet, x ast.Expr) string {
- var b bytes.Buffer
- _ = printer.Fprint(&b, fset, x)
- return b.String()
-}
-
-func isExprError(pass *analysis.Pass, expr ast.Expr) bool {
- actualArgType := pass.TypesInfo.TypeOf(expr)
- switch t := actualArgType.(type) {
- case *gotypes.Named:
- if interfaces.ImplementsError(actualArgType) {
- return true
- }
- case *gotypes.Tuple:
- if t.Len() > 0 {
- switch t0 := t.At(0).Type().(type) {
- case *gotypes.Named, *gotypes.Pointer:
- if interfaces.ImplementsError(t0) {
- return true
- }
- }
- }
- }
- return false
-}
-
-func isPointer(pass *analysis.Pass, expr ast.Expr) bool {
- t := pass.TypesInfo.TypeOf(expr)
- return is[*gotypes.Pointer](t)
-}
-
-func isInterface(pass *analysis.Pass, expr ast.Expr) bool {
- t := pass.TypesInfo.TypeOf(expr)
- return gotypes.IsInterface(t)
-}
-
-func isNumeric(pass *analysis.Pass, node ast.Expr) bool {
- t := pass.TypesInfo.TypeOf(node)
-
- switch t.String() {
- case "int", "uint", "int8", "uint8", "int16", "uint16", "int32", "uint32", "int64", "uint64", "float32", "float64":
- return true
- }
- return false
-}
-
-func checkNoAssertion(pass *analysis.Pass, expr *ast.CallExpr, handler gomegahandler.Handler) {
- funcName, ok := handler.GetActualFuncName(expr)
- if ok {
- var allowedFunction string
- switch funcName {
- case expect, expectWithOffset:
- allowedFunction = `"To()", "ToNot()" or "NotTo()"`
- case eventually, eventuallyWithOffset, consistently, consistentlyWithOffset:
- allowedFunction = `"Should()" or "ShouldNot()"`
- case omega:
- allowedFunction = `"Should()", "To()", "ShouldNot()", "ToNot()" or "NotTo()"`
- default:
- return
- }
- reportNoFix(pass, expr.Pos(), missingAssertionMessage, funcName, allowedFunction)
- }
-}
-
-func is[T any](x any) bool {
- _, matchType := x.(T)
- return matchType
+ return timePkg
}
diff --git a/vendor/github.com/nunnatsa/ginkgolinter/types/config.go b/vendor/github.com/nunnatsa/ginkgolinter/types/config.go
index b6838e524..0aadd3416 100644
--- a/vendor/github.com/nunnatsa/ginkgolinter/types/config.go
+++ b/vendor/github.com/nunnatsa/ginkgolinter/types/config.go
@@ -28,6 +28,7 @@ type Config struct {
ForceExpectTo Boolean
ValidateAsyncIntervals Boolean
ForbidSpecPollution Boolean
+ ForceSucceedForFuncs Boolean
}
func (s *Config) AllTrue() bool {
@@ -47,6 +48,7 @@ func (s *Config) Clone() Config {
ForceExpectTo: s.ForceExpectTo,
ValidateAsyncIntervals: s.ValidateAsyncIntervals,
ForbidSpecPollution: s.ForbidSpecPollution,
+ ForceSucceedForFuncs: s.ForceSucceedForFuncs,
}
}
diff --git a/vendor/github.com/raeperd/recvcheck/.gitignore b/vendor/github.com/raeperd/recvcheck/.gitignore
new file mode 100644
index 000000000..035dc07e3
--- /dev/null
+++ b/vendor/github.com/raeperd/recvcheck/.gitignore
@@ -0,0 +1,2 @@
+coverage.txt
+cmd/recvcheck/recvcheck
diff --git a/vendor/github.com/lufeee/execinquery/LICENSE b/vendor/github.com/raeperd/recvcheck/LICENSE
index b6ab14aec..a46db59be 100644
--- a/vendor/github.com/lufeee/execinquery/LICENSE
+++ b/vendor/github.com/raeperd/recvcheck/LICENSE
@@ -1,6 +1,6 @@
MIT License
-Copyright (c) 2022 lufe
+Copyright (c) 2024 raeperd
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
diff --git a/vendor/github.com/raeperd/recvcheck/Makefile b/vendor/github.com/raeperd/recvcheck/Makefile
new file mode 100644
index 000000000..45ca47d9b
--- /dev/null
+++ b/vendor/github.com/raeperd/recvcheck/Makefile
@@ -0,0 +1,14 @@
+all: build test lint
+
+download:
+ go mod download
+
+build: download
+ go build -C cmd/recvcheck
+
+test:
+ go test -race -coverprofile=coverage.txt .
+
+lint:
+ golangci-lint run
+
diff --git a/vendor/github.com/raeperd/recvcheck/README.md b/vendor/github.com/raeperd/recvcheck/README.md
new file mode 100644
index 000000000..db84fe38e
--- /dev/null
+++ b/vendor/github.com/raeperd/recvcheck/README.md
@@ -0,0 +1,52 @@
+# recvcheck
+[![.github/workflows/build.yaml](https://github.com/raeperd/recvcheck/actions/workflows/build.yaml/badge.svg)](https://github.com/raeperd/recvcheck/actions/workflows/build.yaml) [![Go Report Card](https://goreportcard.com/badge/github.com/raeperd/recvcheck)](https://goreportcard.com/report/github.com/raeperd/recvcheck) [![codecov](https://codecov.io/gh/raeperd/recvcheck/graph/badge.svg?token=fPYgEHlq1e)](https://codecov.io/gh/raeperd/recvcheck)
+Golang linter for check receiver type in method
+
+## Motivtation
+From [Go Wiki: Go Code Review Comments - The Go Programming Language](https://go.dev/wiki/CodeReviewComments#receiver-type)
+> Don’t mix receiver types. Choose either pointers or struct types for all available method
+
+Following code from [Dave Chenney](https://dave.cheney.net/2015/11/18/wednesday-pop-quiz-spot-the-race) causes data race. Could you find it?
+This linter does it for you.
+
+```go
+package main
+
+import (
+ "fmt"
+ "time"
+)
+
+type RPC struct {
+ result int
+ done chan struct{}
+}
+
+func (rpc *RPC) compute() {
+ time.Sleep(time.Second) // strenuous computation intensifies
+ rpc.result = 42
+ close(rpc.done)
+}
+
+func (RPC) version() int {
+ return 1 // never going to need to change this
+}
+
+func main() {
+ rpc := &RPC{done: make(chan struct{})}
+
+ go rpc.compute() // kick off computation in the background
+ version := rpc.version() // grab some other information while we're waiting
+ <-rpc.done // wait for computation to finish
+ result := rpc.result
+
+ fmt.Printf("RPC computation complete, result: %d, version: %d\n", result, version)
+}
+```
+
+## References
+- [Is there a way to detect following data race code using golangci-lint or other linter?? · golangci/golangci-lint · Discussion #5006](https://github.com/golangci/golangci-lint/discussions/5006)
+ - [Wednesday pop quiz: spot the race | Dave Cheney](https://dave.cheney.net/2015/11/18/wednesday-pop-quiz-spot-the-race)
+
+
+
diff --git a/vendor/github.com/raeperd/recvcheck/analyzer.go b/vendor/github.com/raeperd/recvcheck/analyzer.go
new file mode 100644
index 000000000..e80dfc577
--- /dev/null
+++ b/vendor/github.com/raeperd/recvcheck/analyzer.go
@@ -0,0 +1,69 @@
+package recvcheck
+
+import (
+ "go/ast"
+
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/inspector"
+)
+
+var Analyzer = &analysis.Analyzer{
+ Name: "recvcheck",
+ Doc: "checks for receiver type consistency",
+ Run: run,
+ Requires: []*analysis.Analyzer{inspect.Analyzer},
+}
+
+func run(pass *analysis.Pass) (any, error) {
+ inspector := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
+ structs := map[string]*structType{}
+ inspector.Preorder([]ast.Node{(*ast.FuncDecl)(nil)}, func(n ast.Node) {
+ funcDecl, ok := n.(*ast.FuncDecl)
+ if !ok || funcDecl.Recv == nil || len(funcDecl.Recv.List) != 1 {
+ return
+ }
+
+ var recv *ast.Ident
+ var isStar bool
+ switch recvType := funcDecl.Recv.List[0].Type.(type) {
+ case *ast.StarExpr:
+ isStar = true
+ if recv, ok = recvType.X.(*ast.Ident); !ok {
+ return
+ }
+ case *ast.Ident:
+ recv = recvType
+ default:
+ return
+ }
+
+ var st *structType
+ st, ok = structs[recv.Name]
+ if !ok {
+ structs[recv.Name] = &structType{recv: recv.Name}
+ st = structs[recv.Name]
+ }
+
+ if isStar {
+ st.numStarMethod++
+ } else {
+ st.numTypeMethod++
+ }
+ })
+
+ for _, st := range structs {
+ if st.numStarMethod > 0 && st.numTypeMethod > 0 {
+ pass.Reportf(pass.Pkg.Scope().Lookup(st.recv).Pos(), "the methods of %q use pointer receiver and non-pointer receiver.", st.recv)
+ }
+ }
+
+ return nil, nil
+}
+
+type structType struct {
+ recv string
+ numStarMethod int
+ numTypeMethod int
+}
diff --git a/vendor/github.com/rivo/uniseg/README.md b/vendor/github.com/rivo/uniseg/README.md
index 25e934687..a8191b815 100644
--- a/vendor/github.com/rivo/uniseg/README.md
+++ b/vendor/github.com/rivo/uniseg/README.md
@@ -3,7 +3,7 @@
[![Go Reference](https://pkg.go.dev/badge/github.com/rivo/uniseg.svg)](https://pkg.go.dev/github.com/rivo/uniseg)
[![Go Report](https://img.shields.io/badge/go%20report-A%2B-brightgreen.svg)](https://goreportcard.com/report/github.com/rivo/uniseg)
-This Go package implements Unicode Text Segmentation according to [Unicode Standard Annex #29](https://unicode.org/reports/tr29/), Unicode Line Breaking according to [Unicode Standard Annex #14](https://unicode.org/reports/tr14/) (Unicode version 14.0.0), and monospace font string width calculation similar to [wcwidth](https://man7.org/linux/man-pages/man3/wcwidth.3.html).
+This Go package implements Unicode Text Segmentation according to [Unicode Standard Annex #29](https://unicode.org/reports/tr29/), Unicode Line Breaking according to [Unicode Standard Annex #14](https://unicode.org/reports/tr14/) (Unicode version 15.0.0), and monospace font string width calculation similar to [wcwidth](https://man7.org/linux/man-pages/man3/wcwidth.3.html).
## Background
@@ -73,7 +73,7 @@ for gr.Next() {
### Using the [`Step`](https://pkg.go.dev/github.com/rivo/uniseg#Step) or [`StepString`](https://pkg.go.dev/github.com/rivo/uniseg#StepString) Function
-This is orders of magnitude faster than the `Graphemes` class, but it requires the handling of states and boundaries:
+This avoids allocating a new `Graphemes` object but it requires the handling of states and boundaries:
```go
str := "🇩🇪🏳️‍🌈"
@@ -88,29 +88,7 @@ for len(str) > 0 {
### Advanced Examples
-Breaking into grapheme clusters and evaluating line breaks:
-
-```go
-str := "First line.\nSecond line."
-state := -1
-var (
- c string
- boundaries int
-)
-for len(str) > 0 {
- c, str, boundaries, state = uniseg.StepString(str, state)
- fmt.Print(c)
- if boundaries&uniseg.MaskLine == uniseg.LineCanBreak {
- fmt.Print("|")
- } else if boundaries&uniseg.MaskLine == uniseg.LineMustBreak {
- fmt.Print("‖")
- }
-}
-// First |line.
-// ‖Second |line.‖
-```
-
-If you're only interested in word segmentation, use [`FirstWord`](https://pkg.go.dev/github.com/rivo/uniseg#FirstWord) or [`FirstWordInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstWordInString):
+The [`Graphemes`](https://pkg.go.dev/github.com/rivo/uniseg#Graphemes) class offers the most convenient way to access all functionality of this package. But in some cases, it may be better to use the specialized functions directly. For example, if you're only interested in word segmentation, use [`FirstWord`](https://pkg.go.dev/github.com/rivo/uniseg#FirstWord) or [`FirstWordInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstWordInString):
```go
str := "Hello, world!"
@@ -133,6 +111,8 @@ Similarly, use
- [`FirstSentence`](https://pkg.go.dev/github.com/rivo/uniseg#FirstSentence) or [`FirstSentenceInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstSentenceInString) for sentence segmentation only, and
- [`FirstLineSegment`](https://pkg.go.dev/github.com/rivo/uniseg#FirstLineSegment) or [`FirstLineSegmentInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstLineSegmentInString) for line breaking / word wrapping (although using [`Step`](https://pkg.go.dev/github.com/rivo/uniseg#Step) or [`StepString`](https://pkg.go.dev/github.com/rivo/uniseg#StepString) is preferred as it will observe grapheme cluster boundaries).
+If you're only interested in the width of characters, use [`FirstGraphemeCluster`](https://pkg.go.dev/github.com/rivo/uniseg#FirstGraphemeCluster) or [`FirstGraphemeClusterInString`](https://pkg.go.dev/github.com/rivo/uniseg#FirstGraphemeClusterInString). It is much faster than using [`Step`](https://pkg.go.dev/github.com/rivo/uniseg#Step), [`StepString`](https://pkg.go.dev/github.com/rivo/uniseg#StepString), or the [`Graphemes`](https://pkg.go.dev/github.com/rivo/uniseg#Graphemes) class because it does not include the logic for word / sentence / line boundaries.
+
Finally, if you need to reverse a string while preserving grapheme clusters, use [`ReverseString`](https://pkg.go.dev/github.com/rivo/uniseg#ReverseString):
```go
diff --git a/vendor/github.com/rivo/uniseg/eastasianwidth.go b/vendor/github.com/rivo/uniseg/eastasianwidth.go
index 661934ac2..5fc54d991 100644
--- a/vendor/github.com/rivo/uniseg/eastasianwidth.go
+++ b/vendor/github.com/rivo/uniseg/eastasianwidth.go
@@ -1,13 +1,13 @@
-package uniseg
-
// Code generated via go generate from gen_properties.go. DO NOT EDIT.
+package uniseg
+
// eastAsianWidth are taken from
-// https://www.unicode.org/Public/14.0.0/ucd/EastAsianWidth.txt
+// https://www.unicode.org/Public/15.0.0/ucd/EastAsianWidth.txt
// and
-// https://unicode.org/Public/14.0.0/ucd/emoji/emoji-data.txt
+// https://unicode.org/Public/15.0.0/ucd/emoji/emoji-data.txt
// ("Extended_Pictographic" only)
-// on September 10, 2022. See https://www.unicode.org/license.html for the Unicode
+// on September 5, 2023. See https://www.unicode.org/license.html for the Unicode
// license agreement.
var eastAsianWidth = [][3]int{
{0x0000, 0x001F, prN}, // Cc [32] <control-0000>..<control-001F>
@@ -504,6 +504,7 @@ var eastAsianWidth = [][3]int{
{0x0CE2, 0x0CE3, prN}, // Mn [2] KANNADA VOWEL SIGN VOCALIC L..KANNADA VOWEL SIGN VOCALIC LL
{0x0CE6, 0x0CEF, prN}, // Nd [10] KANNADA DIGIT ZERO..KANNADA DIGIT NINE
{0x0CF1, 0x0CF2, prN}, // Lo [2] KANNADA SIGN JIHVAMULIYA..KANNADA SIGN UPADHMANIYA
+ {0x0CF3, 0x0CF3, prN}, // Mc KANNADA SIGN COMBINING ANUSVARA ABOVE RIGHT
{0x0D00, 0x0D01, prN}, // Mn [2] MALAYALAM SIGN COMBINING ANUSVARA ABOVE..MALAYALAM SIGN CANDRABINDU
{0x0D02, 0x0D03, prN}, // Mc [2] MALAYALAM SIGN ANUSVARA..MALAYALAM SIGN VISARGA
{0x0D04, 0x0D0C, prN}, // Lo [9] MALAYALAM LETTER VEDIC ANUSVARA..MALAYALAM LETTER VOCALIC L
@@ -565,7 +566,7 @@ var eastAsianWidth = [][3]int{
{0x0EBD, 0x0EBD, prN}, // Lo LAO SEMIVOWEL SIGN NYO
{0x0EC0, 0x0EC4, prN}, // Lo [5] LAO VOWEL SIGN E..LAO VOWEL SIGN AI
{0x0EC6, 0x0EC6, prN}, // Lm LAO KO LA
- {0x0EC8, 0x0ECD, prN}, // Mn [6] LAO TONE MAI EK..LAO NIGGAHITA
+ {0x0EC8, 0x0ECE, prN}, // Mn [7] LAO TONE MAI EK..LAO YAMAKKAN
{0x0ED0, 0x0ED9, prN}, // Nd [10] LAO DIGIT ZERO..LAO DIGIT NINE
{0x0EDC, 0x0EDF, prN}, // Lo [4] LAO HO NO..LAO LETTER KHMU NYO
{0x0F00, 0x0F00, prN}, // Lo TIBETAN SYLLABLE OM
@@ -1916,6 +1917,7 @@ var eastAsianWidth = [][3]int{
{0x10EAB, 0x10EAC, prN}, // Mn [2] YEZIDI COMBINING HAMZA MARK..YEZIDI COMBINING MADDA MARK
{0x10EAD, 0x10EAD, prN}, // Pd YEZIDI HYPHENATION MARK
{0x10EB0, 0x10EB1, prN}, // Lo [2] YEZIDI LETTER LAM WITH DOT ABOVE..YEZIDI LETTER YOT WITH CIRCUMFLEX ABOVE
+ {0x10EFD, 0x10EFF, prN}, // Mn [3] ARABIC SMALL LOW WORD SAKTA..ARABIC SMALL LOW WORD MADDA
{0x10F00, 0x10F1C, prN}, // Lo [29] OLD SOGDIAN LETTER ALEPH..OLD SOGDIAN LETTER FINAL TAW WITH VERTICAL TAIL
{0x10F1D, 0x10F26, prN}, // No [10] OLD SOGDIAN NUMBER ONE..OLD SOGDIAN FRACTION ONE HALF
{0x10F27, 0x10F27, prN}, // Lo OLD SOGDIAN LIGATURE AYIN-DALETH
@@ -1998,6 +2000,8 @@ var eastAsianWidth = [][3]int{
{0x11236, 0x11237, prN}, // Mn [2] KHOJKI SIGN NUKTA..KHOJKI SIGN SHADDA
{0x11238, 0x1123D, prN}, // Po [6] KHOJKI DANDA..KHOJKI ABBREVIATION SIGN
{0x1123E, 0x1123E, prN}, // Mn KHOJKI SIGN SUKUN
+ {0x1123F, 0x11240, prN}, // Lo [2] KHOJKI LETTER QA..KHOJKI LETTER SHORT I
+ {0x11241, 0x11241, prN}, // Mn KHOJKI VOWEL SIGN VOCALIC R
{0x11280, 0x11286, prN}, // Lo [7] MULTANI LETTER A..MULTANI LETTER GA
{0x11288, 0x11288, prN}, // Lo MULTANI LETTER GHA
{0x1128A, 0x1128D, prN}, // Lo [4] MULTANI LETTER CA..MULTANI LETTER JJA
@@ -2160,6 +2164,7 @@ var eastAsianWidth = [][3]int{
{0x11A9E, 0x11AA2, prN}, // Po [5] SOYOMBO HEAD MARK WITH MOON AND SUN AND TRIPLE FLAME..SOYOMBO TERMINAL MARK-2
{0x11AB0, 0x11ABF, prN}, // Lo [16] CANADIAN SYLLABICS NATTILIK HI..CANADIAN SYLLABICS SPA
{0x11AC0, 0x11AF8, prN}, // Lo [57] PAU CIN HAU LETTER PA..PAU CIN HAU GLOTTAL STOP FINAL
+ {0x11B00, 0x11B09, prN}, // Po [10] DEVANAGARI HEAD MARK..DEVANAGARI SIGN MINDU
{0x11C00, 0x11C08, prN}, // Lo [9] BHAIKSUKI LETTER A..BHAIKSUKI LETTER VOCALIC L
{0x11C0A, 0x11C2E, prN}, // Lo [37] BHAIKSUKI LETTER E..BHAIKSUKI LETTER HA
{0x11C2F, 0x11C2F, prN}, // Mc BHAIKSUKI VOWEL SIGN AA
@@ -2205,6 +2210,19 @@ var eastAsianWidth = [][3]int{
{0x11EF3, 0x11EF4, prN}, // Mn [2] MAKASAR VOWEL SIGN I..MAKASAR VOWEL SIGN U
{0x11EF5, 0x11EF6, prN}, // Mc [2] MAKASAR VOWEL SIGN E..MAKASAR VOWEL SIGN O
{0x11EF7, 0x11EF8, prN}, // Po [2] MAKASAR PASSIMBANG..MAKASAR END OF SECTION
+ {0x11F00, 0x11F01, prN}, // Mn [2] KAWI SIGN CANDRABINDU..KAWI SIGN ANUSVARA
+ {0x11F02, 0x11F02, prN}, // Lo KAWI SIGN REPHA
+ {0x11F03, 0x11F03, prN}, // Mc KAWI SIGN VISARGA
+ {0x11F04, 0x11F10, prN}, // Lo [13] KAWI LETTER A..KAWI LETTER O
+ {0x11F12, 0x11F33, prN}, // Lo [34] KAWI LETTER KA..KAWI LETTER JNYA
+ {0x11F34, 0x11F35, prN}, // Mc [2] KAWI VOWEL SIGN AA..KAWI VOWEL SIGN ALTERNATE AA
+ {0x11F36, 0x11F3A, prN}, // Mn [5] KAWI VOWEL SIGN I..KAWI VOWEL SIGN VOCALIC R
+ {0x11F3E, 0x11F3F, prN}, // Mc [2] KAWI VOWEL SIGN E..KAWI VOWEL SIGN AI
+ {0x11F40, 0x11F40, prN}, // Mn KAWI VOWEL SIGN EU
+ {0x11F41, 0x11F41, prN}, // Mc KAWI SIGN KILLER
+ {0x11F42, 0x11F42, prN}, // Mn KAWI CONJOINER
+ {0x11F43, 0x11F4F, prN}, // Po [13] KAWI DANDA..KAWI PUNCTUATION CLOSING SPIRAL
+ {0x11F50, 0x11F59, prN}, // Nd [10] KAWI DIGIT ZERO..KAWI DIGIT NINE
{0x11FB0, 0x11FB0, prN}, // Lo LISU LETTER YHA
{0x11FC0, 0x11FD4, prN}, // No [21] TAMIL FRACTION ONE THREE-HUNDRED-AND-TWENTIETH..TAMIL FRACTION DOWNSCALING FACTOR KIIZH
{0x11FD5, 0x11FDC, prN}, // So [8] TAMIL SIGN NEL..TAMIL SIGN MUKKURUNI
@@ -2217,8 +2235,11 @@ var eastAsianWidth = [][3]int{
{0x12480, 0x12543, prN}, // Lo [196] CUNEIFORM SIGN AB TIMES NUN TENU..CUNEIFORM SIGN ZU5 TIMES THREE DISH TENU
{0x12F90, 0x12FF0, prN}, // Lo [97] CYPRO-MINOAN SIGN CM001..CYPRO-MINOAN SIGN CM114
{0x12FF1, 0x12FF2, prN}, // Po [2] CYPRO-MINOAN SIGN CM301..CYPRO-MINOAN SIGN CM302
- {0x13000, 0x1342E, prN}, // Lo [1071] EGYPTIAN HIEROGLYPH A001..EGYPTIAN HIEROGLYPH AA032
- {0x13430, 0x13438, prN}, // Cf [9] EGYPTIAN HIEROGLYPH VERTICAL JOINER..EGYPTIAN HIEROGLYPH END SEGMENT
+ {0x13000, 0x1342F, prN}, // Lo [1072] EGYPTIAN HIEROGLYPH A001..EGYPTIAN HIEROGLYPH V011D
+ {0x13430, 0x1343F, prN}, // Cf [16] EGYPTIAN HIEROGLYPH VERTICAL JOINER..EGYPTIAN HIEROGLYPH END WALLED ENCLOSURE
+ {0x13440, 0x13440, prN}, // Mn EGYPTIAN HIEROGLYPH MIRROR HORIZONTALLY
+ {0x13441, 0x13446, prN}, // Lo [6] EGYPTIAN HIEROGLYPH FULL BLANK..EGYPTIAN HIEROGLYPH WIDE LOST SIGN
+ {0x13447, 0x13455, prN}, // Mn [15] EGYPTIAN HIEROGLYPH MODIFIER DAMAGED AT TOP START..EGYPTIAN HIEROGLYPH MODIFIER DAMAGED
{0x14400, 0x14646, prN}, // Lo [583] ANATOLIAN HIEROGLYPH A001..ANATOLIAN HIEROGLYPH A530
{0x16800, 0x16A38, prN}, // Lo [569] BAMUM LETTER PHASE-A NGKUE MFON..BAMUM LETTER PHASE-F VUEQ
{0x16A40, 0x16A5E, prN}, // Lo [31] MRO LETTER TA..MRO LETTER TEK
@@ -2263,7 +2284,9 @@ var eastAsianWidth = [][3]int{
{0x1AFFD, 0x1AFFE, prW}, // Lm [2] KATAKANA LETTER MINNAN NASALIZED TONE-7..KATAKANA LETTER MINNAN NASALIZED TONE-8
{0x1B000, 0x1B0FF, prW}, // Lo [256] KATAKANA LETTER ARCHAIC E..HENTAIGANA LETTER RE-2
{0x1B100, 0x1B122, prW}, // Lo [35] HENTAIGANA LETTER RE-3..KATAKANA LETTER ARCHAIC WU
+ {0x1B132, 0x1B132, prW}, // Lo HIRAGANA LETTER SMALL KO
{0x1B150, 0x1B152, prW}, // Lo [3] HIRAGANA LETTER SMALL WI..HIRAGANA LETTER SMALL WO
+ {0x1B155, 0x1B155, prW}, // Lo KATAKANA LETTER SMALL KO
{0x1B164, 0x1B167, prW}, // Lo [4] KATAKANA LETTER SMALL WI..KATAKANA LETTER SMALL N
{0x1B170, 0x1B2FB, prW}, // Lo [396] NUSHU CHARACTER-1B170..NUSHU CHARACTER-1B2FB
{0x1BC00, 0x1BC6A, prN}, // Lo [107] DUPLOYAN LETTER H..DUPLOYAN LETTER VOCALIC M
@@ -2294,6 +2317,7 @@ var eastAsianWidth = [][3]int{
{0x1D200, 0x1D241, prN}, // So [66] GREEK VOCAL NOTATION SYMBOL-1..GREEK INSTRUMENTAL NOTATION SYMBOL-54
{0x1D242, 0x1D244, prN}, // Mn [3] COMBINING GREEK MUSICAL TRISEME..COMBINING GREEK MUSICAL PENTASEME
{0x1D245, 0x1D245, prN}, // So GREEK MUSICAL LEIMMA
+ {0x1D2C0, 0x1D2D3, prN}, // No [20] KAKTOVIK NUMERAL ZERO..KAKTOVIK NUMERAL NINETEEN
{0x1D2E0, 0x1D2F3, prN}, // No [20] MAYAN NUMERAL ZERO..MAYAN NUMERAL NINETEEN
{0x1D300, 0x1D356, prN}, // So [87] MONOGRAM FOR EARTH..TETRAGRAM FOR FOSTERING
{0x1D360, 0x1D378, prN}, // No [25] COUNTING ROD UNIT DIGIT ONE..TALLY MARK FIVE
@@ -2353,11 +2377,14 @@ var eastAsianWidth = [][3]int{
{0x1DF00, 0x1DF09, prN}, // Ll [10] LATIN SMALL LETTER FENG DIGRAPH WITH TRILL..LATIN SMALL LETTER T WITH HOOK AND RETROFLEX HOOK
{0x1DF0A, 0x1DF0A, prN}, // Lo LATIN LETTER RETROFLEX CLICK WITH RETROFLEX HOOK
{0x1DF0B, 0x1DF1E, prN}, // Ll [20] LATIN SMALL LETTER ESH WITH DOUBLE BAR..LATIN SMALL LETTER S WITH CURL
+ {0x1DF25, 0x1DF2A, prN}, // Ll [6] LATIN SMALL LETTER D WITH MID-HEIGHT LEFT HOOK..LATIN SMALL LETTER T WITH MID-HEIGHT LEFT HOOK
{0x1E000, 0x1E006, prN}, // Mn [7] COMBINING GLAGOLITIC LETTER AZU..COMBINING GLAGOLITIC LETTER ZHIVETE
{0x1E008, 0x1E018, prN}, // Mn [17] COMBINING GLAGOLITIC LETTER ZEMLJA..COMBINING GLAGOLITIC LETTER HERU
{0x1E01B, 0x1E021, prN}, // Mn [7] COMBINING GLAGOLITIC LETTER SHTA..COMBINING GLAGOLITIC LETTER YATI
{0x1E023, 0x1E024, prN}, // Mn [2] COMBINING GLAGOLITIC LETTER YU..COMBINING GLAGOLITIC LETTER SMALL YUS
{0x1E026, 0x1E02A, prN}, // Mn [5] COMBINING GLAGOLITIC LETTER YO..COMBINING GLAGOLITIC LETTER FITA
+ {0x1E030, 0x1E06D, prN}, // Lm [62] MODIFIER LETTER CYRILLIC SMALL A..MODIFIER LETTER CYRILLIC SMALL STRAIGHT U WITH STROKE
+ {0x1E08F, 0x1E08F, prN}, // Mn COMBINING CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
{0x1E100, 0x1E12C, prN}, // Lo [45] NYIAKENG PUACHUE HMONG LETTER MA..NYIAKENG PUACHUE HMONG LETTER W
{0x1E130, 0x1E136, prN}, // Mn [7] NYIAKENG PUACHUE HMONG TONE-B..NYIAKENG PUACHUE HMONG TONE-D
{0x1E137, 0x1E13D, prN}, // Lm [7] NYIAKENG PUACHUE HMONG SIGN FOR PERSON..NYIAKENG PUACHUE HMONG SYLLABLE LENGTHENER
@@ -2370,6 +2397,10 @@ var eastAsianWidth = [][3]int{
{0x1E2EC, 0x1E2EF, prN}, // Mn [4] WANCHO TONE TUP..WANCHO TONE KOINI
{0x1E2F0, 0x1E2F9, prN}, // Nd [10] WANCHO DIGIT ZERO..WANCHO DIGIT NINE
{0x1E2FF, 0x1E2FF, prN}, // Sc WANCHO NGUN SIGN
+ {0x1E4D0, 0x1E4EA, prN}, // Lo [27] NAG MUNDARI LETTER O..NAG MUNDARI LETTER ELL
+ {0x1E4EB, 0x1E4EB, prN}, // Lm NAG MUNDARI SIGN OJOD
+ {0x1E4EC, 0x1E4EF, prN}, // Mn [4] NAG MUNDARI SIGN MUHOR..NAG MUNDARI SIGN SUTUH
+ {0x1E4F0, 0x1E4F9, prN}, // Nd [10] NAG MUNDARI DIGIT ZERO..NAG MUNDARI DIGIT NINE
{0x1E7E0, 0x1E7E6, prN}, // Lo [7] ETHIOPIC SYLLABLE HHYA..ETHIOPIC SYLLABLE HHYO
{0x1E7E8, 0x1E7EB, prN}, // Lo [4] ETHIOPIC SYLLABLE GURAGE HHWA..ETHIOPIC SYLLABLE HHWE
{0x1E7ED, 0x1E7EE, prN}, // Lo [2] ETHIOPIC SYLLABLE GURAGE MWI..ETHIOPIC SYLLABLE GURAGE MWEE
@@ -2498,13 +2529,14 @@ var eastAsianWidth = [][3]int{
{0x1F6D0, 0x1F6D2, prW}, // So [3] PLACE OF WORSHIP..SHOPPING TROLLEY
{0x1F6D3, 0x1F6D4, prN}, // So [2] STUPA..PAGODA
{0x1F6D5, 0x1F6D7, prW}, // So [3] HINDU TEMPLE..ELEVATOR
- {0x1F6DD, 0x1F6DF, prW}, // So [3] PLAYGROUND SLIDE..RING BUOY
+ {0x1F6DC, 0x1F6DF, prW}, // So [4] WIRELESS..RING BUOY
{0x1F6E0, 0x1F6EA, prN}, // So [11] HAMMER AND WRENCH..NORTHEAST-POINTING AIRPLANE
{0x1F6EB, 0x1F6EC, prW}, // So [2] AIRPLANE DEPARTURE..AIRPLANE ARRIVING
{0x1F6F0, 0x1F6F3, prN}, // So [4] SATELLITE..PASSENGER SHIP
{0x1F6F4, 0x1F6FC, prW}, // So [9] SCOOTER..ROLLER SKATE
- {0x1F700, 0x1F773, prN}, // So [116] ALCHEMICAL SYMBOL FOR QUINTESSENCE..ALCHEMICAL SYMBOL FOR HALF OUNCE
- {0x1F780, 0x1F7D8, prN}, // So [89] BLACK LEFT-POINTING ISOSCELES RIGHT TRIANGLE..NEGATIVE CIRCLED SQUARE
+ {0x1F700, 0x1F776, prN}, // So [119] ALCHEMICAL SYMBOL FOR QUINTESSENCE..LUNAR ECLIPSE
+ {0x1F77B, 0x1F77F, prN}, // So [5] HAUMEA..ORCUS
+ {0x1F780, 0x1F7D9, prN}, // So [90] BLACK LEFT-POINTING ISOSCELES RIGHT TRIANGLE..NINE POINTED WHITE STAR
{0x1F7E0, 0x1F7EB, prW}, // So [12] LARGE ORANGE CIRCLE..LARGE BROWN SQUARE
{0x1F7F0, 0x1F7F0, prW}, // So HEAVY EQUALS SIGN
{0x1F800, 0x1F80B, prN}, // So [12] LEFTWARDS ARROW WITH SMALL TRIANGLE ARROWHEAD..DOWNWARDS ARROW WITH LARGE TRIANGLE ARROWHEAD
@@ -2521,22 +2553,20 @@ var eastAsianWidth = [][3]int{
{0x1F947, 0x1F9FF, prW}, // So [185] FIRST PLACE MEDAL..NAZAR AMULET
{0x1FA00, 0x1FA53, prN}, // So [84] NEUTRAL CHESS KING..BLACK CHESS KNIGHT-BISHOP
{0x1FA60, 0x1FA6D, prN}, // So [14] XIANGQI RED GENERAL..XIANGQI BLACK SOLDIER
- {0x1FA70, 0x1FA74, prW}, // So [5] BALLET SHOES..THONG SANDAL
- {0x1FA78, 0x1FA7C, prW}, // So [5] DROP OF BLOOD..CRUTCH
- {0x1FA80, 0x1FA86, prW}, // So [7] YO-YO..NESTING DOLLS
- {0x1FA90, 0x1FAAC, prW}, // So [29] RINGED PLANET..HAMSA
- {0x1FAB0, 0x1FABA, prW}, // So [11] FLY..NEST WITH EGGS
- {0x1FAC0, 0x1FAC5, prW}, // So [6] ANATOMICAL HEART..PERSON WITH CROWN
- {0x1FAD0, 0x1FAD9, prW}, // So [10] BLUEBERRIES..JAR
- {0x1FAE0, 0x1FAE7, prW}, // So [8] MELTING FACE..BUBBLES
- {0x1FAF0, 0x1FAF6, prW}, // So [7] HAND WITH INDEX FINGER AND THUMB CROSSED..HEART HANDS
+ {0x1FA70, 0x1FA7C, prW}, // So [13] BALLET SHOES..CRUTCH
+ {0x1FA80, 0x1FA88, prW}, // So [9] YO-YO..FLUTE
+ {0x1FA90, 0x1FABD, prW}, // So [46] RINGED PLANET..WING
+ {0x1FABF, 0x1FAC5, prW}, // So [7] GOOSE..PERSON WITH CROWN
+ {0x1FACE, 0x1FADB, prW}, // So [14] MOOSE..PEA POD
+ {0x1FAE0, 0x1FAE8, prW}, // So [9] MELTING FACE..SHAKING FACE
+ {0x1FAF0, 0x1FAF8, prW}, // So [9] HAND WITH INDEX FINGER AND THUMB CROSSED..RIGHTWARDS PUSHING HAND
{0x1FB00, 0x1FB92, prN}, // So [147] BLOCK SEXTANT-1..UPPER HALF INVERSE MEDIUM SHADE AND LOWER HALF BLOCK
{0x1FB94, 0x1FBCA, prN}, // So [55] LEFT HALF INVERSE MEDIUM SHADE AND RIGHT HALF BLOCK..WHITE UP-POINTING CHEVRON
{0x1FBF0, 0x1FBF9, prN}, // Nd [10] SEGMENTED DIGIT ZERO..SEGMENTED DIGIT NINE
{0x20000, 0x2A6DF, prW}, // Lo [42720] CJK UNIFIED IDEOGRAPH-20000..CJK UNIFIED IDEOGRAPH-2A6DF
{0x2A6E0, 0x2A6FF, prW}, // Cn [32] <reserved-2A6E0>..<reserved-2A6FF>
- {0x2A700, 0x2B738, prW}, // Lo [4153] CJK UNIFIED IDEOGRAPH-2A700..CJK UNIFIED IDEOGRAPH-2B738
- {0x2B739, 0x2B73F, prW}, // Cn [7] <reserved-2B739>..<reserved-2B73F>
+ {0x2A700, 0x2B739, prW}, // Lo [4154] CJK UNIFIED IDEOGRAPH-2A700..CJK UNIFIED IDEOGRAPH-2B739
+ {0x2B73A, 0x2B73F, prW}, // Cn [6] <reserved-2B73A>..<reserved-2B73F>
{0x2B740, 0x2B81D, prW}, // Lo [222] CJK UNIFIED IDEOGRAPH-2B740..CJK UNIFIED IDEOGRAPH-2B81D
{0x2B81E, 0x2B81F, prW}, // Cn [2] <reserved-2B81E>..<reserved-2B81F>
{0x2B820, 0x2CEA1, prW}, // Lo [5762] CJK UNIFIED IDEOGRAPH-2B820..CJK UNIFIED IDEOGRAPH-2CEA1
@@ -2547,7 +2577,9 @@ var eastAsianWidth = [][3]int{
{0x2FA1E, 0x2FA1F, prW}, // Cn [2] <reserved-2FA1E>..<reserved-2FA1F>
{0x2FA20, 0x2FFFD, prW}, // Cn [1502] <reserved-2FA20>..<reserved-2FFFD>
{0x30000, 0x3134A, prW}, // Lo [4939] CJK UNIFIED IDEOGRAPH-30000..CJK UNIFIED IDEOGRAPH-3134A
- {0x3134B, 0x3FFFD, prW}, // Cn [60595] <reserved-3134B>..<reserved-3FFFD>
+ {0x3134B, 0x3134F, prW}, // Cn [5] <reserved-3134B>..<reserved-3134F>
+ {0x31350, 0x323AF, prW}, // Lo [4192] CJK UNIFIED IDEOGRAPH-31350..CJK UNIFIED IDEOGRAPH-323AF
+ {0x323B0, 0x3FFFD, prW}, // Cn [56398] <reserved-323B0>..<reserved-3FFFD>
{0xE0001, 0xE0001, prN}, // Cf LANGUAGE TAG
{0xE0020, 0xE007F, prN}, // Cf [96] TAG SPACE..CANCEL TAG
{0xE0100, 0xE01EF, prA}, // Mn [240] VARIATION SELECTOR-17..VARIATION SELECTOR-256
diff --git a/vendor/github.com/rivo/uniseg/emojipresentation.go b/vendor/github.com/rivo/uniseg/emojipresentation.go
index fd0f7451a..9b5f499c4 100644
--- a/vendor/github.com/rivo/uniseg/emojipresentation.go
+++ b/vendor/github.com/rivo/uniseg/emojipresentation.go
@@ -1,13 +1,13 @@
-package uniseg
-
// Code generated via go generate from gen_properties.go. DO NOT EDIT.
+package uniseg
+
// emojiPresentation are taken from
//
// and
-// https://unicode.org/Public/14.0.0/ucd/emoji/emoji-data.txt
+// https://unicode.org/Public/15.0.0/ucd/emoji/emoji-data.txt
// ("Extended_Pictographic" only)
-// on September 10, 2022. See https://www.unicode.org/license.html for the Unicode
+// on September 5, 2023. See https://www.unicode.org/license.html for the Unicode
// license agreement.
var emojiPresentation = [][3]int{
{0x231A, 0x231B, prEmojiPresentation}, // E0.6 [2] (⌚..⌛) watch..hourglass done
@@ -211,6 +211,7 @@ var emojiPresentation = [][3]int{
{0x1F6D1, 0x1F6D2, prEmojiPresentation}, // E3.0 [2] (🛑..🛒) stop sign..shopping cart
{0x1F6D5, 0x1F6D5, prEmojiPresentation}, // E12.0 [1] (🛕) hindu temple
{0x1F6D6, 0x1F6D7, prEmojiPresentation}, // E13.0 [2] (🛖..🛗) hut..elevator
+ {0x1F6DC, 0x1F6DC, prEmojiPresentation}, // E15.0 [1] (🛜) wireless
{0x1F6DD, 0x1F6DF, prEmojiPresentation}, // E14.0 [3] (🛝..🛟) playground slide..ring buoy
{0x1F6EB, 0x1F6EC, prEmojiPresentation}, // E1.0 [2] (🛫..🛬) airplane departure..airplane arrival
{0x1F6F4, 0x1F6F6, prEmojiPresentation}, // E3.0 [3] (🛴..🛶) kick scooter..canoe
@@ -267,19 +268,28 @@ var emojiPresentation = [][3]int{
{0x1F9E7, 0x1F9FF, prEmojiPresentation}, // E11.0 [25] (🧧..🧿) red envelope..nazar amulet
{0x1FA70, 0x1FA73, prEmojiPresentation}, // E12.0 [4] (🩰..🩳) ballet shoes..shorts
{0x1FA74, 0x1FA74, prEmojiPresentation}, // E13.0 [1] (🩴) thong sandal
+ {0x1FA75, 0x1FA77, prEmojiPresentation}, // E15.0 [3] (🩵..🩷) light blue heart..pink heart
{0x1FA78, 0x1FA7A, prEmojiPresentation}, // E12.0 [3] (🩸..🩺) drop of blood..stethoscope
{0x1FA7B, 0x1FA7C, prEmojiPresentation}, // E14.0 [2] (🩻..🩼) x-ray..crutch
{0x1FA80, 0x1FA82, prEmojiPresentation}, // E12.0 [3] (🪀..🪂) yo-yo..parachute
{0x1FA83, 0x1FA86, prEmojiPresentation}, // E13.0 [4] (🪃..🪆) boomerang..nesting dolls
+ {0x1FA87, 0x1FA88, prEmojiPresentation}, // E15.0 [2] (🪇..🪈) maracas..flute
{0x1FA90, 0x1FA95, prEmojiPresentation}, // E12.0 [6] (🪐..🪕) ringed planet..banjo
{0x1FA96, 0x1FAA8, prEmojiPresentation}, // E13.0 [19] (🪖..🪨) military helmet..rock
{0x1FAA9, 0x1FAAC, prEmojiPresentation}, // E14.0 [4] (🪩..🪬) mirror ball..hamsa
+ {0x1FAAD, 0x1FAAF, prEmojiPresentation}, // E15.0 [3] (🪭..🪯) folding hand fan..khanda
{0x1FAB0, 0x1FAB6, prEmojiPresentation}, // E13.0 [7] (🪰..🪶) fly..feather
{0x1FAB7, 0x1FABA, prEmojiPresentation}, // E14.0 [4] (🪷..🪺) lotus..nest with eggs
+ {0x1FABB, 0x1FABD, prEmojiPresentation}, // E15.0 [3] (🪻..🪽) hyacinth..wing
+ {0x1FABF, 0x1FABF, prEmojiPresentation}, // E15.0 [1] (🪿) goose
{0x1FAC0, 0x1FAC2, prEmojiPresentation}, // E13.0 [3] (🫀..🫂) anatomical heart..people hugging
{0x1FAC3, 0x1FAC5, prEmojiPresentation}, // E14.0 [3] (🫃..🫅) pregnant man..person with crown
+ {0x1FACE, 0x1FACF, prEmojiPresentation}, // E15.0 [2] (🫎..🫏) moose..donkey
{0x1FAD0, 0x1FAD6, prEmojiPresentation}, // E13.0 [7] (🫐..🫖) blueberries..teapot
{0x1FAD7, 0x1FAD9, prEmojiPresentation}, // E14.0 [3] (🫗..🫙) pouring liquid..jar
+ {0x1FADA, 0x1FADB, prEmojiPresentation}, // E15.0 [2] (🫚..🫛) ginger root..pea pod
{0x1FAE0, 0x1FAE7, prEmojiPresentation}, // E14.0 [8] (🫠..🫧) melting face..bubbles
+ {0x1FAE8, 0x1FAE8, prEmojiPresentation}, // E15.0 [1] (🫨) shaking face
{0x1FAF0, 0x1FAF6, prEmojiPresentation}, // E14.0 [7] (🫰..🫶) hand with index finger and thumb crossed..heart hands
+ {0x1FAF7, 0x1FAF8, prEmojiPresentation}, // E15.0 [2] (🫷..🫸) leftwards pushing hand..rightwards pushing hand
}
diff --git a/vendor/github.com/rivo/uniseg/gen_breaktest.go b/vendor/github.com/rivo/uniseg/gen_breaktest.go
index e613c4cd0..6bfbeb5e7 100644
--- a/vendor/github.com/rivo/uniseg/gen_breaktest.go
+++ b/vendor/github.com/rivo/uniseg/gen_breaktest.go
@@ -32,7 +32,7 @@ import (
// We want to test against a specific version rather than the latest. When the
// package is upgraded to a new version, change these to generate new tests.
const (
- testCaseURL = `https://www.unicode.org/Public/14.0.0/ucd/auxiliary/%s.txt`
+ testCaseURL = `https://www.unicode.org/Public/15.0.0/ucd/auxiliary/%s.txt`
)
func main() {
@@ -76,9 +76,9 @@ func parse(url string) ([]byte, error) {
buf := new(bytes.Buffer)
buf.Grow(120 << 10)
- buf.WriteString(`package uniseg
+ buf.WriteString(`// Code generated via go generate from gen_breaktest.go. DO NOT EDIT.
-// Code generated via go generate from gen_breaktest.go. DO NOT EDIT.
+package uniseg
// ` + os.Args[3] + ` are Grapheme testcases taken from
// ` + url + `
@@ -136,7 +136,9 @@ var (
//
// E.g. for the input b="÷ 0020 × 0308 ÷ 1F1E6 ÷"
// it will append
-// "\u0020\u0308\U0001F1E6"
+//
+// "\u0020\u0308\U0001F1E6"
+//
// and "[][]rune{{0x0020,0x0308},{0x1F1E6},}"
// to orig and exp respectively.
//
diff --git a/vendor/github.com/rivo/uniseg/gen_properties.go b/vendor/github.com/rivo/uniseg/gen_properties.go
index 999d5efdd..8992d2c5f 100644
--- a/vendor/github.com/rivo/uniseg/gen_properties.go
+++ b/vendor/github.com/rivo/uniseg/gen_properties.go
@@ -41,8 +41,8 @@ import (
// We want to test against a specific version rather than the latest. When the
// package is upgraded to a new version, change these to generate new tests.
const (
- propertyURL = `https://www.unicode.org/Public/14.0.0/ucd/%s.txt`
- emojiURL = `https://unicode.org/Public/14.0.0/ucd/emoji/emoji-data.txt`
+ propertyURL = `https://www.unicode.org/Public/15.0.0/ucd/%s.txt`
+ emojiURL = `https://unicode.org/Public/15.0.0/ucd/emoji/emoji-data.txt`
)
// The regular expression for a line containing a code point range property.
@@ -178,6 +178,11 @@ func parse(propertyURL, emojiProperty string, includeGeneralCategory bool) (stri
}
}
+ // Avoid overflow during binary search.
+ if len(properties) >= 1<<31 {
+ return "", errors.New("too many properties")
+ }
+
// Sort properties.
sort.Slice(properties, func(i, j int) bool {
left, _ := strconv.ParseUint(properties[i][0], 16, 64)
@@ -200,9 +205,9 @@ func parse(propertyURL, emojiProperty string, includeGeneralCategory bool) (stri
// ` + emojiURL + `
// ("Extended_Pictographic" only)`
}
- buf.WriteString(`package uniseg
+ buf.WriteString(`// Code generated via go generate from gen_properties.go. DO NOT EDIT.
-// Code generated via go generate from gen_properties.go. DO NOT EDIT.
+package uniseg
// ` + os.Args[3] + ` are taken from
// ` + propertyURL + emojiComment + `
diff --git a/vendor/github.com/rivo/uniseg/grapheme.go b/vendor/github.com/rivo/uniseg/grapheme.go
index 0086fc1b2..b12403d43 100644
--- a/vendor/github.com/rivo/uniseg/grapheme.go
+++ b/vendor/github.com/rivo/uniseg/grapheme.go
@@ -13,9 +13,10 @@ import "unicode/utf8"
// well as boundary information and character width is available via the various
// methods (see examples below).
//
-// Using this class to iterate over a string is convenient but it is much slower
-// than using this package's [Step] or [StepString] functions or any of the
-// other specialized functions starting with "First".
+// This class basically wraps the [StepString] parser and provides a convenient
+// interface to it. If you are only interested in some parts of this package's
+// functionality, using the specialized functions starting with "First" is
+// almost always faster.
type Graphemes struct {
// The original string.
original string
@@ -222,7 +223,7 @@ func FirstGraphemeCluster(b []byte, state int) (cluster, rest []byte, width, new
if len(b) <= length { // If we're already past the end, there is nothing else to parse.
var prop int
if state < 0 {
- prop = property(graphemeCodePoints, r)
+ prop = propertyGraphemes(r)
} else {
prop = state >> shiftGraphemePropState
}
@@ -252,16 +253,14 @@ func FirstGraphemeCluster(b []byte, state int) (cluster, rest []byte, width, new
return b[:length], b[length:], width, state | (prop << shiftGraphemePropState)
}
- if r == vs16 {
- width = 2
- } else if firstProp != prExtendedPictographic && firstProp != prRegionalIndicator && firstProp != prL {
- width += runeWidth(r, prop)
- } else if firstProp == prExtendedPictographic {
+ if firstProp == prExtendedPictographic {
if r == vs15 {
width = 1
- } else {
+ } else if r == vs16 {
width = 2
}
+ } else if firstProp != prRegionalIndicator && firstProp != prL {
+ width += runeWidth(r, prop)
}
length += l
@@ -284,7 +283,7 @@ func FirstGraphemeClusterInString(str string, state int) (cluster, rest string,
if len(str) <= length { // If we're already past the end, there is nothing else to parse.
var prop int
if state < 0 {
- prop = property(graphemeCodePoints, r)
+ prop = propertyGraphemes(r)
} else {
prop = state >> shiftGraphemePropState
}
@@ -314,16 +313,14 @@ func FirstGraphemeClusterInString(str string, state int) (cluster, rest string,
return str[:length], str[length:], width, state | (prop << shiftGraphemePropState)
}
- if r == vs16 {
- width = 2
- } else if firstProp != prExtendedPictographic && firstProp != prRegionalIndicator && firstProp != prL {
- width += runeWidth(r, prop)
- } else if firstProp == prExtendedPictographic {
+ if firstProp == prExtendedPictographic {
if r == vs15 {
width = 1
- } else {
+ } else if r == vs16 {
width = 2
}
+ } else if firstProp != prRegionalIndicator && firstProp != prL {
+ width += runeWidth(r, prop)
}
length += l
diff --git a/vendor/github.com/rivo/uniseg/graphemeproperties.go b/vendor/github.com/rivo/uniseg/graphemeproperties.go
index a87d140bf..0aff4a619 100644
--- a/vendor/github.com/rivo/uniseg/graphemeproperties.go
+++ b/vendor/github.com/rivo/uniseg/graphemeproperties.go
@@ -1,13 +1,13 @@
-package uniseg
-
// Code generated via go generate from gen_properties.go. DO NOT EDIT.
+package uniseg
+
// graphemeCodePoints are taken from
-// https://www.unicode.org/Public/14.0.0/ucd/auxiliary/GraphemeBreakProperty.txt
+// https://www.unicode.org/Public/15.0.0/ucd/auxiliary/GraphemeBreakProperty.txt
// and
-// https://unicode.org/Public/14.0.0/ucd/emoji/emoji-data.txt
+// https://unicode.org/Public/15.0.0/ucd/emoji/emoji-data.txt
// ("Extended_Pictographic" only)
-// on September 10, 2022. See https://www.unicode.org/license.html for the Unicode
+// on September 5, 2023. See https://www.unicode.org/license.html for the Unicode
// license agreement.
var graphemeCodePoints = [][3]int{
{0x0000, 0x0009, prControl}, // Cc [10] <control-0000>..<control-0009>
@@ -143,6 +143,7 @@ var graphemeCodePoints = [][3]int{
{0x0CCC, 0x0CCD, prExtend}, // Mn [2] KANNADA VOWEL SIGN AU..KANNADA SIGN VIRAMA
{0x0CD5, 0x0CD6, prExtend}, // Mc [2] KANNADA LENGTH MARK..KANNADA AI LENGTH MARK
{0x0CE2, 0x0CE3, prExtend}, // Mn [2] KANNADA VOWEL SIGN VOCALIC L..KANNADA VOWEL SIGN VOCALIC LL
+ {0x0CF3, 0x0CF3, prSpacingMark}, // Mc KANNADA SIGN COMBINING ANUSVARA ABOVE RIGHT
{0x0D00, 0x0D01, prExtend}, // Mn [2] MALAYALAM SIGN COMBINING ANUSVARA ABOVE..MALAYALAM SIGN CANDRABINDU
{0x0D02, 0x0D03, prSpacingMark}, // Mc [2] MALAYALAM SIGN ANUSVARA..MALAYALAM SIGN VISARGA
{0x0D3B, 0x0D3C, prExtend}, // Mn [2] MALAYALAM SIGN VERTICAL BAR VIRAMA..MALAYALAM SIGN CIRCULAR VIRAMA
@@ -172,7 +173,7 @@ var graphemeCodePoints = [][3]int{
{0x0EB1, 0x0EB1, prExtend}, // Mn LAO VOWEL SIGN MAI KAN
{0x0EB3, 0x0EB3, prSpacingMark}, // Lo LAO VOWEL SIGN AM
{0x0EB4, 0x0EBC, prExtend}, // Mn [9] LAO VOWEL SIGN I..LAO SEMIVOWEL SIGN LO
- {0x0EC8, 0x0ECD, prExtend}, // Mn [6] LAO TONE MAI EK..LAO NIGGAHITA
+ {0x0EC8, 0x0ECE, prExtend}, // Mn [7] LAO TONE MAI EK..LAO YAMAKKAN
{0x0F18, 0x0F19, prExtend}, // Mn [2] TIBETAN ASTROLOGICAL SIGN -KHYUD PA..TIBETAN ASTROLOGICAL SIGN SDONG TSHUGS
{0x0F35, 0x0F35, prExtend}, // Mn TIBETAN MARK NGAS BZUNG NYI ZLA
{0x0F37, 0x0F37, prExtend}, // Mn TIBETAN MARK NGAS BZUNG SGOR RTAGS
@@ -1336,6 +1337,7 @@ var graphemeCodePoints = [][3]int{
{0x10AE5, 0x10AE6, prExtend}, // Mn [2] MANICHAEAN ABBREVIATION MARK ABOVE..MANICHAEAN ABBREVIATION MARK BELOW
{0x10D24, 0x10D27, prExtend}, // Mn [4] HANIFI ROHINGYA SIGN HARBAHAY..HANIFI ROHINGYA SIGN TASSI
{0x10EAB, 0x10EAC, prExtend}, // Mn [2] YEZIDI COMBINING HAMZA MARK..YEZIDI COMBINING MADDA MARK
+ {0x10EFD, 0x10EFF, prExtend}, // Mn [3] ARABIC SMALL LOW WORD SAKTA..ARABIC SMALL LOW WORD MADDA
{0x10F46, 0x10F50, prExtend}, // Mn [11] SOGDIAN COMBINING DOT BELOW..SOGDIAN COMBINING STROKE BELOW
{0x10F82, 0x10F85, prExtend}, // Mn [4] OLD UYGHUR COMBINING DOT ABOVE..OLD UYGHUR COMBINING TWO DOTS BELOW
{0x11000, 0x11000, prSpacingMark}, // Mc BRAHMI SIGN CANDRABINDU
@@ -1375,6 +1377,7 @@ var graphemeCodePoints = [][3]int{
{0x11235, 0x11235, prSpacingMark}, // Mc KHOJKI SIGN VIRAMA
{0x11236, 0x11237, prExtend}, // Mn [2] KHOJKI SIGN NUKTA..KHOJKI SIGN SHADDA
{0x1123E, 0x1123E, prExtend}, // Mn KHOJKI SIGN SUKUN
+ {0x11241, 0x11241, prExtend}, // Mn KHOJKI VOWEL SIGN VOCALIC R
{0x112DF, 0x112DF, prExtend}, // Mn KHUDAWADI SIGN ANUSVARA
{0x112E0, 0x112E2, prSpacingMark}, // Mc [3] KHUDAWADI VOWEL SIGN AA..KHUDAWADI VOWEL SIGN II
{0x112E3, 0x112EA, prExtend}, // Mn [8] KHUDAWADI VOWEL SIGN U..KHUDAWADI SIGN VIRAMA
@@ -1494,7 +1497,18 @@ var graphemeCodePoints = [][3]int{
{0x11D97, 0x11D97, prExtend}, // Mn GUNJALA GONDI VIRAMA
{0x11EF3, 0x11EF4, prExtend}, // Mn [2] MAKASAR VOWEL SIGN I..MAKASAR VOWEL SIGN U
{0x11EF5, 0x11EF6, prSpacingMark}, // Mc [2] MAKASAR VOWEL SIGN E..MAKASAR VOWEL SIGN O
- {0x13430, 0x13438, prControl}, // Cf [9] EGYPTIAN HIEROGLYPH VERTICAL JOINER..EGYPTIAN HIEROGLYPH END SEGMENT
+ {0x11F00, 0x11F01, prExtend}, // Mn [2] KAWI SIGN CANDRABINDU..KAWI SIGN ANUSVARA
+ {0x11F02, 0x11F02, prPrepend}, // Lo KAWI SIGN REPHA
+ {0x11F03, 0x11F03, prSpacingMark}, // Mc KAWI SIGN VISARGA
+ {0x11F34, 0x11F35, prSpacingMark}, // Mc [2] KAWI VOWEL SIGN AA..KAWI VOWEL SIGN ALTERNATE AA
+ {0x11F36, 0x11F3A, prExtend}, // Mn [5] KAWI VOWEL SIGN I..KAWI VOWEL SIGN VOCALIC R
+ {0x11F3E, 0x11F3F, prSpacingMark}, // Mc [2] KAWI VOWEL SIGN E..KAWI VOWEL SIGN AI
+ {0x11F40, 0x11F40, prExtend}, // Mn KAWI VOWEL SIGN EU
+ {0x11F41, 0x11F41, prSpacingMark}, // Mc KAWI SIGN KILLER
+ {0x11F42, 0x11F42, prExtend}, // Mn KAWI CONJOINER
+ {0x13430, 0x1343F, prControl}, // Cf [16] EGYPTIAN HIEROGLYPH VERTICAL JOINER..EGYPTIAN HIEROGLYPH END WALLED ENCLOSURE
+ {0x13440, 0x13440, prExtend}, // Mn EGYPTIAN HIEROGLYPH MIRROR HORIZONTALLY
+ {0x13447, 0x13455, prExtend}, // Mn [15] EGYPTIAN HIEROGLYPH MODIFIER DAMAGED AT TOP START..EGYPTIAN HIEROGLYPH MODIFIER DAMAGED
{0x16AF0, 0x16AF4, prExtend}, // Mn [5] BASSA VAH COMBINING HIGH TONE..BASSA VAH COMBINING HIGH-LOW TONE
{0x16B30, 0x16B36, prExtend}, // Mn [7] PAHAWH HMONG MARK CIM TUB..PAHAWH HMONG MARK CIM TAUM
{0x16F4F, 0x16F4F, prExtend}, // Mn MIAO SIGN CONSONANT MODIFIER BAR
@@ -1527,9 +1541,11 @@ var graphemeCodePoints = [][3]int{
{0x1E01B, 0x1E021, prExtend}, // Mn [7] COMBINING GLAGOLITIC LETTER SHTA..COMBINING GLAGOLITIC LETTER YATI
{0x1E023, 0x1E024, prExtend}, // Mn [2] COMBINING GLAGOLITIC LETTER YU..COMBINING GLAGOLITIC LETTER SMALL YUS
{0x1E026, 0x1E02A, prExtend}, // Mn [5] COMBINING GLAGOLITIC LETTER YO..COMBINING GLAGOLITIC LETTER FITA
+ {0x1E08F, 0x1E08F, prExtend}, // Mn COMBINING CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
{0x1E130, 0x1E136, prExtend}, // Mn [7] NYIAKENG PUACHUE HMONG TONE-B..NYIAKENG PUACHUE HMONG TONE-D
{0x1E2AE, 0x1E2AE, prExtend}, // Mn TOTO SIGN RISING TONE
{0x1E2EC, 0x1E2EF, prExtend}, // Mn [4] WANCHO TONE TUP..WANCHO TONE KOINI
+ {0x1E4EC, 0x1E4EF, prExtend}, // Mn [4] NAG MUNDARI SIGN MUHOR..NAG MUNDARI SIGN SUTUH
{0x1E8D0, 0x1E8D6, prExtend}, // Mn [7] MENDE KIKAKUI COMBINING NUMBER TEENS..MENDE KIKAKUI COMBINING NUMBER MILLIONS
{0x1E944, 0x1E94A, prExtend}, // Mn [7] ADLAM ALIF LENGTHENER..ADLAM NUKTA
{0x1F000, 0x1F003, prExtendedPictographic}, // E0.0 [4] (🀀..🀃) MAHJONG TILE EAST WIND..MAHJONG TILE NORTH WIND
@@ -1780,7 +1796,8 @@ var graphemeCodePoints = [][3]int{
{0x1F6D3, 0x1F6D4, prExtendedPictographic}, // E0.0 [2] (🛓..🛔) STUPA..PAGODA
{0x1F6D5, 0x1F6D5, prExtendedPictographic}, // E12.0 [1] (🛕) hindu temple
{0x1F6D6, 0x1F6D7, prExtendedPictographic}, // E13.0 [2] (🛖..🛗) hut..elevator
- {0x1F6D8, 0x1F6DC, prExtendedPictographic}, // E0.0 [5] (🛘..🛜) <reserved-1F6D8>..<reserved-1F6DC>
+ {0x1F6D8, 0x1F6DB, prExtendedPictographic}, // E0.0 [4] (🛘..🛛) <reserved-1F6D8>..<reserved-1F6DB>
+ {0x1F6DC, 0x1F6DC, prExtendedPictographic}, // E15.0 [1] (🛜) wireless
{0x1F6DD, 0x1F6DF, prExtendedPictographic}, // E14.0 [3] (🛝..🛟) playground slide..ring buoy
{0x1F6E0, 0x1F6E5, prExtendedPictographic}, // E0.7 [6] (🛠️..🛥️) hammer and wrench..motor boat
{0x1F6E6, 0x1F6E8, prExtendedPictographic}, // E0.0 [3] (🛦..🛨) UP-POINTING MILITARY AIRPLANE..UP-POINTING SMALL AIRPLANE
@@ -1797,7 +1814,7 @@ var graphemeCodePoints = [][3]int{
{0x1F6FA, 0x1F6FA, prExtendedPictographic}, // E12.0 [1] (🛺) auto rickshaw
{0x1F6FB, 0x1F6FC, prExtendedPictographic}, // E13.0 [2] (🛻..🛼) pickup truck..roller skate
{0x1F6FD, 0x1F6FF, prExtendedPictographic}, // E0.0 [3] (🛽..🛿) <reserved-1F6FD>..<reserved-1F6FF>
- {0x1F774, 0x1F77F, prExtendedPictographic}, // E0.0 [12] (🝴..🝿) <reserved-1F774>..<reserved-1F77F>
+ {0x1F774, 0x1F77F, prExtendedPictographic}, // E0.0 [12] (🝴..🝿) LOT OF FORTUNE..ORCUS
{0x1F7D5, 0x1F7DF, prExtendedPictographic}, // E0.0 [11] (🟕..🟟) CIRCLED TRIANGLE..<reserved-1F7DF>
{0x1F7E0, 0x1F7EB, prExtendedPictographic}, // E12.0 [12] (🟠..🟫) orange circle..brown square
{0x1F7EC, 0x1F7EF, prExtendedPictographic}, // E0.0 [4] (🟬..🟯) <reserved-1F7EC>..<reserved-1F7EF>
@@ -1856,30 +1873,37 @@ var graphemeCodePoints = [][3]int{
{0x1FA00, 0x1FA6F, prExtendedPictographic}, // E0.0 [112] (🨀..🩯) NEUTRAL CHESS KING..<reserved-1FA6F>
{0x1FA70, 0x1FA73, prExtendedPictographic}, // E12.0 [4] (🩰..🩳) ballet shoes..shorts
{0x1FA74, 0x1FA74, prExtendedPictographic}, // E13.0 [1] (🩴) thong sandal
- {0x1FA75, 0x1FA77, prExtendedPictographic}, // E0.0 [3] (🩵..🩷) <reserved-1FA75>..<reserved-1FA77>
+ {0x1FA75, 0x1FA77, prExtendedPictographic}, // E15.0 [3] (🩵..🩷) light blue heart..pink heart
{0x1FA78, 0x1FA7A, prExtendedPictographic}, // E12.0 [3] (🩸..🩺) drop of blood..stethoscope
{0x1FA7B, 0x1FA7C, prExtendedPictographic}, // E14.0 [2] (🩻..🩼) x-ray..crutch
{0x1FA7D, 0x1FA7F, prExtendedPictographic}, // E0.0 [3] (🩽..🩿) <reserved-1FA7D>..<reserved-1FA7F>
{0x1FA80, 0x1FA82, prExtendedPictographic}, // E12.0 [3] (🪀..🪂) yo-yo..parachute
{0x1FA83, 0x1FA86, prExtendedPictographic}, // E13.0 [4] (🪃..🪆) boomerang..nesting dolls
- {0x1FA87, 0x1FA8F, prExtendedPictographic}, // E0.0 [9] (🪇..🪏) <reserved-1FA87>..<reserved-1FA8F>
+ {0x1FA87, 0x1FA88, prExtendedPictographic}, // E15.0 [2] (🪇..🪈) maracas..flute
+ {0x1FA89, 0x1FA8F, prExtendedPictographic}, // E0.0 [7] (🪉..🪏) <reserved-1FA89>..<reserved-1FA8F>
{0x1FA90, 0x1FA95, prExtendedPictographic}, // E12.0 [6] (🪐..🪕) ringed planet..banjo
{0x1FA96, 0x1FAA8, prExtendedPictographic}, // E13.0 [19] (🪖..🪨) military helmet..rock
{0x1FAA9, 0x1FAAC, prExtendedPictographic}, // E14.0 [4] (🪩..🪬) mirror ball..hamsa
- {0x1FAAD, 0x1FAAF, prExtendedPictographic}, // E0.0 [3] (🪭..🪯) <reserved-1FAAD>..<reserved-1FAAF>
+ {0x1FAAD, 0x1FAAF, prExtendedPictographic}, // E15.0 [3] (🪭..🪯) folding hand fan..khanda
{0x1FAB0, 0x1FAB6, prExtendedPictographic}, // E13.0 [7] (🪰..🪶) fly..feather
{0x1FAB7, 0x1FABA, prExtendedPictographic}, // E14.0 [4] (🪷..🪺) lotus..nest with eggs
- {0x1FABB, 0x1FABF, prExtendedPictographic}, // E0.0 [5] (🪻..🪿) <reserved-1FABB>..<reserved-1FABF>
+ {0x1FABB, 0x1FABD, prExtendedPictographic}, // E15.0 [3] (🪻..🪽) hyacinth..wing
+ {0x1FABE, 0x1FABE, prExtendedPictographic}, // E0.0 [1] (🪾) <reserved-1FABE>
+ {0x1FABF, 0x1FABF, prExtendedPictographic}, // E15.0 [1] (🪿) goose
{0x1FAC0, 0x1FAC2, prExtendedPictographic}, // E13.0 [3] (🫀..🫂) anatomical heart..people hugging
{0x1FAC3, 0x1FAC5, prExtendedPictographic}, // E14.0 [3] (🫃..🫅) pregnant man..person with crown
- {0x1FAC6, 0x1FACF, prExtendedPictographic}, // E0.0 [10] (🫆..🫏) <reserved-1FAC6>..<reserved-1FACF>
+ {0x1FAC6, 0x1FACD, prExtendedPictographic}, // E0.0 [8] (🫆..🫍) <reserved-1FAC6>..<reserved-1FACD>
+ {0x1FACE, 0x1FACF, prExtendedPictographic}, // E15.0 [2] (🫎..🫏) moose..donkey
{0x1FAD0, 0x1FAD6, prExtendedPictographic}, // E13.0 [7] (🫐..🫖) blueberries..teapot
{0x1FAD7, 0x1FAD9, prExtendedPictographic}, // E14.0 [3] (🫗..🫙) pouring liquid..jar
- {0x1FADA, 0x1FADF, prExtendedPictographic}, // E0.0 [6] (🫚..🫟) <reserved-1FADA>..<reserved-1FADF>
+ {0x1FADA, 0x1FADB, prExtendedPictographic}, // E15.0 [2] (🫚..🫛) ginger root..pea pod
+ {0x1FADC, 0x1FADF, prExtendedPictographic}, // E0.0 [4] (🫜..🫟) <reserved-1FADC>..<reserved-1FADF>
{0x1FAE0, 0x1FAE7, prExtendedPictographic}, // E14.0 [8] (🫠..🫧) melting face..bubbles
- {0x1FAE8, 0x1FAEF, prExtendedPictographic}, // E0.0 [8] (🫨..🫯) <reserved-1FAE8>..<reserved-1FAEF>
+ {0x1FAE8, 0x1FAE8, prExtendedPictographic}, // E15.0 [1] (🫨) shaking face
+ {0x1FAE9, 0x1FAEF, prExtendedPictographic}, // E0.0 [7] (🫩..🫯) <reserved-1FAE9>..<reserved-1FAEF>
{0x1FAF0, 0x1FAF6, prExtendedPictographic}, // E14.0 [7] (🫰..🫶) hand with index finger and thumb crossed..heart hands
- {0x1FAF7, 0x1FAFF, prExtendedPictographic}, // E0.0 [9] (🫷..🫿) <reserved-1FAF7>..<reserved-1FAFF>
+ {0x1FAF7, 0x1FAF8, prExtendedPictographic}, // E15.0 [2] (🫷..🫸) leftwards pushing hand..rightwards pushing hand
+ {0x1FAF9, 0x1FAFF, prExtendedPictographic}, // E0.0 [7] (🫹..🫿) <reserved-1FAF9>..<reserved-1FAFF>
{0x1FC00, 0x1FFFD, prExtendedPictographic}, // E0.0[1022] (🰀..🿽) <reserved-1FC00>..<reserved-1FFFD>
{0xE0000, 0xE0000, prControl}, // Cn <reserved-E0000>
{0xE0001, 0xE0001, prControl}, // Cf LANGUAGE TAG
diff --git a/vendor/github.com/rivo/uniseg/graphemerules.go b/vendor/github.com/rivo/uniseg/graphemerules.go
index 9f46b575b..5d399d29c 100644
--- a/vendor/github.com/rivo/uniseg/graphemerules.go
+++ b/vendor/github.com/rivo/uniseg/graphemerules.go
@@ -21,11 +21,12 @@ const (
grBoundary
)
-// The grapheme cluster parser's state transitions. Maps (state, property) to
-// (new state, breaking instruction, rule number). The breaking instruction
-// always refers to the boundary between the last and next code point.
+// grTransitions implements the grapheme cluster parser's state transitions.
+// Maps state and property to a new state, a breaking instruction, and rule
+// number. The breaking instruction always refers to the boundary between the
+// last and next code point. Returns negative values if no transition is found.
//
-// This map is queried as follows:
+// This function is used as follows:
//
// 1. Find specific state + specific property. Stop if found.
// 2. Find specific state + any property.
@@ -36,59 +37,96 @@ const (
// are equal. Stop.
// 6. Assume grAny and grBoundary.
//
-// Unicode version 14.0.0.
-var grTransitions = map[[2]int][3]int{
+// Unicode version 15.0.0.
+func grTransitions(state, prop int) (newState int, newProp int, boundary int) {
+ // It turns out that using a big switch statement is much faster than using
+ // a map.
+
+ switch uint64(state) | uint64(prop)<<32 {
// GB5
- {grAny, prCR}: {grCR, grBoundary, 50},
- {grAny, prLF}: {grControlLF, grBoundary, 50},
- {grAny, prControl}: {grControlLF, grBoundary, 50},
+ case grAny | prCR<<32:
+ return grCR, grBoundary, 50
+ case grAny | prLF<<32:
+ return grControlLF, grBoundary, 50
+ case grAny | prControl<<32:
+ return grControlLF, grBoundary, 50
// GB4
- {grCR, prAny}: {grAny, grBoundary, 40},
- {grControlLF, prAny}: {grAny, grBoundary, 40},
-
- // GB3.
- {grCR, prLF}: {grControlLF, grNoBoundary, 30},
-
- // GB6.
- {grAny, prL}: {grL, grBoundary, 9990},
- {grL, prL}: {grL, grNoBoundary, 60},
- {grL, prV}: {grLVV, grNoBoundary, 60},
- {grL, prLV}: {grLVV, grNoBoundary, 60},
- {grL, prLVT}: {grLVTT, grNoBoundary, 60},
-
- // GB7.
- {grAny, prLV}: {grLVV, grBoundary, 9990},
- {grAny, prV}: {grLVV, grBoundary, 9990},
- {grLVV, prV}: {grLVV, grNoBoundary, 70},
- {grLVV, prT}: {grLVTT, grNoBoundary, 70},
-
- // GB8.
- {grAny, prLVT}: {grLVTT, grBoundary, 9990},
- {grAny, prT}: {grLVTT, grBoundary, 9990},
- {grLVTT, prT}: {grLVTT, grNoBoundary, 80},
-
- // GB9.
- {grAny, prExtend}: {grAny, grNoBoundary, 90},
- {grAny, prZWJ}: {grAny, grNoBoundary, 90},
-
- // GB9a.
- {grAny, prSpacingMark}: {grAny, grNoBoundary, 91},
-
- // GB9b.
- {grAny, prPrepend}: {grPrepend, grBoundary, 9990},
- {grPrepend, prAny}: {grAny, grNoBoundary, 92},
-
- // GB11.
- {grAny, prExtendedPictographic}: {grExtendedPictographic, grBoundary, 9990},
- {grExtendedPictographic, prExtend}: {grExtendedPictographic, grNoBoundary, 110},
- {grExtendedPictographic, prZWJ}: {grExtendedPictographicZWJ, grNoBoundary, 110},
- {grExtendedPictographicZWJ, prExtendedPictographic}: {grExtendedPictographic, grNoBoundary, 110},
-
- // GB12 / GB13.
- {grAny, prRegionalIndicator}: {grRIOdd, grBoundary, 9990},
- {grRIOdd, prRegionalIndicator}: {grRIEven, grNoBoundary, 120},
- {grRIEven, prRegionalIndicator}: {grRIOdd, grBoundary, 120},
+ case grCR | prAny<<32:
+ return grAny, grBoundary, 40
+ case grControlLF | prAny<<32:
+ return grAny, grBoundary, 40
+
+ // GB3
+ case grCR | prLF<<32:
+ return grControlLF, grNoBoundary, 30
+
+ // GB6
+ case grAny | prL<<32:
+ return grL, grBoundary, 9990
+ case grL | prL<<32:
+ return grL, grNoBoundary, 60
+ case grL | prV<<32:
+ return grLVV, grNoBoundary, 60
+ case grL | prLV<<32:
+ return grLVV, grNoBoundary, 60
+ case grL | prLVT<<32:
+ return grLVTT, grNoBoundary, 60
+
+ // GB7
+ case grAny | prLV<<32:
+ return grLVV, grBoundary, 9990
+ case grAny | prV<<32:
+ return grLVV, grBoundary, 9990
+ case grLVV | prV<<32:
+ return grLVV, grNoBoundary, 70
+ case grLVV | prT<<32:
+ return grLVTT, grNoBoundary, 70
+
+ // GB8
+ case grAny | prLVT<<32:
+ return grLVTT, grBoundary, 9990
+ case grAny | prT<<32:
+ return grLVTT, grBoundary, 9990
+ case grLVTT | prT<<32:
+ return grLVTT, grNoBoundary, 80
+
+ // GB9
+ case grAny | prExtend<<32:
+ return grAny, grNoBoundary, 90
+ case grAny | prZWJ<<32:
+ return grAny, grNoBoundary, 90
+
+ // GB9a
+ case grAny | prSpacingMark<<32:
+ return grAny, grNoBoundary, 91
+
+ // GB9b
+ case grAny | prPrepend<<32:
+ return grPrepend, grBoundary, 9990
+ case grPrepend | prAny<<32:
+ return grAny, grNoBoundary, 92
+
+ // GB11
+ case grAny | prExtendedPictographic<<32:
+ return grExtendedPictographic, grBoundary, 9990
+ case grExtendedPictographic | prExtend<<32:
+ return grExtendedPictographic, grNoBoundary, 110
+ case grExtendedPictographic | prZWJ<<32:
+ return grExtendedPictographicZWJ, grNoBoundary, 110
+ case grExtendedPictographicZWJ | prExtendedPictographic<<32:
+ return grExtendedPictographic, grNoBoundary, 110
+
+ // GB12 / GB13
+ case grAny | prRegionalIndicator<<32:
+ return grRIOdd, grBoundary, 9990
+ case grRIOdd | prRegionalIndicator<<32:
+ return grRIEven, grNoBoundary, 120
+ case grRIEven | prRegionalIndicator<<32:
+ return grRIOdd, grBoundary, 120
+ default:
+ return -1, -1, -1
+ }
}
// transitionGraphemeState determines the new state of the grapheme cluster
@@ -97,40 +135,40 @@ var grTransitions = map[[2]int][3]int{
// table) and whether a cluster boundary was detected.
func transitionGraphemeState(state int, r rune) (newState, prop int, boundary bool) {
// Determine the property of the next character.
- prop = property(graphemeCodePoints, r)
+ prop = propertyGraphemes(r)
// Find the applicable transition.
- transition, ok := grTransitions[[2]int{state, prop}]
- if ok {
+ nextState, nextProp, _ := grTransitions(state, prop)
+ if nextState >= 0 {
// We have a specific transition. We'll use it.
- return transition[0], prop, transition[1] == grBoundary
+ return nextState, prop, nextProp == grBoundary
}
// No specific transition found. Try the less specific ones.
- transAnyProp, okAnyProp := grTransitions[[2]int{state, prAny}]
- transAnyState, okAnyState := grTransitions[[2]int{grAny, prop}]
- if okAnyProp && okAnyState {
+ anyPropState, anyPropProp, anyPropRule := grTransitions(state, prAny)
+ anyStateState, anyStateProp, anyStateRule := grTransitions(grAny, prop)
+ if anyPropState >= 0 && anyStateState >= 0 {
// Both apply. We'll use a mix (see comments for grTransitions).
- newState = transAnyState[0]
- boundary = transAnyState[1] == grBoundary
- if transAnyProp[2] < transAnyState[2] {
- boundary = transAnyProp[1] == grBoundary
+ newState = anyStateState
+ boundary = anyStateProp == grBoundary
+ if anyPropRule < anyStateRule {
+ boundary = anyPropProp == grBoundary
}
return
}
- if okAnyProp {
+ if anyPropState >= 0 {
// We only have a specific state.
- return transAnyProp[0], prop, transAnyProp[1] == grBoundary
+ return anyPropState, prop, anyPropProp == grBoundary
// This branch will probably never be reached because okAnyState will
// always be true given the current transition map. But we keep it here
// for future modifications to the transition map where this may not be
// true anymore.
}
- if okAnyState {
+ if anyStateState >= 0 {
// We only have a specific property.
- return transAnyState[0], prop, transAnyState[1] == grBoundary
+ return anyStateState, prop, anyStateProp == grBoundary
}
// No known transition. GB999: Any ÷ Any.
diff --git a/vendor/github.com/rivo/uniseg/line.go b/vendor/github.com/rivo/uniseg/line.go
index 87f28503f..7a46318d9 100644
--- a/vendor/github.com/rivo/uniseg/line.go
+++ b/vendor/github.com/rivo/uniseg/line.go
@@ -80,7 +80,7 @@ func FirstLineSegment(b []byte, state int) (segment, rest []byte, mustBreak bool
}
}
-// FirstLineSegmentInString is like FirstLineSegment() but its input and outputs
+// FirstLineSegmentInString is like [FirstLineSegment] but its input and outputs
// are strings.
func FirstLineSegmentInString(str string, state int) (segment, rest string, mustBreak bool, newState int) {
// An empty byte slice returns nothing.
@@ -122,13 +122,13 @@ func FirstLineSegmentInString(str string, state int) (segment, rest string, must
// [UAX #14]: https://www.unicode.org/reports/tr14/#Algorithm
func HasTrailingLineBreak(b []byte) bool {
r, _ := utf8.DecodeLastRune(b)
- property, _ := propertyWithGenCat(lineBreakCodePoints, r)
- return property == lbBK || property == lbCR || property == lbLF || property == lbNL
+ property, _ := propertyLineBreak(r)
+ return property == prBK || property == prCR || property == prLF || property == prNL
}
// HasTrailingLineBreakInString is like [HasTrailingLineBreak] but for a string.
func HasTrailingLineBreakInString(str string) bool {
r, _ := utf8.DecodeLastRuneInString(str)
- property, _ := propertyWithGenCat(lineBreakCodePoints, r)
- return property == lbBK || property == lbCR || property == lbLF || property == lbNL
+ property, _ := propertyLineBreak(r)
+ return property == prBK || property == prCR || property == prLF || property == prNL
}
diff --git a/vendor/github.com/rivo/uniseg/lineproperties.go b/vendor/github.com/rivo/uniseg/lineproperties.go
index 32169306e..ac7fac4c0 100644
--- a/vendor/github.com/rivo/uniseg/lineproperties.go
+++ b/vendor/github.com/rivo/uniseg/lineproperties.go
@@ -1,13 +1,13 @@
-package uniseg
-
// Code generated via go generate from gen_properties.go. DO NOT EDIT.
+package uniseg
+
// lineBreakCodePoints are taken from
-// https://www.unicode.org/Public/14.0.0/ucd/LineBreak.txt
+// https://www.unicode.org/Public/15.0.0/ucd/LineBreak.txt
// and
-// https://unicode.org/Public/14.0.0/ucd/emoji/emoji-data.txt
+// https://unicode.org/Public/15.0.0/ucd/emoji/emoji-data.txt
// ("Extended_Pictographic" only)
-// on September 10, 2022. See https://www.unicode.org/license.html for the Unicode
+// on September 5, 2023. See https://www.unicode.org/license.html for the Unicode
// license agreement.
var lineBreakCodePoints = [][4]int{
{0x0000, 0x0008, prCM, gcCc}, // [9] <control-0000>..<control-0008>
@@ -439,6 +439,7 @@ var lineBreakCodePoints = [][4]int{
{0x0CE2, 0x0CE3, prCM, gcMn}, // [2] KANNADA VOWEL SIGN VOCALIC L..KANNADA VOWEL SIGN VOCALIC LL
{0x0CE6, 0x0CEF, prNU, gcNd}, // [10] KANNADA DIGIT ZERO..KANNADA DIGIT NINE
{0x0CF1, 0x0CF2, prAL, gcLo}, // [2] KANNADA SIGN JIHVAMULIYA..KANNADA SIGN UPADHMANIYA
+ {0x0CF3, 0x0CF3, prCM, gcMc}, // KANNADA SIGN COMBINING ANUSVARA ABOVE RIGHT
{0x0D00, 0x0D01, prCM, gcMn}, // [2] MALAYALAM SIGN COMBINING ANUSVARA ABOVE..MALAYALAM SIGN CANDRABINDU
{0x0D02, 0x0D03, prCM, gcMc}, // [2] MALAYALAM SIGN ANUSVARA..MALAYALAM SIGN VISARGA
{0x0D04, 0x0D0C, prAL, gcLo}, // [9] MALAYALAM LETTER VEDIC ANUSVARA..MALAYALAM LETTER VOCALIC L
@@ -500,7 +501,7 @@ var lineBreakCodePoints = [][4]int{
{0x0EBD, 0x0EBD, prSA, gcLo}, // LAO SEMIVOWEL SIGN NYO
{0x0EC0, 0x0EC4, prSA, gcLo}, // [5] LAO VOWEL SIGN E..LAO VOWEL SIGN AI
{0x0EC6, 0x0EC6, prSA, gcLm}, // LAO KO LA
- {0x0EC8, 0x0ECD, prSA, gcMn}, // [6] LAO TONE MAI EK..LAO NIGGAHITA
+ {0x0EC8, 0x0ECE, prSA, gcMn}, // [7] LAO TONE MAI EK..LAO YAMAKKAN
{0x0ED0, 0x0ED9, prNU, gcNd}, // [10] LAO DIGIT ZERO..LAO DIGIT NINE
{0x0EDC, 0x0EDF, prSA, gcLo}, // [4] LAO HO NO..LAO LETTER KHMU NYO
{0x0F00, 0x0F00, prAL, gcLo}, // TIBETAN SYLLABLE OM
@@ -813,7 +814,11 @@ var lineBreakCodePoints = [][4]int{
{0x1D79, 0x1D7F, prAL, gcLl}, // [7] LATIN SMALL LETTER INSULAR G..LATIN SMALL LETTER UPSILON WITH STROKE
{0x1D80, 0x1D9A, prAL, gcLl}, // [27] LATIN SMALL LETTER B WITH PALATAL HOOK..LATIN SMALL LETTER EZH WITH RETROFLEX HOOK
{0x1D9B, 0x1DBF, prAL, gcLm}, // [37] MODIFIER LETTER SMALL TURNED ALPHA..MODIFIER LETTER SMALL THETA
- {0x1DC0, 0x1DFF, prCM, gcMn}, // [64] COMBINING DOTTED GRAVE ACCENT..COMBINING RIGHT ARROWHEAD AND DOWN ARROWHEAD BELOW
+ {0x1DC0, 0x1DCC, prCM, gcMn}, // [13] COMBINING DOTTED GRAVE ACCENT..COMBINING MACRON-BREVE
+ {0x1DCD, 0x1DCD, prGL, gcMn}, // COMBINING DOUBLE CIRCUMFLEX ABOVE
+ {0x1DCE, 0x1DFB, prCM, gcMn}, // [46] COMBINING OGONEK ABOVE..COMBINING DELETION MARK
+ {0x1DFC, 0x1DFC, prGL, gcMn}, // COMBINING DOUBLE INVERTED BREVE BELOW
+ {0x1DFD, 0x1DFF, prCM, gcMn}, // [3] COMBINING ALMOST EQUAL TO BELOW..COMBINING RIGHT ARROWHEAD AND DOWN ARROWHEAD BELOW
{0x1E00, 0x1EFF, prAL, gcLC}, // [256] LATIN CAPITAL LETTER A WITH RING BELOW..LATIN SMALL LETTER Y WITH LOOP
{0x1F00, 0x1F15, prAL, gcLC}, // [22] GREEK SMALL LETTER ALPHA WITH PSILI..GREEK SMALL LETTER EPSILON WITH DASIA AND OXIA
{0x1F18, 0x1F1D, prAL, gcLu}, // [6] GREEK CAPITAL LETTER EPSILON WITH PSILI..GREEK CAPITAL LETTER EPSILON WITH DASIA AND OXIA
@@ -889,7 +894,7 @@ var lineBreakCodePoints = [][4]int{
{0x2054, 0x2054, prAL, gcPc}, // INVERTED UNDERTIE
{0x2055, 0x2055, prAL, gcPo}, // FLOWER PUNCTUATION MARK
{0x2056, 0x2056, prBA, gcPo}, // THREE DOT PUNCTUATION
- {0x2057, 0x2057, prAL, gcPo}, // QUADRUPLE PRIME
+ {0x2057, 0x2057, prPO, gcPo}, // QUADRUPLE PRIME
{0x2058, 0x205B, prBA, gcPo}, // [4] FOUR DOT PUNCTUATION..FOUR DOT MARK
{0x205C, 0x205C, prAL, gcPo}, // DOTTED CROSS
{0x205D, 0x205E, prBA, gcPo}, // [2] TRICOLON..VERTICAL FOUR DOTS
@@ -2751,6 +2756,7 @@ var lineBreakCodePoints = [][4]int{
{0x10EAB, 0x10EAC, prCM, gcMn}, // [2] YEZIDI COMBINING HAMZA MARK..YEZIDI COMBINING MADDA MARK
{0x10EAD, 0x10EAD, prBA, gcPd}, // YEZIDI HYPHENATION MARK
{0x10EB0, 0x10EB1, prAL, gcLo}, // [2] YEZIDI LETTER LAM WITH DOT ABOVE..YEZIDI LETTER YOT WITH CIRCUMFLEX ABOVE
+ {0x10EFD, 0x10EFF, prCM, gcMn}, // [3] ARABIC SMALL LOW WORD SAKTA..ARABIC SMALL LOW WORD MADDA
{0x10F00, 0x10F1C, prAL, gcLo}, // [29] OLD SOGDIAN LETTER ALEPH..OLD SOGDIAN LETTER FINAL TAW WITH VERTICAL TAIL
{0x10F1D, 0x10F26, prAL, gcNo}, // [10] OLD SOGDIAN NUMBER ONE..OLD SOGDIAN FRACTION ONE HALF
{0x10F27, 0x10F27, prAL, gcLo}, // OLD SOGDIAN LIGATURE AYIN-DALETH
@@ -2840,6 +2846,8 @@ var lineBreakCodePoints = [][4]int{
{0x1123B, 0x1123C, prBA, gcPo}, // [2] KHOJKI SECTION MARK..KHOJKI DOUBLE SECTION MARK
{0x1123D, 0x1123D, prAL, gcPo}, // KHOJKI ABBREVIATION SIGN
{0x1123E, 0x1123E, prCM, gcMn}, // KHOJKI SIGN SUKUN
+ {0x1123F, 0x11240, prAL, gcLo}, // [2] KHOJKI LETTER QA..KHOJKI LETTER SHORT I
+ {0x11241, 0x11241, prCM, gcMn}, // KHOJKI VOWEL SIGN VOCALIC R
{0x11280, 0x11286, prAL, gcLo}, // [7] MULTANI LETTER A..MULTANI LETTER GA
{0x11288, 0x11288, prAL, gcLo}, // MULTANI LETTER GHA
{0x1128A, 0x1128D, prAL, gcLo}, // [4] MULTANI LETTER CA..MULTANI LETTER JJA
@@ -3013,6 +3021,7 @@ var lineBreakCodePoints = [][4]int{
{0x11AA1, 0x11AA2, prBA, gcPo}, // [2] SOYOMBO TERMINAL MARK-1..SOYOMBO TERMINAL MARK-2
{0x11AB0, 0x11ABF, prAL, gcLo}, // [16] CANADIAN SYLLABICS NATTILIK HI..CANADIAN SYLLABICS SPA
{0x11AC0, 0x11AF8, prAL, gcLo}, // [57] PAU CIN HAU LETTER PA..PAU CIN HAU GLOTTAL STOP FINAL
+ {0x11B00, 0x11B09, prBB, gcPo}, // [10] DEVANAGARI HEAD MARK..DEVANAGARI SIGN MINDU
{0x11C00, 0x11C08, prAL, gcLo}, // [9] BHAIKSUKI LETTER A..BHAIKSUKI LETTER VOCALIC L
{0x11C0A, 0x11C2E, prAL, gcLo}, // [37] BHAIKSUKI LETTER E..BHAIKSUKI LETTER HA
{0x11C2F, 0x11C2F, prCM, gcMc}, // BHAIKSUKI VOWEL SIGN AA
@@ -3059,6 +3068,20 @@ var lineBreakCodePoints = [][4]int{
{0x11EF3, 0x11EF4, prCM, gcMn}, // [2] MAKASAR VOWEL SIGN I..MAKASAR VOWEL SIGN U
{0x11EF5, 0x11EF6, prCM, gcMc}, // [2] MAKASAR VOWEL SIGN E..MAKASAR VOWEL SIGN O
{0x11EF7, 0x11EF8, prAL, gcPo}, // [2] MAKASAR PASSIMBANG..MAKASAR END OF SECTION
+ {0x11F00, 0x11F01, prCM, gcMn}, // [2] KAWI SIGN CANDRABINDU..KAWI SIGN ANUSVARA
+ {0x11F02, 0x11F02, prAL, gcLo}, // KAWI SIGN REPHA
+ {0x11F03, 0x11F03, prCM, gcMc}, // KAWI SIGN VISARGA
+ {0x11F04, 0x11F10, prAL, gcLo}, // [13] KAWI LETTER A..KAWI LETTER O
+ {0x11F12, 0x11F33, prAL, gcLo}, // [34] KAWI LETTER KA..KAWI LETTER JNYA
+ {0x11F34, 0x11F35, prCM, gcMc}, // [2] KAWI VOWEL SIGN AA..KAWI VOWEL SIGN ALTERNATE AA
+ {0x11F36, 0x11F3A, prCM, gcMn}, // [5] KAWI VOWEL SIGN I..KAWI VOWEL SIGN VOCALIC R
+ {0x11F3E, 0x11F3F, prCM, gcMc}, // [2] KAWI VOWEL SIGN E..KAWI VOWEL SIGN AI
+ {0x11F40, 0x11F40, prCM, gcMn}, // KAWI VOWEL SIGN EU
+ {0x11F41, 0x11F41, prCM, gcMc}, // KAWI SIGN KILLER
+ {0x11F42, 0x11F42, prCM, gcMn}, // KAWI CONJOINER
+ {0x11F43, 0x11F44, prBA, gcPo}, // [2] KAWI DANDA..KAWI DOUBLE DANDA
+ {0x11F45, 0x11F4F, prID, gcPo}, // [11] KAWI PUNCTUATION SECTION MARKER..KAWI PUNCTUATION CLOSING SPIRAL
+ {0x11F50, 0x11F59, prNU, gcNd}, // [10] KAWI DIGIT ZERO..KAWI DIGIT NINE
{0x11FB0, 0x11FB0, prAL, gcLo}, // LISU LETTER YHA
{0x11FC0, 0x11FD4, prAL, gcNo}, // [21] TAMIL FRACTION ONE THREE-HUNDRED-AND-TWENTIETH..TAMIL FRACTION DOWNSCALING FACTOR KIIZH
{0x11FD5, 0x11FDC, prAL, gcSo}, // [8] TAMIL SIGN NEL..TAMIL SIGN MUKKURUNI
@@ -3084,10 +3107,18 @@ var lineBreakCodePoints = [][4]int{
{0x1328A, 0x13378, prAL, gcLo}, // [239] EGYPTIAN HIEROGLYPH O037..EGYPTIAN HIEROGLYPH V011
{0x13379, 0x13379, prOP, gcLo}, // EGYPTIAN HIEROGLYPH V011A
{0x1337A, 0x1337B, prCL, gcLo}, // [2] EGYPTIAN HIEROGLYPH V011B..EGYPTIAN HIEROGLYPH V011C
- {0x1337C, 0x1342E, prAL, gcLo}, // [179] EGYPTIAN HIEROGLYPH V012..EGYPTIAN HIEROGLYPH AA032
+ {0x1337C, 0x1342F, prAL, gcLo}, // [180] EGYPTIAN HIEROGLYPH V012..EGYPTIAN HIEROGLYPH V011D
{0x13430, 0x13436, prGL, gcCf}, // [7] EGYPTIAN HIEROGLYPH VERTICAL JOINER..EGYPTIAN HIEROGLYPH OVERLAY MIDDLE
{0x13437, 0x13437, prOP, gcCf}, // EGYPTIAN HIEROGLYPH BEGIN SEGMENT
{0x13438, 0x13438, prCL, gcCf}, // EGYPTIAN HIEROGLYPH END SEGMENT
+ {0x13439, 0x1343B, prGL, gcCf}, // [3] EGYPTIAN HIEROGLYPH INSERT AT MIDDLE..EGYPTIAN HIEROGLYPH INSERT AT BOTTOM
+ {0x1343C, 0x1343C, prOP, gcCf}, // EGYPTIAN HIEROGLYPH BEGIN ENCLOSURE
+ {0x1343D, 0x1343D, prCL, gcCf}, // EGYPTIAN HIEROGLYPH END ENCLOSURE
+ {0x1343E, 0x1343E, prOP, gcCf}, // EGYPTIAN HIEROGLYPH BEGIN WALLED ENCLOSURE
+ {0x1343F, 0x1343F, prCL, gcCf}, // EGYPTIAN HIEROGLYPH END WALLED ENCLOSURE
+ {0x13440, 0x13440, prCM, gcMn}, // EGYPTIAN HIEROGLYPH MIRROR HORIZONTALLY
+ {0x13441, 0x13446, prAL, gcLo}, // [6] EGYPTIAN HIEROGLYPH FULL BLANK..EGYPTIAN HIEROGLYPH WIDE LOST SIGN
+ {0x13447, 0x13455, prCM, gcMn}, // [15] EGYPTIAN HIEROGLYPH MODIFIER DAMAGED AT TOP START..EGYPTIAN HIEROGLYPH MODIFIER DAMAGED
{0x14400, 0x145CD, prAL, gcLo}, // [462] ANATOLIAN HIEROGLYPH A001..ANATOLIAN HIEROGLYPH A409
{0x145CE, 0x145CE, prOP, gcLo}, // ANATOLIAN HIEROGLYPH A410 BEGIN LOGOGRAM MARK
{0x145CF, 0x145CF, prCL, gcLo}, // ANATOLIAN HIEROGLYPH A410A END LOGOGRAM MARK
@@ -3137,7 +3168,9 @@ var lineBreakCodePoints = [][4]int{
{0x1AFFD, 0x1AFFE, prAL, gcLm}, // [2] KATAKANA LETTER MINNAN NASALIZED TONE-7..KATAKANA LETTER MINNAN NASALIZED TONE-8
{0x1B000, 0x1B0FF, prID, gcLo}, // [256] KATAKANA LETTER ARCHAIC E..HENTAIGANA LETTER RE-2
{0x1B100, 0x1B122, prID, gcLo}, // [35] HENTAIGANA LETTER RE-3..KATAKANA LETTER ARCHAIC WU
+ {0x1B132, 0x1B132, prCJ, gcLo}, // HIRAGANA LETTER SMALL KO
{0x1B150, 0x1B152, prCJ, gcLo}, // [3] HIRAGANA LETTER SMALL WI..HIRAGANA LETTER SMALL WO
+ {0x1B155, 0x1B155, prCJ, gcLo}, // KATAKANA LETTER SMALL KO
{0x1B164, 0x1B167, prCJ, gcLo}, // [4] KATAKANA LETTER SMALL WI..KATAKANA LETTER SMALL N
{0x1B170, 0x1B2FB, prID, gcLo}, // [396] NUSHU CHARACTER-1B170..NUSHU CHARACTER-1B2FB
{0x1BC00, 0x1BC6A, prAL, gcLo}, // [107] DUPLOYAN LETTER H..DUPLOYAN LETTER VOCALIC M
@@ -3168,6 +3201,7 @@ var lineBreakCodePoints = [][4]int{
{0x1D200, 0x1D241, prAL, gcSo}, // [66] GREEK VOCAL NOTATION SYMBOL-1..GREEK INSTRUMENTAL NOTATION SYMBOL-54
{0x1D242, 0x1D244, prCM, gcMn}, // [3] COMBINING GREEK MUSICAL TRISEME..COMBINING GREEK MUSICAL PENTASEME
{0x1D245, 0x1D245, prAL, gcSo}, // GREEK MUSICAL LEIMMA
+ {0x1D2C0, 0x1D2D3, prAL, gcNo}, // [20] KAKTOVIK NUMERAL ZERO..KAKTOVIK NUMERAL NINETEEN
{0x1D2E0, 0x1D2F3, prAL, gcNo}, // [20] MAYAN NUMERAL ZERO..MAYAN NUMERAL NINETEEN
{0x1D300, 0x1D356, prAL, gcSo}, // [87] MONOGRAM FOR EARTH..TETRAGRAM FOR FOSTERING
{0x1D360, 0x1D378, prAL, gcNo}, // [25] COUNTING ROD UNIT DIGIT ONE..TALLY MARK FIVE
@@ -3228,11 +3262,14 @@ var lineBreakCodePoints = [][4]int{
{0x1DF00, 0x1DF09, prAL, gcLl}, // [10] LATIN SMALL LETTER FENG DIGRAPH WITH TRILL..LATIN SMALL LETTER T WITH HOOK AND RETROFLEX HOOK
{0x1DF0A, 0x1DF0A, prAL, gcLo}, // LATIN LETTER RETROFLEX CLICK WITH RETROFLEX HOOK
{0x1DF0B, 0x1DF1E, prAL, gcLl}, // [20] LATIN SMALL LETTER ESH WITH DOUBLE BAR..LATIN SMALL LETTER S WITH CURL
+ {0x1DF25, 0x1DF2A, prAL, gcLl}, // [6] LATIN SMALL LETTER D WITH MID-HEIGHT LEFT HOOK..LATIN SMALL LETTER T WITH MID-HEIGHT LEFT HOOK
{0x1E000, 0x1E006, prCM, gcMn}, // [7] COMBINING GLAGOLITIC LETTER AZU..COMBINING GLAGOLITIC LETTER ZHIVETE
{0x1E008, 0x1E018, prCM, gcMn}, // [17] COMBINING GLAGOLITIC LETTER ZEMLJA..COMBINING GLAGOLITIC LETTER HERU
{0x1E01B, 0x1E021, prCM, gcMn}, // [7] COMBINING GLAGOLITIC LETTER SHTA..COMBINING GLAGOLITIC LETTER YATI
{0x1E023, 0x1E024, prCM, gcMn}, // [2] COMBINING GLAGOLITIC LETTER YU..COMBINING GLAGOLITIC LETTER SMALL YUS
{0x1E026, 0x1E02A, prCM, gcMn}, // [5] COMBINING GLAGOLITIC LETTER YO..COMBINING GLAGOLITIC LETTER FITA
+ {0x1E030, 0x1E06D, prAL, gcLm}, // [62] MODIFIER LETTER CYRILLIC SMALL A..MODIFIER LETTER CYRILLIC SMALL STRAIGHT U WITH STROKE
+ {0x1E08F, 0x1E08F, prCM, gcMn}, // COMBINING CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
{0x1E100, 0x1E12C, prAL, gcLo}, // [45] NYIAKENG PUACHUE HMONG LETTER MA..NYIAKENG PUACHUE HMONG LETTER W
{0x1E130, 0x1E136, prCM, gcMn}, // [7] NYIAKENG PUACHUE HMONG TONE-B..NYIAKENG PUACHUE HMONG TONE-D
{0x1E137, 0x1E13D, prAL, gcLm}, // [7] NYIAKENG PUACHUE HMONG SIGN FOR PERSON..NYIAKENG PUACHUE HMONG SYLLABLE LENGTHENER
@@ -3245,6 +3282,10 @@ var lineBreakCodePoints = [][4]int{
{0x1E2EC, 0x1E2EF, prCM, gcMn}, // [4] WANCHO TONE TUP..WANCHO TONE KOINI
{0x1E2F0, 0x1E2F9, prNU, gcNd}, // [10] WANCHO DIGIT ZERO..WANCHO DIGIT NINE
{0x1E2FF, 0x1E2FF, prPR, gcSc}, // WANCHO NGUN SIGN
+ {0x1E4D0, 0x1E4EA, prAL, gcLo}, // [27] NAG MUNDARI LETTER O..NAG MUNDARI LETTER ELL
+ {0x1E4EB, 0x1E4EB, prAL, gcLm}, // NAG MUNDARI SIGN OJOD
+ {0x1E4EC, 0x1E4EF, prCM, gcMn}, // [4] NAG MUNDARI SIGN MUHOR..NAG MUNDARI SIGN SUTUH
+ {0x1E4F0, 0x1E4F9, prNU, gcNd}, // [10] NAG MUNDARI DIGIT ZERO..NAG MUNDARI DIGIT NINE
{0x1E7E0, 0x1E7E6, prAL, gcLo}, // [7] ETHIOPIC SYLLABLE HHYA..ETHIOPIC SYLLABLE HHYO
{0x1E7E8, 0x1E7EB, prAL, gcLo}, // [4] ETHIOPIC SYLLABLE GURAGE HHWA..ETHIOPIC SYLLABLE HHWE
{0x1E7ED, 0x1E7EE, prAL, gcLo}, // [2] ETHIOPIC SYLLABLE GURAGE MWI..ETHIOPIC SYLLABLE GURAGE MWEE
@@ -3412,16 +3453,18 @@ var lineBreakCodePoints = [][4]int{
{0x1F6C1, 0x1F6CB, prID, gcSo}, // [11] BATHTUB..COUCH AND LAMP
{0x1F6CC, 0x1F6CC, prEB, gcSo}, // SLEEPING ACCOMMODATION
{0x1F6CD, 0x1F6D7, prID, gcSo}, // [11] SHOPPING BAGS..ELEVATOR
- {0x1F6D8, 0x1F6DC, prID, gcCn}, // [5] <reserved-1F6D8>..<reserved-1F6DC>
- {0x1F6DD, 0x1F6EC, prID, gcSo}, // [16] PLAYGROUND SLIDE..AIRPLANE ARRIVING
+ {0x1F6D8, 0x1F6DB, prID, gcCn}, // [4] <reserved-1F6D8>..<reserved-1F6DB>
+ {0x1F6DC, 0x1F6EC, prID, gcSo}, // [17] WIRELESS..AIRPLANE ARRIVING
{0x1F6ED, 0x1F6EF, prID, gcCn}, // [3] <reserved-1F6ED>..<reserved-1F6EF>
{0x1F6F0, 0x1F6FC, prID, gcSo}, // [13] SATELLITE..ROLLER SKATE
{0x1F6FD, 0x1F6FF, prID, gcCn}, // [3] <reserved-1F6FD>..<reserved-1F6FF>
{0x1F700, 0x1F773, prAL, gcSo}, // [116] ALCHEMICAL SYMBOL FOR QUINTESSENCE..ALCHEMICAL SYMBOL FOR HALF OUNCE
- {0x1F774, 0x1F77F, prID, gcCn}, // [12] <reserved-1F774>..<reserved-1F77F>
+ {0x1F774, 0x1F776, prID, gcSo}, // [3] LOT OF FORTUNE..LUNAR ECLIPSE
+ {0x1F777, 0x1F77A, prID, gcCn}, // [4] <reserved-1F777>..<reserved-1F77A>
+ {0x1F77B, 0x1F77F, prID, gcSo}, // [5] HAUMEA..ORCUS
{0x1F780, 0x1F7D4, prAL, gcSo}, // [85] BLACK LEFT-POINTING ISOSCELES RIGHT TRIANGLE..HEAVY TWELVE POINTED PINWHEEL STAR
- {0x1F7D5, 0x1F7D8, prID, gcSo}, // [4] CIRCLED TRIANGLE..NEGATIVE CIRCLED SQUARE
- {0x1F7D9, 0x1F7DF, prID, gcCn}, // [7] <reserved-1F7D9>..<reserved-1F7DF>
+ {0x1F7D5, 0x1F7D9, prID, gcSo}, // [5] CIRCLED TRIANGLE..NINE POINTED WHITE STAR
+ {0x1F7DA, 0x1F7DF, prID, gcCn}, // [6] <reserved-1F7DA>..<reserved-1F7DF>
{0x1F7E0, 0x1F7EB, prID, gcSo}, // [12] LARGE ORANGE CIRCLE..LARGE BROWN SQUARE
{0x1F7EC, 0x1F7EF, prID, gcCn}, // [4] <reserved-1F7EC>..<reserved-1F7EF>
{0x1F7F0, 0x1F7F0, prID, gcSo}, // HEAVY EQUALS SIGN
@@ -3467,33 +3510,29 @@ var lineBreakCodePoints = [][4]int{
{0x1FA54, 0x1FA5F, prID, gcCn}, // [12] <reserved-1FA54>..<reserved-1FA5F>
{0x1FA60, 0x1FA6D, prID, gcSo}, // [14] XIANGQI RED GENERAL..XIANGQI BLACK SOLDIER
{0x1FA6E, 0x1FA6F, prID, gcCn}, // [2] <reserved-1FA6E>..<reserved-1FA6F>
- {0x1FA70, 0x1FA74, prID, gcSo}, // [5] BALLET SHOES..THONG SANDAL
- {0x1FA75, 0x1FA77, prID, gcCn}, // [3] <reserved-1FA75>..<reserved-1FA77>
- {0x1FA78, 0x1FA7C, prID, gcSo}, // [5] DROP OF BLOOD..CRUTCH
+ {0x1FA70, 0x1FA7C, prID, gcSo}, // [13] BALLET SHOES..CRUTCH
{0x1FA7D, 0x1FA7F, prID, gcCn}, // [3] <reserved-1FA7D>..<reserved-1FA7F>
- {0x1FA80, 0x1FA86, prID, gcSo}, // [7] YO-YO..NESTING DOLLS
- {0x1FA87, 0x1FA8F, prID, gcCn}, // [9] <reserved-1FA87>..<reserved-1FA8F>
- {0x1FA90, 0x1FAAC, prID, gcSo}, // [29] RINGED PLANET..HAMSA
- {0x1FAAD, 0x1FAAF, prID, gcCn}, // [3] <reserved-1FAAD>..<reserved-1FAAF>
- {0x1FAB0, 0x1FABA, prID, gcSo}, // [11] FLY..NEST WITH EGGS
- {0x1FABB, 0x1FABF, prID, gcCn}, // [5] <reserved-1FABB>..<reserved-1FABF>
- {0x1FAC0, 0x1FAC2, prID, gcSo}, // [3] ANATOMICAL HEART..PEOPLE HUGGING
+ {0x1FA80, 0x1FA88, prID, gcSo}, // [9] YO-YO..FLUTE
+ {0x1FA89, 0x1FA8F, prID, gcCn}, // [7] <reserved-1FA89>..<reserved-1FA8F>
+ {0x1FA90, 0x1FABD, prID, gcSo}, // [46] RINGED PLANET..WING
+ {0x1FABE, 0x1FABE, prID, gcCn}, // <reserved-1FABE>
+ {0x1FABF, 0x1FAC2, prID, gcSo}, // [4] GOOSE..PEOPLE HUGGING
{0x1FAC3, 0x1FAC5, prEB, gcSo}, // [3] PREGNANT MAN..PERSON WITH CROWN
- {0x1FAC6, 0x1FACF, prID, gcCn}, // [10] <reserved-1FAC6>..<reserved-1FACF>
- {0x1FAD0, 0x1FAD9, prID, gcSo}, // [10] BLUEBERRIES..JAR
- {0x1FADA, 0x1FADF, prID, gcCn}, // [6] <reserved-1FADA>..<reserved-1FADF>
- {0x1FAE0, 0x1FAE7, prID, gcSo}, // [8] MELTING FACE..BUBBLES
- {0x1FAE8, 0x1FAEF, prID, gcCn}, // [8] <reserved-1FAE8>..<reserved-1FAEF>
- {0x1FAF0, 0x1FAF6, prEB, gcSo}, // [7] HAND WITH INDEX FINGER AND THUMB CROSSED..HEART HANDS
- {0x1FAF7, 0x1FAFF, prID, gcCn}, // [9] <reserved-1FAF7>..<reserved-1FAFF>
+ {0x1FAC6, 0x1FACD, prID, gcCn}, // [8] <reserved-1FAC6>..<reserved-1FACD>
+ {0x1FACE, 0x1FADB, prID, gcSo}, // [14] MOOSE..PEA POD
+ {0x1FADC, 0x1FADF, prID, gcCn}, // [4] <reserved-1FADC>..<reserved-1FADF>
+ {0x1FAE0, 0x1FAE8, prID, gcSo}, // [9] MELTING FACE..SHAKING FACE
+ {0x1FAE9, 0x1FAEF, prID, gcCn}, // [7] <reserved-1FAE9>..<reserved-1FAEF>
+ {0x1FAF0, 0x1FAF8, prEB, gcSo}, // [9] HAND WITH INDEX FINGER AND THUMB CROSSED..RIGHTWARDS PUSHING HAND
+ {0x1FAF9, 0x1FAFF, prID, gcCn}, // [7] <reserved-1FAF9>..<reserved-1FAFF>
{0x1FB00, 0x1FB92, prAL, gcSo}, // [147] BLOCK SEXTANT-1..UPPER HALF INVERSE MEDIUM SHADE AND LOWER HALF BLOCK
{0x1FB94, 0x1FBCA, prAL, gcSo}, // [55] LEFT HALF INVERSE MEDIUM SHADE AND RIGHT HALF BLOCK..WHITE UP-POINTING CHEVRON
{0x1FBF0, 0x1FBF9, prNU, gcNd}, // [10] SEGMENTED DIGIT ZERO..SEGMENTED DIGIT NINE
{0x1FC00, 0x1FFFD, prID, gcCn}, // [1022] <reserved-1FC00>..<reserved-1FFFD>
{0x20000, 0x2A6DF, prID, gcLo}, // [42720] CJK UNIFIED IDEOGRAPH-20000..CJK UNIFIED IDEOGRAPH-2A6DF
{0x2A6E0, 0x2A6FF, prID, gcCn}, // [32] <reserved-2A6E0>..<reserved-2A6FF>
- {0x2A700, 0x2B738, prID, gcLo}, // [4153] CJK UNIFIED IDEOGRAPH-2A700..CJK UNIFIED IDEOGRAPH-2B738
- {0x2B739, 0x2B73F, prID, gcCn}, // [7] <reserved-2B739>..<reserved-2B73F>
+ {0x2A700, 0x2B739, prID, gcLo}, // [4154] CJK UNIFIED IDEOGRAPH-2A700..CJK UNIFIED IDEOGRAPH-2B739
+ {0x2B73A, 0x2B73F, prID, gcCn}, // [6] <reserved-2B73A>..<reserved-2B73F>
{0x2B740, 0x2B81D, prID, gcLo}, // [222] CJK UNIFIED IDEOGRAPH-2B740..CJK UNIFIED IDEOGRAPH-2B81D
{0x2B81E, 0x2B81F, prID, gcCn}, // [2] <reserved-2B81E>..<reserved-2B81F>
{0x2B820, 0x2CEA1, prID, gcLo}, // [5762] CJK UNIFIED IDEOGRAPH-2B820..CJK UNIFIED IDEOGRAPH-2CEA1
@@ -3504,7 +3543,9 @@ var lineBreakCodePoints = [][4]int{
{0x2FA1E, 0x2FA1F, prID, gcCn}, // [2] <reserved-2FA1E>..<reserved-2FA1F>
{0x2FA20, 0x2FFFD, prID, gcCn}, // [1502] <reserved-2FA20>..<reserved-2FFFD>
{0x30000, 0x3134A, prID, gcLo}, // [4939] CJK UNIFIED IDEOGRAPH-30000..CJK UNIFIED IDEOGRAPH-3134A
- {0x3134B, 0x3FFFD, prID, gcCn}, // [60595] <reserved-3134B>..<reserved-3FFFD>
+ {0x3134B, 0x3134F, prID, gcCn}, // [5] <reserved-3134B>..<reserved-3134F>
+ {0x31350, 0x323AF, prID, gcLo}, // [4192] CJK UNIFIED IDEOGRAPH-31350..CJK UNIFIED IDEOGRAPH-323AF
+ {0x323B0, 0x3FFFD, prID, gcCn}, // [56398] <reserved-323B0>..<reserved-3FFFD>
{0xE0001, 0xE0001, prCM, gcCf}, // LANGUAGE TAG
{0xE0020, 0xE007F, prCM, gcCf}, // [96] TAG SPACE..CANCEL TAG
{0xE0100, 0xE01EF, prCM, gcMn}, // [240] VARIATION SELECTOR-17..VARIATION SELECTOR-256
diff --git a/vendor/github.com/rivo/uniseg/linerules.go b/vendor/github.com/rivo/uniseg/linerules.go
index d2ad51680..7708ae0fb 100644
--- a/vendor/github.com/rivo/uniseg/linerules.go
+++ b/vendor/github.com/rivo/uniseg/linerules.go
@@ -64,222 +64,381 @@ const (
LineMustBreak // You must break the line here.
)
-// The line break parser's state transitions. It's anologous to grTransitions,
-// see comments there for details. Unicode version 14.0.0.
-var lbTransitions = map[[2]int][3]int{
+// lbTransitions implements the line break parser's state transitions. It's
+// anologous to [grTransitions], see comments there for details.
+//
+// Unicode version 15.0.0.
+func lbTransitions(state, prop int) (newState, lineBreak, rule int) {
+ switch uint64(state) | uint64(prop)<<32 {
// LB4.
- {lbAny, prBK}: {lbBK, LineCanBreak, 310},
- {lbBK, prAny}: {lbAny, LineMustBreak, 40},
+ case lbBK | prAny<<32:
+ return lbAny, LineMustBreak, 40
// LB5.
- {lbAny, prCR}: {lbCR, LineCanBreak, 310},
- {lbAny, prLF}: {lbLF, LineCanBreak, 310},
- {lbAny, prNL}: {lbNL, LineCanBreak, 310},
- {lbCR, prLF}: {lbLF, LineDontBreak, 50},
- {lbCR, prAny}: {lbAny, LineMustBreak, 50},
- {lbLF, prAny}: {lbAny, LineMustBreak, 50},
- {lbNL, prAny}: {lbAny, LineMustBreak, 50},
+ case lbCR | prLF<<32:
+ return lbLF, LineDontBreak, 50
+ case lbCR | prAny<<32:
+ return lbAny, LineMustBreak, 50
+ case lbLF | prAny<<32:
+ return lbAny, LineMustBreak, 50
+ case lbNL | prAny<<32:
+ return lbAny, LineMustBreak, 50
// LB6.
- {lbAny, prBK}: {lbBK, LineDontBreak, 60},
- {lbAny, prCR}: {lbCR, LineDontBreak, 60},
- {lbAny, prLF}: {lbLF, LineDontBreak, 60},
- {lbAny, prNL}: {lbNL, LineDontBreak, 60},
+ case lbAny | prBK<<32:
+ return lbBK, LineDontBreak, 60
+ case lbAny | prCR<<32:
+ return lbCR, LineDontBreak, 60
+ case lbAny | prLF<<32:
+ return lbLF, LineDontBreak, 60
+ case lbAny | prNL<<32:
+ return lbNL, LineDontBreak, 60
// LB7.
- {lbAny, prSP}: {lbSP, LineDontBreak, 70},
- {lbAny, prZW}: {lbZW, LineDontBreak, 70},
+ case lbAny | prSP<<32:
+ return lbSP, LineDontBreak, 70
+ case lbAny | prZW<<32:
+ return lbZW, LineDontBreak, 70
// LB8.
- {lbZW, prSP}: {lbZW, LineDontBreak, 70},
- {lbZW, prAny}: {lbAny, LineCanBreak, 80},
+ case lbZW | prSP<<32:
+ return lbZW, LineDontBreak, 70
+ case lbZW | prAny<<32:
+ return lbAny, LineCanBreak, 80
// LB11.
- {lbAny, prWJ}: {lbWJ, LineDontBreak, 110},
- {lbWJ, prAny}: {lbAny, LineDontBreak, 110},
+ case lbAny | prWJ<<32:
+ return lbWJ, LineDontBreak, 110
+ case lbWJ | prAny<<32:
+ return lbAny, LineDontBreak, 110
// LB12.
- {lbAny, prGL}: {lbGL, LineCanBreak, 310},
- {lbGL, prAny}: {lbAny, LineDontBreak, 120},
+ case lbAny | prGL<<32:
+ return lbGL, LineCanBreak, 310
+ case lbGL | prAny<<32:
+ return lbAny, LineDontBreak, 120
// LB13 (simple transitions).
- {lbAny, prCL}: {lbCL, LineCanBreak, 310},
- {lbAny, prCP}: {lbCP, LineCanBreak, 310},
- {lbAny, prEX}: {lbEX, LineDontBreak, 130},
- {lbAny, prIS}: {lbIS, LineCanBreak, 310},
- {lbAny, prSY}: {lbSY, LineCanBreak, 310},
+ case lbAny | prCL<<32:
+ return lbCL, LineCanBreak, 310
+ case lbAny | prCP<<32:
+ return lbCP, LineCanBreak, 310
+ case lbAny | prEX<<32:
+ return lbEX, LineDontBreak, 130
+ case lbAny | prIS<<32:
+ return lbIS, LineCanBreak, 310
+ case lbAny | prSY<<32:
+ return lbSY, LineCanBreak, 310
// LB14.
- {lbAny, prOP}: {lbOP, LineCanBreak, 310},
- {lbOP, prSP}: {lbOP, LineDontBreak, 70},
- {lbOP, prAny}: {lbAny, LineDontBreak, 140},
+ case lbAny | prOP<<32:
+ return lbOP, LineCanBreak, 310
+ case lbOP | prSP<<32:
+ return lbOP, LineDontBreak, 70
+ case lbOP | prAny<<32:
+ return lbAny, LineDontBreak, 140
// LB15.
- {lbQU, prSP}: {lbQUSP, LineDontBreak, 70},
- {lbQU, prOP}: {lbOP, LineDontBreak, 150},
- {lbQUSP, prOP}: {lbOP, LineDontBreak, 150},
+ case lbQU | prSP<<32:
+ return lbQUSP, LineDontBreak, 70
+ case lbQU | prOP<<32:
+ return lbOP, LineDontBreak, 150
+ case lbQUSP | prOP<<32:
+ return lbOP, LineDontBreak, 150
// LB16.
- {lbCL, prSP}: {lbCLCPSP, LineDontBreak, 70},
- {lbNUCL, prSP}: {lbCLCPSP, LineDontBreak, 70},
- {lbCP, prSP}: {lbCLCPSP, LineDontBreak, 70},
- {lbNUCP, prSP}: {lbCLCPSP, LineDontBreak, 70},
- {lbCL, prNS}: {lbNS, LineDontBreak, 160},
- {lbNUCL, prNS}: {lbNS, LineDontBreak, 160},
- {lbCP, prNS}: {lbNS, LineDontBreak, 160},
- {lbNUCP, prNS}: {lbNS, LineDontBreak, 160},
- {lbCLCPSP, prNS}: {lbNS, LineDontBreak, 160},
+ case lbCL | prSP<<32:
+ return lbCLCPSP, LineDontBreak, 70
+ case lbNUCL | prSP<<32:
+ return lbCLCPSP, LineDontBreak, 70
+ case lbCP | prSP<<32:
+ return lbCLCPSP, LineDontBreak, 70
+ case lbNUCP | prSP<<32:
+ return lbCLCPSP, LineDontBreak, 70
+ case lbCL | prNS<<32:
+ return lbNS, LineDontBreak, 160
+ case lbNUCL | prNS<<32:
+ return lbNS, LineDontBreak, 160
+ case lbCP | prNS<<32:
+ return lbNS, LineDontBreak, 160
+ case lbNUCP | prNS<<32:
+ return lbNS, LineDontBreak, 160
+ case lbCLCPSP | prNS<<32:
+ return lbNS, LineDontBreak, 160
// LB17.
- {lbAny, prB2}: {lbB2, LineCanBreak, 310},
- {lbB2, prSP}: {lbB2SP, LineDontBreak, 70},
- {lbB2, prB2}: {lbB2, LineDontBreak, 170},
- {lbB2SP, prB2}: {lbB2, LineDontBreak, 170},
+ case lbAny | prB2<<32:
+ return lbB2, LineCanBreak, 310
+ case lbB2 | prSP<<32:
+ return lbB2SP, LineDontBreak, 70
+ case lbB2 | prB2<<32:
+ return lbB2, LineDontBreak, 170
+ case lbB2SP | prB2<<32:
+ return lbB2, LineDontBreak, 170
// LB18.
- {lbSP, prAny}: {lbAny, LineCanBreak, 180},
- {lbQUSP, prAny}: {lbAny, LineCanBreak, 180},
- {lbCLCPSP, prAny}: {lbAny, LineCanBreak, 180},
- {lbB2SP, prAny}: {lbAny, LineCanBreak, 180},
+ case lbSP | prAny<<32:
+ return lbAny, LineCanBreak, 180
+ case lbQUSP | prAny<<32:
+ return lbAny, LineCanBreak, 180
+ case lbCLCPSP | prAny<<32:
+ return lbAny, LineCanBreak, 180
+ case lbB2SP | prAny<<32:
+ return lbAny, LineCanBreak, 180
// LB19.
- {lbAny, prQU}: {lbQU, LineDontBreak, 190},
- {lbQU, prAny}: {lbAny, LineDontBreak, 190},
+ case lbAny | prQU<<32:
+ return lbQU, LineDontBreak, 190
+ case lbQU | prAny<<32:
+ return lbAny, LineDontBreak, 190
// LB20.
- {lbAny, prCB}: {lbCB, LineCanBreak, 200},
- {lbCB, prAny}: {lbAny, LineCanBreak, 200},
+ case lbAny | prCB<<32:
+ return lbCB, LineCanBreak, 200
+ case lbCB | prAny<<32:
+ return lbAny, LineCanBreak, 200
// LB21.
- {lbAny, prBA}: {lbBA, LineDontBreak, 210},
- {lbAny, prHY}: {lbHY, LineDontBreak, 210},
- {lbAny, prNS}: {lbNS, LineDontBreak, 210},
- {lbAny, prBB}: {lbBB, LineCanBreak, 310},
- {lbBB, prAny}: {lbAny, LineDontBreak, 210},
+ case lbAny | prBA<<32:
+ return lbBA, LineDontBreak, 210
+ case lbAny | prHY<<32:
+ return lbHY, LineDontBreak, 210
+ case lbAny | prNS<<32:
+ return lbNS, LineDontBreak, 210
+ case lbAny | prBB<<32:
+ return lbBB, LineCanBreak, 310
+ case lbBB | prAny<<32:
+ return lbAny, LineDontBreak, 210
// LB21a.
- {lbAny, prHL}: {lbHL, LineCanBreak, 310},
- {lbHL, prHY}: {lbLB21a, LineDontBreak, 210},
- {lbHL, prBA}: {lbLB21a, LineDontBreak, 210},
- {lbLB21a, prAny}: {lbAny, LineDontBreak, 211},
+ case lbAny | prHL<<32:
+ return lbHL, LineCanBreak, 310
+ case lbHL | prHY<<32:
+ return lbLB21a, LineDontBreak, 210
+ case lbHL | prBA<<32:
+ return lbLB21a, LineDontBreak, 210
+ case lbLB21a | prAny<<32:
+ return lbAny, LineDontBreak, 211
// LB21b.
- {lbSY, prHL}: {lbHL, LineDontBreak, 212},
- {lbNUSY, prHL}: {lbHL, LineDontBreak, 212},
+ case lbSY | prHL<<32:
+ return lbHL, LineDontBreak, 212
+ case lbNUSY | prHL<<32:
+ return lbHL, LineDontBreak, 212
// LB22.
- {lbAny, prIN}: {lbAny, LineDontBreak, 220},
+ case lbAny | prIN<<32:
+ return lbAny, LineDontBreak, 220
// LB23.
- {lbAny, prAL}: {lbAL, LineCanBreak, 310},
- {lbAny, prNU}: {lbNU, LineCanBreak, 310},
- {lbAL, prNU}: {lbNU, LineDontBreak, 230},
- {lbHL, prNU}: {lbNU, LineDontBreak, 230},
- {lbNU, prAL}: {lbAL, LineDontBreak, 230},
- {lbNU, prHL}: {lbHL, LineDontBreak, 230},
- {lbNUNU, prAL}: {lbAL, LineDontBreak, 230},
- {lbNUNU, prHL}: {lbHL, LineDontBreak, 230},
+ case lbAny | prAL<<32:
+ return lbAL, LineCanBreak, 310
+ case lbAny | prNU<<32:
+ return lbNU, LineCanBreak, 310
+ case lbAL | prNU<<32:
+ return lbNU, LineDontBreak, 230
+ case lbHL | prNU<<32:
+ return lbNU, LineDontBreak, 230
+ case lbNU | prAL<<32:
+ return lbAL, LineDontBreak, 230
+ case lbNU | prHL<<32:
+ return lbHL, LineDontBreak, 230
+ case lbNUNU | prAL<<32:
+ return lbAL, LineDontBreak, 230
+ case lbNUNU | prHL<<32:
+ return lbHL, LineDontBreak, 230
// LB23a.
- {lbAny, prPR}: {lbPR, LineCanBreak, 310},
- {lbAny, prID}: {lbIDEM, LineCanBreak, 310},
- {lbAny, prEB}: {lbEB, LineCanBreak, 310},
- {lbAny, prEM}: {lbIDEM, LineCanBreak, 310},
- {lbPR, prID}: {lbIDEM, LineDontBreak, 231},
- {lbPR, prEB}: {lbEB, LineDontBreak, 231},
- {lbPR, prEM}: {lbIDEM, LineDontBreak, 231},
- {lbIDEM, prPO}: {lbPO, LineDontBreak, 231},
- {lbEB, prPO}: {lbPO, LineDontBreak, 231},
+ case lbAny | prPR<<32:
+ return lbPR, LineCanBreak, 310
+ case lbAny | prID<<32:
+ return lbIDEM, LineCanBreak, 310
+ case lbAny | prEB<<32:
+ return lbEB, LineCanBreak, 310
+ case lbAny | prEM<<32:
+ return lbIDEM, LineCanBreak, 310
+ case lbPR | prID<<32:
+ return lbIDEM, LineDontBreak, 231
+ case lbPR | prEB<<32:
+ return lbEB, LineDontBreak, 231
+ case lbPR | prEM<<32:
+ return lbIDEM, LineDontBreak, 231
+ case lbIDEM | prPO<<32:
+ return lbPO, LineDontBreak, 231
+ case lbEB | prPO<<32:
+ return lbPO, LineDontBreak, 231
// LB24.
- {lbAny, prPO}: {lbPO, LineCanBreak, 310},
- {lbPR, prAL}: {lbAL, LineDontBreak, 240},
- {lbPR, prHL}: {lbHL, LineDontBreak, 240},
- {lbPO, prAL}: {lbAL, LineDontBreak, 240},
- {lbPO, prHL}: {lbHL, LineDontBreak, 240},
- {lbAL, prPR}: {lbPR, LineDontBreak, 240},
- {lbAL, prPO}: {lbPO, LineDontBreak, 240},
- {lbHL, prPR}: {lbPR, LineDontBreak, 240},
- {lbHL, prPO}: {lbPO, LineDontBreak, 240},
+ case lbAny | prPO<<32:
+ return lbPO, LineCanBreak, 310
+ case lbPR | prAL<<32:
+ return lbAL, LineDontBreak, 240
+ case lbPR | prHL<<32:
+ return lbHL, LineDontBreak, 240
+ case lbPO | prAL<<32:
+ return lbAL, LineDontBreak, 240
+ case lbPO | prHL<<32:
+ return lbHL, LineDontBreak, 240
+ case lbAL | prPR<<32:
+ return lbPR, LineDontBreak, 240
+ case lbAL | prPO<<32:
+ return lbPO, LineDontBreak, 240
+ case lbHL | prPR<<32:
+ return lbPR, LineDontBreak, 240
+ case lbHL | prPO<<32:
+ return lbPO, LineDontBreak, 240
// LB25 (simple transitions).
- {lbPR, prNU}: {lbNU, LineDontBreak, 250},
- {lbPO, prNU}: {lbNU, LineDontBreak, 250},
- {lbOP, prNU}: {lbNU, LineDontBreak, 250},
- {lbHY, prNU}: {lbNU, LineDontBreak, 250},
- {lbNU, prNU}: {lbNUNU, LineDontBreak, 250},
- {lbNU, prSY}: {lbNUSY, LineDontBreak, 250},
- {lbNU, prIS}: {lbNUIS, LineDontBreak, 250},
- {lbNUNU, prNU}: {lbNUNU, LineDontBreak, 250},
- {lbNUNU, prSY}: {lbNUSY, LineDontBreak, 250},
- {lbNUNU, prIS}: {lbNUIS, LineDontBreak, 250},
- {lbNUSY, prNU}: {lbNUNU, LineDontBreak, 250},
- {lbNUSY, prSY}: {lbNUSY, LineDontBreak, 250},
- {lbNUSY, prIS}: {lbNUIS, LineDontBreak, 250},
- {lbNUIS, prNU}: {lbNUNU, LineDontBreak, 250},
- {lbNUIS, prSY}: {lbNUSY, LineDontBreak, 250},
- {lbNUIS, prIS}: {lbNUIS, LineDontBreak, 250},
- {lbNU, prCL}: {lbNUCL, LineDontBreak, 250},
- {lbNU, prCP}: {lbNUCP, LineDontBreak, 250},
- {lbNUNU, prCL}: {lbNUCL, LineDontBreak, 250},
- {lbNUNU, prCP}: {lbNUCP, LineDontBreak, 250},
- {lbNUSY, prCL}: {lbNUCL, LineDontBreak, 250},
- {lbNUSY, prCP}: {lbNUCP, LineDontBreak, 250},
- {lbNUIS, prCL}: {lbNUCL, LineDontBreak, 250},
- {lbNUIS, prCP}: {lbNUCP, LineDontBreak, 250},
- {lbNU, prPO}: {lbPO, LineDontBreak, 250},
- {lbNUNU, prPO}: {lbPO, LineDontBreak, 250},
- {lbNUSY, prPO}: {lbPO, LineDontBreak, 250},
- {lbNUIS, prPO}: {lbPO, LineDontBreak, 250},
- {lbNUCL, prPO}: {lbPO, LineDontBreak, 250},
- {lbNUCP, prPO}: {lbPO, LineDontBreak, 250},
- {lbNU, prPR}: {lbPR, LineDontBreak, 250},
- {lbNUNU, prPR}: {lbPR, LineDontBreak, 250},
- {lbNUSY, prPR}: {lbPR, LineDontBreak, 250},
- {lbNUIS, prPR}: {lbPR, LineDontBreak, 250},
- {lbNUCL, prPR}: {lbPR, LineDontBreak, 250},
- {lbNUCP, prPR}: {lbPR, LineDontBreak, 250},
+ case lbPR | prNU<<32:
+ return lbNU, LineDontBreak, 250
+ case lbPO | prNU<<32:
+ return lbNU, LineDontBreak, 250
+ case lbOP | prNU<<32:
+ return lbNU, LineDontBreak, 250
+ case lbHY | prNU<<32:
+ return lbNU, LineDontBreak, 250
+ case lbNU | prNU<<32:
+ return lbNUNU, LineDontBreak, 250
+ case lbNU | prSY<<32:
+ return lbNUSY, LineDontBreak, 250
+ case lbNU | prIS<<32:
+ return lbNUIS, LineDontBreak, 250
+ case lbNUNU | prNU<<32:
+ return lbNUNU, LineDontBreak, 250
+ case lbNUNU | prSY<<32:
+ return lbNUSY, LineDontBreak, 250
+ case lbNUNU | prIS<<32:
+ return lbNUIS, LineDontBreak, 250
+ case lbNUSY | prNU<<32:
+ return lbNUNU, LineDontBreak, 250
+ case lbNUSY | prSY<<32:
+ return lbNUSY, LineDontBreak, 250
+ case lbNUSY | prIS<<32:
+ return lbNUIS, LineDontBreak, 250
+ case lbNUIS | prNU<<32:
+ return lbNUNU, LineDontBreak, 250
+ case lbNUIS | prSY<<32:
+ return lbNUSY, LineDontBreak, 250
+ case lbNUIS | prIS<<32:
+ return lbNUIS, LineDontBreak, 250
+ case lbNU | prCL<<32:
+ return lbNUCL, LineDontBreak, 250
+ case lbNU | prCP<<32:
+ return lbNUCP, LineDontBreak, 250
+ case lbNUNU | prCL<<32:
+ return lbNUCL, LineDontBreak, 250
+ case lbNUNU | prCP<<32:
+ return lbNUCP, LineDontBreak, 250
+ case lbNUSY | prCL<<32:
+ return lbNUCL, LineDontBreak, 250
+ case lbNUSY | prCP<<32:
+ return lbNUCP, LineDontBreak, 250
+ case lbNUIS | prCL<<32:
+ return lbNUCL, LineDontBreak, 250
+ case lbNUIS | prCP<<32:
+ return lbNUCP, LineDontBreak, 250
+ case lbNU | prPO<<32:
+ return lbPO, LineDontBreak, 250
+ case lbNUNU | prPO<<32:
+ return lbPO, LineDontBreak, 250
+ case lbNUSY | prPO<<32:
+ return lbPO, LineDontBreak, 250
+ case lbNUIS | prPO<<32:
+ return lbPO, LineDontBreak, 250
+ case lbNUCL | prPO<<32:
+ return lbPO, LineDontBreak, 250
+ case lbNUCP | prPO<<32:
+ return lbPO, LineDontBreak, 250
+ case lbNU | prPR<<32:
+ return lbPR, LineDontBreak, 250
+ case lbNUNU | prPR<<32:
+ return lbPR, LineDontBreak, 250
+ case lbNUSY | prPR<<32:
+ return lbPR, LineDontBreak, 250
+ case lbNUIS | prPR<<32:
+ return lbPR, LineDontBreak, 250
+ case lbNUCL | prPR<<32:
+ return lbPR, LineDontBreak, 250
+ case lbNUCP | prPR<<32:
+ return lbPR, LineDontBreak, 250
// LB26.
- {lbAny, prJL}: {lbJL, LineCanBreak, 310},
- {lbAny, prJV}: {lbJV, LineCanBreak, 310},
- {lbAny, prJT}: {lbJT, LineCanBreak, 310},
- {lbAny, prH2}: {lbH2, LineCanBreak, 310},
- {lbAny, prH3}: {lbH3, LineCanBreak, 310},
- {lbJL, prJL}: {lbJL, LineDontBreak, 260},
- {lbJL, prJV}: {lbJV, LineDontBreak, 260},
- {lbJL, prH2}: {lbH2, LineDontBreak, 260},
- {lbJL, prH3}: {lbH3, LineDontBreak, 260},
- {lbJV, prJV}: {lbJV, LineDontBreak, 260},
- {lbJV, prJT}: {lbJT, LineDontBreak, 260},
- {lbH2, prJV}: {lbJV, LineDontBreak, 260},
- {lbH2, prJT}: {lbJT, LineDontBreak, 260},
- {lbJT, prJT}: {lbJT, LineDontBreak, 260},
- {lbH3, prJT}: {lbJT, LineDontBreak, 260},
+ case lbAny | prJL<<32:
+ return lbJL, LineCanBreak, 310
+ case lbAny | prJV<<32:
+ return lbJV, LineCanBreak, 310
+ case lbAny | prJT<<32:
+ return lbJT, LineCanBreak, 310
+ case lbAny | prH2<<32:
+ return lbH2, LineCanBreak, 310
+ case lbAny | prH3<<32:
+ return lbH3, LineCanBreak, 310
+ case lbJL | prJL<<32:
+ return lbJL, LineDontBreak, 260
+ case lbJL | prJV<<32:
+ return lbJV, LineDontBreak, 260
+ case lbJL | prH2<<32:
+ return lbH2, LineDontBreak, 260
+ case lbJL | prH3<<32:
+ return lbH3, LineDontBreak, 260
+ case lbJV | prJV<<32:
+ return lbJV, LineDontBreak, 260
+ case lbJV | prJT<<32:
+ return lbJT, LineDontBreak, 260
+ case lbH2 | prJV<<32:
+ return lbJV, LineDontBreak, 260
+ case lbH2 | prJT<<32:
+ return lbJT, LineDontBreak, 260
+ case lbJT | prJT<<32:
+ return lbJT, LineDontBreak, 260
+ case lbH3 | prJT<<32:
+ return lbJT, LineDontBreak, 260
// LB27.
- {lbJL, prPO}: {lbPO, LineDontBreak, 270},
- {lbJV, prPO}: {lbPO, LineDontBreak, 270},
- {lbJT, prPO}: {lbPO, LineDontBreak, 270},
- {lbH2, prPO}: {lbPO, LineDontBreak, 270},
- {lbH3, prPO}: {lbPO, LineDontBreak, 270},
- {lbPR, prJL}: {lbJL, LineDontBreak, 270},
- {lbPR, prJV}: {lbJV, LineDontBreak, 270},
- {lbPR, prJT}: {lbJT, LineDontBreak, 270},
- {lbPR, prH2}: {lbH2, LineDontBreak, 270},
- {lbPR, prH3}: {lbH3, LineDontBreak, 270},
+ case lbJL | prPO<<32:
+ return lbPO, LineDontBreak, 270
+ case lbJV | prPO<<32:
+ return lbPO, LineDontBreak, 270
+ case lbJT | prPO<<32:
+ return lbPO, LineDontBreak, 270
+ case lbH2 | prPO<<32:
+ return lbPO, LineDontBreak, 270
+ case lbH3 | prPO<<32:
+ return lbPO, LineDontBreak, 270
+ case lbPR | prJL<<32:
+ return lbJL, LineDontBreak, 270
+ case lbPR | prJV<<32:
+ return lbJV, LineDontBreak, 270
+ case lbPR | prJT<<32:
+ return lbJT, LineDontBreak, 270
+ case lbPR | prH2<<32:
+ return lbH2, LineDontBreak, 270
+ case lbPR | prH3<<32:
+ return lbH3, LineDontBreak, 270
// LB28.
- {lbAL, prAL}: {lbAL, LineDontBreak, 280},
- {lbAL, prHL}: {lbHL, LineDontBreak, 280},
- {lbHL, prAL}: {lbAL, LineDontBreak, 280},
- {lbHL, prHL}: {lbHL, LineDontBreak, 280},
+ case lbAL | prAL<<32:
+ return lbAL, LineDontBreak, 280
+ case lbAL | prHL<<32:
+ return lbHL, LineDontBreak, 280
+ case lbHL | prAL<<32:
+ return lbAL, LineDontBreak, 280
+ case lbHL | prHL<<32:
+ return lbHL, LineDontBreak, 280
// LB29.
- {lbIS, prAL}: {lbAL, LineDontBreak, 290},
- {lbIS, prHL}: {lbHL, LineDontBreak, 290},
- {lbNUIS, prAL}: {lbAL, LineDontBreak, 290},
- {lbNUIS, prHL}: {lbHL, LineDontBreak, 290},
+ case lbIS | prAL<<32:
+ return lbAL, LineDontBreak, 290
+ case lbIS | prHL<<32:
+ return lbHL, LineDontBreak, 290
+ case lbNUIS | prAL<<32:
+ return lbAL, LineDontBreak, 290
+ case lbNUIS | prHL<<32:
+ return lbHL, LineDontBreak, 290
+
+ default:
+ return -1, -1, -1
+ }
}
// transitionLineBreakState determines the new state of the line break parser
@@ -290,7 +449,7 @@ var lbTransitions = map[[2]int][3]int{
// further lookups.
func transitionLineBreakState(state int, r rune, b []byte, str string) (newState int, lineBreak int) {
// Determine the property of the next character.
- nextProperty, generalCategory := propertyWithGenCat(lineBreakCodePoints, r)
+ nextProperty, generalCategory := propertyLineBreak(r)
// Prepare.
var forceNoBreak, isCPeaFWH bool
@@ -306,7 +465,7 @@ func transitionLineBreakState(state int, r rune, b []byte, str string) (newState
defer func() {
// Transition into LB30.
if newState == lbCP || newState == lbNUCP {
- ea := property(eastAsianWidth, r)
+ ea := propertyEastAsianWidth(r)
if ea != prF && ea != prW && ea != prH {
newState |= lbCPeaFWHBit
}
@@ -352,30 +511,27 @@ func transitionLineBreakState(state int, r rune, b []byte, str string) (newState
// Find the applicable transition in the table.
var rule int
- transition, ok := lbTransitions[[2]int{state, nextProperty}]
- if ok {
- // We have a specific transition. We'll use it.
- newState, lineBreak, rule = transition[0], transition[1], transition[2]
- } else {
+ newState, lineBreak, rule = lbTransitions(state, nextProperty)
+ if newState < 0 {
// No specific transition found. Try the less specific ones.
- transAnyProp, okAnyProp := lbTransitions[[2]int{state, prAny}]
- transAnyState, okAnyState := lbTransitions[[2]int{lbAny, nextProperty}]
- if okAnyProp && okAnyState {
+ anyPropProp, anyPropLineBreak, anyPropRule := lbTransitions(state, prAny)
+ anyStateProp, anyStateLineBreak, anyStateRule := lbTransitions(lbAny, nextProperty)
+ if anyPropProp >= 0 && anyStateProp >= 0 {
// Both apply. We'll use a mix (see comments for grTransitions).
- newState, lineBreak, rule = transAnyState[0], transAnyState[1], transAnyState[2]
- if transAnyProp[2] < transAnyState[2] {
- lineBreak, rule = transAnyProp[1], transAnyProp[2]
+ newState, lineBreak, rule = anyStateProp, anyStateLineBreak, anyStateRule
+ if anyPropRule < anyStateRule {
+ lineBreak, rule = anyPropLineBreak, anyPropRule
}
- } else if okAnyProp {
+ } else if anyPropProp >= 0 {
// We only have a specific state.
- newState, lineBreak, rule = transAnyProp[0], transAnyProp[1], transAnyProp[2]
+ newState, lineBreak, rule = anyPropProp, anyPropLineBreak, anyPropRule
// This branch will probably never be reached because okAnyState will
// always be true given the current transition map. But we keep it here
// for future modifications to the transition map where this may not be
// true anymore.
- } else if okAnyState {
+ } else if anyStateProp >= 0 {
// We only have a specific property.
- newState, lineBreak, rule = transAnyState[0], transAnyState[1], transAnyState[2]
+ newState, lineBreak, rule = anyStateProp, anyStateLineBreak, anyStateRule
} else {
// No known transition. LB31: ALL ÷ ALL.
newState, lineBreak, rule = lbAny, LineCanBreak, 310
@@ -414,7 +570,7 @@ func transitionLineBreakState(state int, r rune, b []byte, str string) (newState
r, _ = utf8.DecodeRuneInString(str)
}
if r != utf8.RuneError {
- pr, _ := propertyWithGenCat(lineBreakCodePoints, r)
+ pr, _ := propertyLineBreak(r)
if pr == prNU {
return lbNU, LineDontBreak
}
@@ -424,7 +580,7 @@ func transitionLineBreakState(state int, r rune, b []byte, str string) (newState
// LB30 (part one).
if rule > 300 {
if (state == lbAL || state == lbHL || state == lbNU || state == lbNUNU) && nextProperty == prOP {
- ea := property(eastAsianWidth, r)
+ ea := propertyEastAsianWidth(r)
if ea != prF && ea != prW && ea != prH {
return lbOP, LineDontBreak
}
@@ -460,7 +616,7 @@ func transitionLineBreakState(state int, r rune, b []byte, str string) (newState
return prAny, LineDontBreak
}
}
- graphemeProperty := property(graphemeCodePoints, r)
+ graphemeProperty := propertyGraphemes(r)
if graphemeProperty == prExtendedPictographic && generalCategory == gcCn {
return lbExtPicCn, LineCanBreak
}
diff --git a/vendor/github.com/rivo/uniseg/properties.go b/vendor/github.com/rivo/uniseg/properties.go
index bc3c7bcf3..6290e6810 100644
--- a/vendor/github.com/rivo/uniseg/properties.go
+++ b/vendor/github.com/rivo/uniseg/properties.go
@@ -160,9 +160,49 @@ func property(dictionary [][3]int, r rune) int {
return propertySearch(dictionary, r)[2]
}
-// propertyWithGenCat returns the Unicode property value and General Category
-// (see constants above) of the given code point.
-func propertyWithGenCat(dictionary [][4]int, r rune) (property, generalCategory int) {
- entry := propertySearch(dictionary, r)
+// propertyLineBreak returns the Unicode property value and General Category
+// (see constants above) of the given code point, as listed in the line break
+// code points table, while fast tracking ASCII digits and letters.
+func propertyLineBreak(r rune) (property, generalCategory int) {
+ if r >= 'a' && r <= 'z' {
+ return prAL, gcLl
+ }
+ if r >= 'A' && r <= 'Z' {
+ return prAL, gcLu
+ }
+ if r >= '0' && r <= '9' {
+ return prNU, gcNd
+ }
+ entry := propertySearch(lineBreakCodePoints, r)
return entry[2], entry[3]
}
+
+// propertyGraphemes returns the Unicode grapheme cluster property value of the
+// given code point while fast tracking ASCII characters.
+func propertyGraphemes(r rune) int {
+ if r >= 0x20 && r <= 0x7e {
+ return prAny
+ }
+ if r == 0x0a {
+ return prLF
+ }
+ if r == 0x0d {
+ return prCR
+ }
+ if r >= 0 && r <= 0x1f || r == 0x7f {
+ return prControl
+ }
+ return property(graphemeCodePoints, r)
+}
+
+// propertyEastAsianWidth returns the Unicode East Asian Width property value of
+// the given code point while fast tracking ASCII characters.
+func propertyEastAsianWidth(r rune) int {
+ if r >= 0x20 && r <= 0x7e {
+ return prNa
+ }
+ if r >= 0 && r <= 0x1f || r == 0x7f {
+ return prN
+ }
+ return property(eastAsianWidth, r)
+}
diff --git a/vendor/github.com/rivo/uniseg/sentenceproperties.go b/vendor/github.com/rivo/uniseg/sentenceproperties.go
index ba0cf2de1..67717ec1f 100644
--- a/vendor/github.com/rivo/uniseg/sentenceproperties.go
+++ b/vendor/github.com/rivo/uniseg/sentenceproperties.go
@@ -1,13 +1,13 @@
-package uniseg
-
// Code generated via go generate from gen_properties.go. DO NOT EDIT.
+package uniseg
+
// sentenceBreakCodePoints are taken from
-// https://www.unicode.org/Public/14.0.0/ucd/auxiliary/SentenceBreakProperty.txt
+// https://www.unicode.org/Public/15.0.0/ucd/auxiliary/SentenceBreakProperty.txt
// and
-// https://unicode.org/Public/14.0.0/ucd/emoji/emoji-data.txt
+// https://unicode.org/Public/15.0.0/ucd/emoji/emoji-data.txt
// ("Extended_Pictographic" only)
-// on September 10, 2022. See https://www.unicode.org/license.html for the Unicode
+// on September 5, 2023. See https://www.unicode.org/license.html for the Unicode
// license agreement.
var sentenceBreakCodePoints = [][3]int{
{0x0009, 0x0009, prSp}, // Cc <control-0009>
@@ -843,6 +843,7 @@ var sentenceBreakCodePoints = [][3]int{
{0x0CE2, 0x0CE3, prExtend}, // Mn [2] KANNADA VOWEL SIGN VOCALIC L..KANNADA VOWEL SIGN VOCALIC LL
{0x0CE6, 0x0CEF, prNumeric}, // Nd [10] KANNADA DIGIT ZERO..KANNADA DIGIT NINE
{0x0CF1, 0x0CF2, prOLetter}, // Lo [2] KANNADA SIGN JIHVAMULIYA..KANNADA SIGN UPADHMANIYA
+ {0x0CF3, 0x0CF3, prExtend}, // Mc KANNADA SIGN COMBINING ANUSVARA ABOVE RIGHT
{0x0D00, 0x0D01, prExtend}, // Mn [2] MALAYALAM SIGN COMBINING ANUSVARA ABOVE..MALAYALAM SIGN CANDRABINDU
{0x0D02, 0x0D03, prExtend}, // Mc [2] MALAYALAM SIGN ANUSVARA..MALAYALAM SIGN VISARGA
{0x0D04, 0x0D0C, prOLetter}, // Lo [9] MALAYALAM LETTER VEDIC ANUSVARA..MALAYALAM LETTER VOCALIC L
@@ -896,7 +897,7 @@ var sentenceBreakCodePoints = [][3]int{
{0x0EBD, 0x0EBD, prOLetter}, // Lo LAO SEMIVOWEL SIGN NYO
{0x0EC0, 0x0EC4, prOLetter}, // Lo [5] LAO VOWEL SIGN E..LAO VOWEL SIGN AI
{0x0EC6, 0x0EC6, prOLetter}, // Lm LAO KO LA
- {0x0EC8, 0x0ECD, prExtend}, // Mn [6] LAO TONE MAI EK..LAO NIGGAHITA
+ {0x0EC8, 0x0ECE, prExtend}, // Mn [7] LAO TONE MAI EK..LAO YAMAKKAN
{0x0ED0, 0x0ED9, prNumeric}, // Nd [10] LAO DIGIT ZERO..LAO DIGIT NINE
{0x0EDC, 0x0EDF, prOLetter}, // Lo [4] LAO HO NO..LAO LETTER KHMU NYO
{0x0F00, 0x0F00, prOLetter}, // Lo TIBETAN SYLLABLE OM
@@ -958,7 +959,7 @@ var sentenceBreakCodePoints = [][3]int{
{0x10C7, 0x10C7, prUpper}, // L& GEORGIAN CAPITAL LETTER YN
{0x10CD, 0x10CD, prUpper}, // L& GEORGIAN CAPITAL LETTER AEN
{0x10D0, 0x10FA, prOLetter}, // L& [43] GEORGIAN LETTER AN..GEORGIAN LETTER AIN
- {0x10FC, 0x10FC, prOLetter}, // Lm MODIFIER LETTER GEORGIAN NAR
+ {0x10FC, 0x10FC, prLower}, // Lm MODIFIER LETTER GEORGIAN NAR
{0x10FD, 0x10FF, prOLetter}, // L& [3] GEORGIAN LETTER AEN..GEORGIAN LETTER LABIAL SIGN
{0x1100, 0x1248, prOLetter}, // Lo [329] HANGUL CHOSEONG KIYEOK..ETHIOPIC SYLLABLE QWA
{0x124A, 0x124D, prOLetter}, // Lo [4] ETHIOPIC SYLLABLE QWI..ETHIOPIC SYLLABLE QWE
@@ -2034,7 +2035,7 @@ var sentenceBreakCodePoints = [][3]int{
{0xA7D7, 0xA7D7, prLower}, // L& LATIN SMALL LETTER MIDDLE SCOTS S
{0xA7D8, 0xA7D8, prUpper}, // L& LATIN CAPITAL LETTER SIGMOID S
{0xA7D9, 0xA7D9, prLower}, // L& LATIN SMALL LETTER SIGMOID S
- {0xA7F2, 0xA7F4, prOLetter}, // Lm [3] MODIFIER LETTER CAPITAL C..MODIFIER LETTER CAPITAL Q
+ {0xA7F2, 0xA7F4, prLower}, // Lm [3] MODIFIER LETTER CAPITAL C..MODIFIER LETTER CAPITAL Q
{0xA7F5, 0xA7F5, prUpper}, // L& LATIN CAPITAL LETTER REVERSED HALF H
{0xA7F6, 0xA7F6, prLower}, // L& LATIN SMALL LETTER REVERSED HALF H
{0xA7F7, 0xA7F7, prOLetter}, // Lo LATIN EPIGRAPHIC LETTER SIDEWAYS I
@@ -2140,7 +2141,7 @@ var sentenceBreakCodePoints = [][3]int{
{0xAB30, 0xAB5A, prLower}, // L& [43] LATIN SMALL LETTER BARRED ALPHA..LATIN SMALL LETTER Y WITH SHORT RIGHT LEG
{0xAB5C, 0xAB5F, prLower}, // Lm [4] MODIFIER LETTER SMALL HENG..MODIFIER LETTER SMALL U WITH LEFT HOOK
{0xAB60, 0xAB68, prLower}, // L& [9] LATIN SMALL LETTER SAKHA YAT..LATIN SMALL LETTER TURNED R WITH MIDDLE TILDE
- {0xAB69, 0xAB69, prOLetter}, // Lm MODIFIER LETTER SMALL TURNED W
+ {0xAB69, 0xAB69, prLower}, // Lm MODIFIER LETTER SMALL TURNED W
{0xAB70, 0xABBF, prLower}, // L& [80] CHEROKEE SMALL LETTER A..CHEROKEE SMALL LETTER YA
{0xABC0, 0xABE2, prOLetter}, // Lo [35] MEETEI MAYEK LETTER KOK..MEETEI MAYEK LETTER I LONSUM
{0xABE3, 0xABE4, prExtend}, // Mc [2] MEETEI MAYEK VOWEL SIGN ONAP..MEETEI MAYEK VOWEL SIGN INAP
@@ -2334,6 +2335,7 @@ var sentenceBreakCodePoints = [][3]int{
{0x10E80, 0x10EA9, prOLetter}, // Lo [42] YEZIDI LETTER ELIF..YEZIDI LETTER ET
{0x10EAB, 0x10EAC, prExtend}, // Mn [2] YEZIDI COMBINING HAMZA MARK..YEZIDI COMBINING MADDA MARK
{0x10EB0, 0x10EB1, prOLetter}, // Lo [2] YEZIDI LETTER LAM WITH DOT ABOVE..YEZIDI LETTER YOT WITH CIRCUMFLEX ABOVE
+ {0x10EFD, 0x10EFF, prExtend}, // Mn [3] ARABIC SMALL LOW WORD SAKTA..ARABIC SMALL LOW WORD MADDA
{0x10F00, 0x10F1C, prOLetter}, // Lo [29] OLD SOGDIAN LETTER ALEPH..OLD SOGDIAN LETTER FINAL TAW WITH VERTICAL TAIL
{0x10F27, 0x10F27, prOLetter}, // Lo OLD SOGDIAN LIGATURE AYIN-DALETH
{0x10F30, 0x10F45, prOLetter}, // Lo [22] SOGDIAN LETTER ALEPH..SOGDIAN INDEPENDENT SHIN
@@ -2408,6 +2410,8 @@ var sentenceBreakCodePoints = [][3]int{
{0x11238, 0x11239, prSTerm}, // Po [2] KHOJKI DANDA..KHOJKI DOUBLE DANDA
{0x1123B, 0x1123C, prSTerm}, // Po [2] KHOJKI SECTION MARK..KHOJKI DOUBLE SECTION MARK
{0x1123E, 0x1123E, prExtend}, // Mn KHOJKI SIGN SUKUN
+ {0x1123F, 0x11240, prOLetter}, // Lo [2] KHOJKI LETTER QA..KHOJKI LETTER SHORT I
+ {0x11241, 0x11241, prExtend}, // Mn KHOJKI VOWEL SIGN VOCALIC R
{0x11280, 0x11286, prOLetter}, // Lo [7] MULTANI LETTER A..MULTANI LETTER GA
{0x11288, 0x11288, prOLetter}, // Lo MULTANI LETTER GHA
{0x1128A, 0x1128D, prOLetter}, // Lo [4] MULTANI LETTER CA..MULTANI LETTER JJA
@@ -2603,13 +2607,29 @@ var sentenceBreakCodePoints = [][3]int{
{0x11EF3, 0x11EF4, prExtend}, // Mn [2] MAKASAR VOWEL SIGN I..MAKASAR VOWEL SIGN U
{0x11EF5, 0x11EF6, prExtend}, // Mc [2] MAKASAR VOWEL SIGN E..MAKASAR VOWEL SIGN O
{0x11EF7, 0x11EF8, prSTerm}, // Po [2] MAKASAR PASSIMBANG..MAKASAR END OF SECTION
+ {0x11F00, 0x11F01, prExtend}, // Mn [2] KAWI SIGN CANDRABINDU..KAWI SIGN ANUSVARA
+ {0x11F02, 0x11F02, prOLetter}, // Lo KAWI SIGN REPHA
+ {0x11F03, 0x11F03, prExtend}, // Mc KAWI SIGN VISARGA
+ {0x11F04, 0x11F10, prOLetter}, // Lo [13] KAWI LETTER A..KAWI LETTER O
+ {0x11F12, 0x11F33, prOLetter}, // Lo [34] KAWI LETTER KA..KAWI LETTER JNYA
+ {0x11F34, 0x11F35, prExtend}, // Mc [2] KAWI VOWEL SIGN AA..KAWI VOWEL SIGN ALTERNATE AA
+ {0x11F36, 0x11F3A, prExtend}, // Mn [5] KAWI VOWEL SIGN I..KAWI VOWEL SIGN VOCALIC R
+ {0x11F3E, 0x11F3F, prExtend}, // Mc [2] KAWI VOWEL SIGN E..KAWI VOWEL SIGN AI
+ {0x11F40, 0x11F40, prExtend}, // Mn KAWI VOWEL SIGN EU
+ {0x11F41, 0x11F41, prExtend}, // Mc KAWI SIGN KILLER
+ {0x11F42, 0x11F42, prExtend}, // Mn KAWI CONJOINER
+ {0x11F43, 0x11F44, prSTerm}, // Po [2] KAWI DANDA..KAWI DOUBLE DANDA
+ {0x11F50, 0x11F59, prNumeric}, // Nd [10] KAWI DIGIT ZERO..KAWI DIGIT NINE
{0x11FB0, 0x11FB0, prOLetter}, // Lo LISU LETTER YHA
{0x12000, 0x12399, prOLetter}, // Lo [922] CUNEIFORM SIGN A..CUNEIFORM SIGN U U
{0x12400, 0x1246E, prOLetter}, // Nl [111] CUNEIFORM NUMERIC SIGN TWO ASH..CUNEIFORM NUMERIC SIGN NINE U VARIANT FORM
{0x12480, 0x12543, prOLetter}, // Lo [196] CUNEIFORM SIGN AB TIMES NUN TENU..CUNEIFORM SIGN ZU5 TIMES THREE DISH TENU
{0x12F90, 0x12FF0, prOLetter}, // Lo [97] CYPRO-MINOAN SIGN CM001..CYPRO-MINOAN SIGN CM114
- {0x13000, 0x1342E, prOLetter}, // Lo [1071] EGYPTIAN HIEROGLYPH A001..EGYPTIAN HIEROGLYPH AA032
- {0x13430, 0x13438, prFormat}, // Cf [9] EGYPTIAN HIEROGLYPH VERTICAL JOINER..EGYPTIAN HIEROGLYPH END SEGMENT
+ {0x13000, 0x1342F, prOLetter}, // Lo [1072] EGYPTIAN HIEROGLYPH A001..EGYPTIAN HIEROGLYPH V011D
+ {0x13430, 0x1343F, prFormat}, // Cf [16] EGYPTIAN HIEROGLYPH VERTICAL JOINER..EGYPTIAN HIEROGLYPH END WALLED ENCLOSURE
+ {0x13440, 0x13440, prExtend}, // Mn EGYPTIAN HIEROGLYPH MIRROR HORIZONTALLY
+ {0x13441, 0x13446, prOLetter}, // Lo [6] EGYPTIAN HIEROGLYPH FULL BLANK..EGYPTIAN HIEROGLYPH WIDE LOST SIGN
+ {0x13447, 0x13455, prExtend}, // Mn [15] EGYPTIAN HIEROGLYPH MODIFIER DAMAGED AT TOP START..EGYPTIAN HIEROGLYPH MODIFIER DAMAGED
{0x14400, 0x14646, prOLetter}, // Lo [583] ANATOLIAN HIEROGLYPH A001..ANATOLIAN HIEROGLYPH A530
{0x16800, 0x16A38, prOLetter}, // Lo [569] BAMUM LETTER PHASE-A NGKUE MFON..BAMUM LETTER PHASE-F VUEQ
{0x16A40, 0x16A5E, prOLetter}, // Lo [31] MRO LETTER TA..MRO LETTER TEK
@@ -2648,7 +2668,9 @@ var sentenceBreakCodePoints = [][3]int{
{0x1AFF5, 0x1AFFB, prOLetter}, // Lm [7] KATAKANA LETTER MINNAN TONE-7..KATAKANA LETTER MINNAN NASALIZED TONE-5
{0x1AFFD, 0x1AFFE, prOLetter}, // Lm [2] KATAKANA LETTER MINNAN NASALIZED TONE-7..KATAKANA LETTER MINNAN NASALIZED TONE-8
{0x1B000, 0x1B122, prOLetter}, // Lo [291] KATAKANA LETTER ARCHAIC E..KATAKANA LETTER ARCHAIC WU
+ {0x1B132, 0x1B132, prOLetter}, // Lo HIRAGANA LETTER SMALL KO
{0x1B150, 0x1B152, prOLetter}, // Lo [3] HIRAGANA LETTER SMALL WI..HIRAGANA LETTER SMALL WO
+ {0x1B155, 0x1B155, prOLetter}, // Lo KATAKANA LETTER SMALL KO
{0x1B164, 0x1B167, prOLetter}, // Lo [4] KATAKANA LETTER SMALL WI..KATAKANA LETTER SMALL N
{0x1B170, 0x1B2FB, prOLetter}, // Lo [396] NUSHU CHARACTER-1B170..NUSHU CHARACTER-1B2FB
{0x1BC00, 0x1BC6A, prOLetter}, // Lo [107] DUPLOYAN LETTER H..DUPLOYAN LETTER VOCALIC M
@@ -2738,11 +2760,14 @@ var sentenceBreakCodePoints = [][3]int{
{0x1DF00, 0x1DF09, prLower}, // L& [10] LATIN SMALL LETTER FENG DIGRAPH WITH TRILL..LATIN SMALL LETTER T WITH HOOK AND RETROFLEX HOOK
{0x1DF0A, 0x1DF0A, prOLetter}, // Lo LATIN LETTER RETROFLEX CLICK WITH RETROFLEX HOOK
{0x1DF0B, 0x1DF1E, prLower}, // L& [20] LATIN SMALL LETTER ESH WITH DOUBLE BAR..LATIN SMALL LETTER S WITH CURL
+ {0x1DF25, 0x1DF2A, prLower}, // L& [6] LATIN SMALL LETTER D WITH MID-HEIGHT LEFT HOOK..LATIN SMALL LETTER T WITH MID-HEIGHT LEFT HOOK
{0x1E000, 0x1E006, prExtend}, // Mn [7] COMBINING GLAGOLITIC LETTER AZU..COMBINING GLAGOLITIC LETTER ZHIVETE
{0x1E008, 0x1E018, prExtend}, // Mn [17] COMBINING GLAGOLITIC LETTER ZEMLJA..COMBINING GLAGOLITIC LETTER HERU
{0x1E01B, 0x1E021, prExtend}, // Mn [7] COMBINING GLAGOLITIC LETTER SHTA..COMBINING GLAGOLITIC LETTER YATI
{0x1E023, 0x1E024, prExtend}, // Mn [2] COMBINING GLAGOLITIC LETTER YU..COMBINING GLAGOLITIC LETTER SMALL YUS
{0x1E026, 0x1E02A, prExtend}, // Mn [5] COMBINING GLAGOLITIC LETTER YO..COMBINING GLAGOLITIC LETTER FITA
+ {0x1E030, 0x1E06D, prLower}, // Lm [62] MODIFIER LETTER CYRILLIC SMALL A..MODIFIER LETTER CYRILLIC SMALL STRAIGHT U WITH STROKE
+ {0x1E08F, 0x1E08F, prExtend}, // Mn COMBINING CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
{0x1E100, 0x1E12C, prOLetter}, // Lo [45] NYIAKENG PUACHUE HMONG LETTER MA..NYIAKENG PUACHUE HMONG LETTER W
{0x1E130, 0x1E136, prExtend}, // Mn [7] NYIAKENG PUACHUE HMONG TONE-B..NYIAKENG PUACHUE HMONG TONE-D
{0x1E137, 0x1E13D, prOLetter}, // Lm [7] NYIAKENG PUACHUE HMONG SIGN FOR PERSON..NYIAKENG PUACHUE HMONG SYLLABLE LENGTHENER
@@ -2753,6 +2778,10 @@ var sentenceBreakCodePoints = [][3]int{
{0x1E2C0, 0x1E2EB, prOLetter}, // Lo [44] WANCHO LETTER AA..WANCHO LETTER YIH
{0x1E2EC, 0x1E2EF, prExtend}, // Mn [4] WANCHO TONE TUP..WANCHO TONE KOINI
{0x1E2F0, 0x1E2F9, prNumeric}, // Nd [10] WANCHO DIGIT ZERO..WANCHO DIGIT NINE
+ {0x1E4D0, 0x1E4EA, prOLetter}, // Lo [27] NAG MUNDARI LETTER O..NAG MUNDARI LETTER ELL
+ {0x1E4EB, 0x1E4EB, prOLetter}, // Lm NAG MUNDARI SIGN OJOD
+ {0x1E4EC, 0x1E4EF, prExtend}, // Mn [4] NAG MUNDARI SIGN MUHOR..NAG MUNDARI SIGN SUTUH
+ {0x1E4F0, 0x1E4F9, prNumeric}, // Nd [10] NAG MUNDARI DIGIT ZERO..NAG MUNDARI DIGIT NINE
{0x1E7E0, 0x1E7E6, prOLetter}, // Lo [7] ETHIOPIC SYLLABLE HHYA..ETHIOPIC SYLLABLE HHYO
{0x1E7E8, 0x1E7EB, prOLetter}, // Lo [4] ETHIOPIC SYLLABLE GURAGE HHWA..ETHIOPIC SYLLABLE HHWE
{0x1E7ED, 0x1E7EE, prOLetter}, // Lo [2] ETHIOPIC SYLLABLE GURAGE MWI..ETHIOPIC SYLLABLE GURAGE MWEE
@@ -2803,12 +2832,13 @@ var sentenceBreakCodePoints = [][3]int{
{0x1F676, 0x1F678, prClose}, // So [3] SANS-SERIF HEAVY DOUBLE TURNED COMMA QUOTATION MARK ORNAMENT..SANS-SERIF HEAVY LOW DOUBLE COMMA QUOTATION MARK ORNAMENT
{0x1FBF0, 0x1FBF9, prNumeric}, // Nd [10] SEGMENTED DIGIT ZERO..SEGMENTED DIGIT NINE
{0x20000, 0x2A6DF, prOLetter}, // Lo [42720] CJK UNIFIED IDEOGRAPH-20000..CJK UNIFIED IDEOGRAPH-2A6DF
- {0x2A700, 0x2B738, prOLetter}, // Lo [4153] CJK UNIFIED IDEOGRAPH-2A700..CJK UNIFIED IDEOGRAPH-2B738
+ {0x2A700, 0x2B739, prOLetter}, // Lo [4154] CJK UNIFIED IDEOGRAPH-2A700..CJK UNIFIED IDEOGRAPH-2B739
{0x2B740, 0x2B81D, prOLetter}, // Lo [222] CJK UNIFIED IDEOGRAPH-2B740..CJK UNIFIED IDEOGRAPH-2B81D
{0x2B820, 0x2CEA1, prOLetter}, // Lo [5762] CJK UNIFIED IDEOGRAPH-2B820..CJK UNIFIED IDEOGRAPH-2CEA1
{0x2CEB0, 0x2EBE0, prOLetter}, // Lo [7473] CJK UNIFIED IDEOGRAPH-2CEB0..CJK UNIFIED IDEOGRAPH-2EBE0
{0x2F800, 0x2FA1D, prOLetter}, // Lo [542] CJK COMPATIBILITY IDEOGRAPH-2F800..CJK COMPATIBILITY IDEOGRAPH-2FA1D
{0x30000, 0x3134A, prOLetter}, // Lo [4939] CJK UNIFIED IDEOGRAPH-30000..CJK UNIFIED IDEOGRAPH-3134A
+ {0x31350, 0x323AF, prOLetter}, // Lo [4192] CJK UNIFIED IDEOGRAPH-31350..CJK UNIFIED IDEOGRAPH-323AF
{0xE0001, 0xE0001, prFormat}, // Cf LANGUAGE TAG
{0xE0020, 0xE007F, prExtend}, // Cf [96] TAG SPACE..CANCEL TAG
{0xE0100, 0xE01EF, prExtend}, // Mn [240] VARIATION SELECTOR-17..VARIATION SELECTOR-256
diff --git a/vendor/github.com/rivo/uniseg/sentencerules.go b/vendor/github.com/rivo/uniseg/sentencerules.go
index 58c04794e..0b29c7bdb 100644
--- a/vendor/github.com/rivo/uniseg/sentencerules.go
+++ b/vendor/github.com/rivo/uniseg/sentencerules.go
@@ -18,104 +18,178 @@ const (
sbSB8aSp
)
-// The sentence break parser's breaking instructions.
-const (
- sbDontBreak = iota
- sbBreak
-)
-
-// The sentence break parser's state transitions. It's anologous to
-// grTransitions, see comments there for details. Unicode version 14.0.0.
-var sbTransitions = map[[2]int][3]int{
+// sbTransitions implements the sentence break parser's state transitions. It's
+// anologous to [grTransitions], see comments there for details.
+//
+// Unicode version 15.0.0.
+func sbTransitions(state, prop int) (newState int, sentenceBreak bool, rule int) {
+ switch uint64(state) | uint64(prop)<<32 {
// SB3.
- {sbAny, prCR}: {sbCR, sbDontBreak, 9990},
- {sbCR, prLF}: {sbParaSep, sbDontBreak, 30},
+ case sbAny | prCR<<32:
+ return sbCR, false, 9990
+ case sbCR | prLF<<32:
+ return sbParaSep, false, 30
// SB4.
- {sbAny, prSep}: {sbParaSep, sbDontBreak, 9990},
- {sbAny, prLF}: {sbParaSep, sbDontBreak, 9990},
- {sbParaSep, prAny}: {sbAny, sbBreak, 40},
- {sbCR, prAny}: {sbAny, sbBreak, 40},
+ case sbAny | prSep<<32:
+ return sbParaSep, false, 9990
+ case sbAny | prLF<<32:
+ return sbParaSep, false, 9990
+ case sbParaSep | prAny<<32:
+ return sbAny, true, 40
+ case sbCR | prAny<<32:
+ return sbAny, true, 40
// SB6.
- {sbAny, prATerm}: {sbATerm, sbDontBreak, 9990},
- {sbATerm, prNumeric}: {sbAny, sbDontBreak, 60},
- {sbSB7, prNumeric}: {sbAny, sbDontBreak, 60}, // Because ATerm also appears in SB7.
+ case sbAny | prATerm<<32:
+ return sbATerm, false, 9990
+ case sbATerm | prNumeric<<32:
+ return sbAny, false, 60
+ case sbSB7 | prNumeric<<32:
+ return sbAny, false, 60 // Because ATerm also appears in SB7.
// SB7.
- {sbAny, prUpper}: {sbUpper, sbDontBreak, 9990},
- {sbAny, prLower}: {sbLower, sbDontBreak, 9990},
- {sbUpper, prATerm}: {sbSB7, sbDontBreak, 70},
- {sbLower, prATerm}: {sbSB7, sbDontBreak, 70},
- {sbSB7, prUpper}: {sbUpper, sbDontBreak, 70},
+ case sbAny | prUpper<<32:
+ return sbUpper, false, 9990
+ case sbAny | prLower<<32:
+ return sbLower, false, 9990
+ case sbUpper | prATerm<<32:
+ return sbSB7, false, 70
+ case sbLower | prATerm<<32:
+ return sbSB7, false, 70
+ case sbSB7 | prUpper<<32:
+ return sbUpper, false, 70
// SB8a.
- {sbAny, prSTerm}: {sbSTerm, sbDontBreak, 9990},
- {sbATerm, prSContinue}: {sbAny, sbDontBreak, 81},
- {sbATerm, prATerm}: {sbATerm, sbDontBreak, 81},
- {sbATerm, prSTerm}: {sbSTerm, sbDontBreak, 81},
- {sbSB7, prSContinue}: {sbAny, sbDontBreak, 81},
- {sbSB7, prATerm}: {sbATerm, sbDontBreak, 81},
- {sbSB7, prSTerm}: {sbSTerm, sbDontBreak, 81},
- {sbSB8Close, prSContinue}: {sbAny, sbDontBreak, 81},
- {sbSB8Close, prATerm}: {sbATerm, sbDontBreak, 81},
- {sbSB8Close, prSTerm}: {sbSTerm, sbDontBreak, 81},
- {sbSB8Sp, prSContinue}: {sbAny, sbDontBreak, 81},
- {sbSB8Sp, prATerm}: {sbATerm, sbDontBreak, 81},
- {sbSB8Sp, prSTerm}: {sbSTerm, sbDontBreak, 81},
- {sbSTerm, prSContinue}: {sbAny, sbDontBreak, 81},
- {sbSTerm, prATerm}: {sbATerm, sbDontBreak, 81},
- {sbSTerm, prSTerm}: {sbSTerm, sbDontBreak, 81},
- {sbSB8aClose, prSContinue}: {sbAny, sbDontBreak, 81},
- {sbSB8aClose, prATerm}: {sbATerm, sbDontBreak, 81},
- {sbSB8aClose, prSTerm}: {sbSTerm, sbDontBreak, 81},
- {sbSB8aSp, prSContinue}: {sbAny, sbDontBreak, 81},
- {sbSB8aSp, prATerm}: {sbATerm, sbDontBreak, 81},
- {sbSB8aSp, prSTerm}: {sbSTerm, sbDontBreak, 81},
+ case sbAny | prSTerm<<32:
+ return sbSTerm, false, 9990
+ case sbATerm | prSContinue<<32:
+ return sbAny, false, 81
+ case sbATerm | prATerm<<32:
+ return sbATerm, false, 81
+ case sbATerm | prSTerm<<32:
+ return sbSTerm, false, 81
+ case sbSB7 | prSContinue<<32:
+ return sbAny, false, 81
+ case sbSB7 | prATerm<<32:
+ return sbATerm, false, 81
+ case sbSB7 | prSTerm<<32:
+ return sbSTerm, false, 81
+ case sbSB8Close | prSContinue<<32:
+ return sbAny, false, 81
+ case sbSB8Close | prATerm<<32:
+ return sbATerm, false, 81
+ case sbSB8Close | prSTerm<<32:
+ return sbSTerm, false, 81
+ case sbSB8Sp | prSContinue<<32:
+ return sbAny, false, 81
+ case sbSB8Sp | prATerm<<32:
+ return sbATerm, false, 81
+ case sbSB8Sp | prSTerm<<32:
+ return sbSTerm, false, 81
+ case sbSTerm | prSContinue<<32:
+ return sbAny, false, 81
+ case sbSTerm | prATerm<<32:
+ return sbATerm, false, 81
+ case sbSTerm | prSTerm<<32:
+ return sbSTerm, false, 81
+ case sbSB8aClose | prSContinue<<32:
+ return sbAny, false, 81
+ case sbSB8aClose | prATerm<<32:
+ return sbATerm, false, 81
+ case sbSB8aClose | prSTerm<<32:
+ return sbSTerm, false, 81
+ case sbSB8aSp | prSContinue<<32:
+ return sbAny, false, 81
+ case sbSB8aSp | prATerm<<32:
+ return sbATerm, false, 81
+ case sbSB8aSp | prSTerm<<32:
+ return sbSTerm, false, 81
// SB9.
- {sbATerm, prClose}: {sbSB8Close, sbDontBreak, 90},
- {sbSB7, prClose}: {sbSB8Close, sbDontBreak, 90},
- {sbSB8Close, prClose}: {sbSB8Close, sbDontBreak, 90},
- {sbATerm, prSp}: {sbSB8Sp, sbDontBreak, 90},
- {sbSB7, prSp}: {sbSB8Sp, sbDontBreak, 90},
- {sbSB8Close, prSp}: {sbSB8Sp, sbDontBreak, 90},
- {sbSTerm, prClose}: {sbSB8aClose, sbDontBreak, 90},
- {sbSB8aClose, prClose}: {sbSB8aClose, sbDontBreak, 90},
- {sbSTerm, prSp}: {sbSB8aSp, sbDontBreak, 90},
- {sbSB8aClose, prSp}: {sbSB8aSp, sbDontBreak, 90},
- {sbATerm, prSep}: {sbParaSep, sbDontBreak, 90},
- {sbATerm, prCR}: {sbParaSep, sbDontBreak, 90},
- {sbATerm, prLF}: {sbParaSep, sbDontBreak, 90},
- {sbSB7, prSep}: {sbParaSep, sbDontBreak, 90},
- {sbSB7, prCR}: {sbParaSep, sbDontBreak, 90},
- {sbSB7, prLF}: {sbParaSep, sbDontBreak, 90},
- {sbSB8Close, prSep}: {sbParaSep, sbDontBreak, 90},
- {sbSB8Close, prCR}: {sbParaSep, sbDontBreak, 90},
- {sbSB8Close, prLF}: {sbParaSep, sbDontBreak, 90},
- {sbSTerm, prSep}: {sbParaSep, sbDontBreak, 90},
- {sbSTerm, prCR}: {sbParaSep, sbDontBreak, 90},
- {sbSTerm, prLF}: {sbParaSep, sbDontBreak, 90},
- {sbSB8aClose, prSep}: {sbParaSep, sbDontBreak, 90},
- {sbSB8aClose, prCR}: {sbParaSep, sbDontBreak, 90},
- {sbSB8aClose, prLF}: {sbParaSep, sbDontBreak, 90},
+ case sbATerm | prClose<<32:
+ return sbSB8Close, false, 90
+ case sbSB7 | prClose<<32:
+ return sbSB8Close, false, 90
+ case sbSB8Close | prClose<<32:
+ return sbSB8Close, false, 90
+ case sbATerm | prSp<<32:
+ return sbSB8Sp, false, 90
+ case sbSB7 | prSp<<32:
+ return sbSB8Sp, false, 90
+ case sbSB8Close | prSp<<32:
+ return sbSB8Sp, false, 90
+ case sbSTerm | prClose<<32:
+ return sbSB8aClose, false, 90
+ case sbSB8aClose | prClose<<32:
+ return sbSB8aClose, false, 90
+ case sbSTerm | prSp<<32:
+ return sbSB8aSp, false, 90
+ case sbSB8aClose | prSp<<32:
+ return sbSB8aSp, false, 90
+ case sbATerm | prSep<<32:
+ return sbParaSep, false, 90
+ case sbATerm | prCR<<32:
+ return sbParaSep, false, 90
+ case sbATerm | prLF<<32:
+ return sbParaSep, false, 90
+ case sbSB7 | prSep<<32:
+ return sbParaSep, false, 90
+ case sbSB7 | prCR<<32:
+ return sbParaSep, false, 90
+ case sbSB7 | prLF<<32:
+ return sbParaSep, false, 90
+ case sbSB8Close | prSep<<32:
+ return sbParaSep, false, 90
+ case sbSB8Close | prCR<<32:
+ return sbParaSep, false, 90
+ case sbSB8Close | prLF<<32:
+ return sbParaSep, false, 90
+ case sbSTerm | prSep<<32:
+ return sbParaSep, false, 90
+ case sbSTerm | prCR<<32:
+ return sbParaSep, false, 90
+ case sbSTerm | prLF<<32:
+ return sbParaSep, false, 90
+ case sbSB8aClose | prSep<<32:
+ return sbParaSep, false, 90
+ case sbSB8aClose | prCR<<32:
+ return sbParaSep, false, 90
+ case sbSB8aClose | prLF<<32:
+ return sbParaSep, false, 90
// SB10.
- {sbSB8Sp, prSp}: {sbSB8Sp, sbDontBreak, 100},
- {sbSB8aSp, prSp}: {sbSB8aSp, sbDontBreak, 100},
- {sbSB8Sp, prSep}: {sbParaSep, sbDontBreak, 100},
- {sbSB8Sp, prCR}: {sbParaSep, sbDontBreak, 100},
- {sbSB8Sp, prLF}: {sbParaSep, sbDontBreak, 100},
+ case sbSB8Sp | prSp<<32:
+ return sbSB8Sp, false, 100
+ case sbSB8aSp | prSp<<32:
+ return sbSB8aSp, false, 100
+ case sbSB8Sp | prSep<<32:
+ return sbParaSep, false, 100
+ case sbSB8Sp | prCR<<32:
+ return sbParaSep, false, 100
+ case sbSB8Sp | prLF<<32:
+ return sbParaSep, false, 100
// SB11.
- {sbATerm, prAny}: {sbAny, sbBreak, 110},
- {sbSB7, prAny}: {sbAny, sbBreak, 110},
- {sbSB8Close, prAny}: {sbAny, sbBreak, 110},
- {sbSB8Sp, prAny}: {sbAny, sbBreak, 110},
- {sbSTerm, prAny}: {sbAny, sbBreak, 110},
- {sbSB8aClose, prAny}: {sbAny, sbBreak, 110},
- {sbSB8aSp, prAny}: {sbAny, sbBreak, 110},
+ case sbATerm | prAny<<32:
+ return sbAny, true, 110
+ case sbSB7 | prAny<<32:
+ return sbAny, true, 110
+ case sbSB8Close | prAny<<32:
+ return sbAny, true, 110
+ case sbSB8Sp | prAny<<32:
+ return sbAny, true, 110
+ case sbSTerm | prAny<<32:
+ return sbAny, true, 110
+ case sbSB8aClose | prAny<<32:
+ return sbAny, true, 110
+ case sbSB8aSp | prAny<<32:
+ return sbAny, true, 110
// We'll always break after ParaSep due to SB4.
+
+ default:
+ return -1, false, -1
+ }
}
// transitionSentenceBreakState determines the new state of the sentence break
@@ -141,30 +215,27 @@ func transitionSentenceBreakState(state int, r rune, b []byte, str string) (newS
// Find the applicable transition in the table.
var rule int
- transition, ok := sbTransitions[[2]int{state, nextProperty}]
- if ok {
- // We have a specific transition. We'll use it.
- newState, sentenceBreak, rule = transition[0], transition[1] == sbBreak, transition[2]
- } else {
+ newState, sentenceBreak, rule = sbTransitions(state, nextProperty)
+ if newState < 0 {
// No specific transition found. Try the less specific ones.
- transAnyProp, okAnyProp := sbTransitions[[2]int{state, prAny}]
- transAnyState, okAnyState := sbTransitions[[2]int{sbAny, nextProperty}]
- if okAnyProp && okAnyState {
+ anyPropState, anyPropProp, anyPropRule := sbTransitions(state, prAny)
+ anyStateState, anyStateProp, anyStateRule := sbTransitions(sbAny, nextProperty)
+ if anyPropState >= 0 && anyStateState >= 0 {
// Both apply. We'll use a mix (see comments for grTransitions).
- newState, sentenceBreak, rule = transAnyState[0], transAnyState[1] == sbBreak, transAnyState[2]
- if transAnyProp[2] < transAnyState[2] {
- sentenceBreak, rule = transAnyProp[1] == sbBreak, transAnyProp[2]
+ newState, sentenceBreak, rule = anyStateState, anyStateProp, anyStateRule
+ if anyPropRule < anyStateRule {
+ sentenceBreak, rule = anyPropProp, anyPropRule
}
- } else if okAnyProp {
+ } else if anyPropState >= 0 {
// We only have a specific state.
- newState, sentenceBreak, rule = transAnyProp[0], transAnyProp[1] == sbBreak, transAnyProp[2]
+ newState, sentenceBreak, rule = anyPropState, anyPropProp, anyPropRule
// This branch will probably never be reached because okAnyState will
// always be true given the current transition map. But we keep it here
// for future modifications to the transition map where this may not be
// true anymore.
- } else if okAnyState {
+ } else if anyStateState >= 0 {
// We only have a specific property.
- newState, sentenceBreak, rule = transAnyState[0], transAnyState[1] == sbBreak, transAnyState[2]
+ newState, sentenceBreak, rule = anyStateState, anyStateProp, anyStateRule
} else {
// No known transition. SB999: Any × Any.
newState, sentenceBreak, rule = sbAny, false, 9990
diff --git a/vendor/github.com/rivo/uniseg/step.go b/vendor/github.com/rivo/uniseg/step.go
index 6eca4b5dc..9b72c5e59 100644
--- a/vendor/github.com/rivo/uniseg/step.go
+++ b/vendor/github.com/rivo/uniseg/step.go
@@ -100,7 +100,7 @@ func Step(b []byte, state int) (cluster, rest []byte, boundaries int, newState i
if len(b) <= length { // If we're already past the end, there is nothing else to parse.
var prop int
if state < 0 {
- prop = property(graphemeCodePoints, r)
+ prop = propertyGraphemes(r)
} else {
prop = state >> shiftPropState
}
@@ -150,16 +150,14 @@ func Step(b []byte, state int) (cluster, rest []byte, boundaries int, newState i
return b[:length], b[length:], boundary, graphemeState | (wordState << shiftWordState) | (sentenceState << shiftSentenceState) | (lineState << shiftLineState) | (prop << shiftPropState)
}
- if r == vs16 {
- width = 2
- } else if firstProp != prExtendedPictographic && firstProp != prRegionalIndicator && firstProp != prL {
- width += runeWidth(r, prop)
- } else if firstProp == prExtendedPictographic {
+ if firstProp == prExtendedPictographic {
if r == vs15 {
width = 1
- } else {
+ } else if r == vs16 {
width = 2
}
+ } else if firstProp != prRegionalIndicator && firstProp != prL {
+ width += runeWidth(r, prop)
}
length += l
@@ -179,7 +177,7 @@ func StepString(str string, state int) (cluster, rest string, boundaries int, ne
// Extract the first rune.
r, length := utf8.DecodeRuneInString(str)
if len(str) <= length { // If we're already past the end, there is nothing else to parse.
- prop := property(graphemeCodePoints, r)
+ prop := propertyGraphemes(r)
return str, "", LineMustBreak | (1 << shiftWord) | (1 << shiftSentence) | (runeWidth(r, prop) << ShiftWidth), grAny | (wbAny << shiftWordState) | (sbAny << shiftSentenceState) | (lbAny << shiftLineState)
}
@@ -226,16 +224,14 @@ func StepString(str string, state int) (cluster, rest string, boundaries int, ne
return str[:length], str[length:], boundary, graphemeState | (wordState << shiftWordState) | (sentenceState << shiftSentenceState) | (lineState << shiftLineState) | (prop << shiftPropState)
}
- if r == vs16 {
- width = 2
- } else if firstProp != prExtendedPictographic && firstProp != prRegionalIndicator && firstProp != prL {
- width += runeWidth(r, prop)
- } else if firstProp == prExtendedPictographic {
+ if firstProp == prExtendedPictographic {
if r == vs15 {
width = 1
- } else {
+ } else if r == vs16 {
width = 2
}
+ } else if firstProp != prRegionalIndicator && firstProp != prL {
+ width += runeWidth(r, prop)
}
length += l
diff --git a/vendor/github.com/rivo/uniseg/width.go b/vendor/github.com/rivo/uniseg/width.go
index 12a57cc2e..975a9f134 100644
--- a/vendor/github.com/rivo/uniseg/width.go
+++ b/vendor/github.com/rivo/uniseg/width.go
@@ -1,5 +1,10 @@
package uniseg
+// EastAsianAmbiguousWidth specifies the monospace width for East Asian
+// characters classified as Ambiguous. The default is 1 but some rare fonts
+// render them with a width of 2.
+var EastAsianAmbiguousWidth = 1
+
// runeWidth returns the monospace width for the given rune. The provided
// grapheme property is a value mapped by the [graphemeCodePoints] table.
//
@@ -33,9 +38,11 @@ func runeWidth(r rune, graphemeProperty int) int {
return 4
}
- switch property(eastAsianWidth, r) {
+ switch propertyEastAsianWidth(r) {
case prW, prF:
return 2
+ case prA:
+ return EastAsianAmbiguousWidth
}
return 1
diff --git a/vendor/github.com/rivo/uniseg/wordproperties.go b/vendor/github.com/rivo/uniseg/wordproperties.go
index 805cc536c..277ca1006 100644
--- a/vendor/github.com/rivo/uniseg/wordproperties.go
+++ b/vendor/github.com/rivo/uniseg/wordproperties.go
@@ -1,13 +1,13 @@
-package uniseg
-
// Code generated via go generate from gen_properties.go. DO NOT EDIT.
+package uniseg
+
// workBreakCodePoints are taken from
-// https://www.unicode.org/Public/14.0.0/ucd/auxiliary/WordBreakProperty.txt
+// https://www.unicode.org/Public/15.0.0/ucd/auxiliary/WordBreakProperty.txt
// and
-// https://unicode.org/Public/14.0.0/ucd/emoji/emoji-data.txt
+// https://unicode.org/Public/15.0.0/ucd/emoji/emoji-data.txt
// ("Extended_Pictographic" only)
-// on September 10, 2022. See https://www.unicode.org/license.html for the Unicode
+// on September 5, 2023. See https://www.unicode.org/license.html for the Unicode
// license agreement.
var workBreakCodePoints = [][3]int{
{0x000A, 0x000A, prLF}, // Cc <control-000A>
@@ -318,6 +318,7 @@ var workBreakCodePoints = [][3]int{
{0x0CE2, 0x0CE3, prExtend}, // Mn [2] KANNADA VOWEL SIGN VOCALIC L..KANNADA VOWEL SIGN VOCALIC LL
{0x0CE6, 0x0CEF, prNumeric}, // Nd [10] KANNADA DIGIT ZERO..KANNADA DIGIT NINE
{0x0CF1, 0x0CF2, prALetter}, // Lo [2] KANNADA SIGN JIHVAMULIYA..KANNADA SIGN UPADHMANIYA
+ {0x0CF3, 0x0CF3, prExtend}, // Mc KANNADA SIGN COMBINING ANUSVARA ABOVE RIGHT
{0x0D00, 0x0D01, prExtend}, // Mn [2] MALAYALAM SIGN COMBINING ANUSVARA ABOVE..MALAYALAM SIGN CANDRABINDU
{0x0D02, 0x0D03, prExtend}, // Mc [2] MALAYALAM SIGN ANUSVARA..MALAYALAM SIGN VISARGA
{0x0D04, 0x0D0C, prALetter}, // Lo [9] MALAYALAM LETTER VEDIC ANUSVARA..MALAYALAM LETTER VOCALIC L
@@ -357,7 +358,7 @@ var workBreakCodePoints = [][3]int{
{0x0E50, 0x0E59, prNumeric}, // Nd [10] THAI DIGIT ZERO..THAI DIGIT NINE
{0x0EB1, 0x0EB1, prExtend}, // Mn LAO VOWEL SIGN MAI KAN
{0x0EB4, 0x0EBC, prExtend}, // Mn [9] LAO VOWEL SIGN I..LAO SEMIVOWEL SIGN LO
- {0x0EC8, 0x0ECD, prExtend}, // Mn [6] LAO TONE MAI EK..LAO NIGGAHITA
+ {0x0EC8, 0x0ECE, prExtend}, // Mn [7] LAO TONE MAI EK..LAO YAMAKKAN
{0x0ED0, 0x0ED9, prNumeric}, // Nd [10] LAO DIGIT ZERO..LAO DIGIT NINE
{0x0F00, 0x0F00, prALetter}, // Lo TIBETAN SYLLABLE OM
{0x0F18, 0x0F19, prExtend}, // Mn [2] TIBETAN ASTROLOGICAL SIGN -KHYUD PA..TIBETAN ASTROLOGICAL SIGN SDONG TSHUGS
@@ -1093,6 +1094,7 @@ var workBreakCodePoints = [][3]int{
{0x10E80, 0x10EA9, prALetter}, // Lo [42] YEZIDI LETTER ELIF..YEZIDI LETTER ET
{0x10EAB, 0x10EAC, prExtend}, // Mn [2] YEZIDI COMBINING HAMZA MARK..YEZIDI COMBINING MADDA MARK
{0x10EB0, 0x10EB1, prALetter}, // Lo [2] YEZIDI LETTER LAM WITH DOT ABOVE..YEZIDI LETTER YOT WITH CIRCUMFLEX ABOVE
+ {0x10EFD, 0x10EFF, prExtend}, // Mn [3] ARABIC SMALL LOW WORD SAKTA..ARABIC SMALL LOW WORD MADDA
{0x10F00, 0x10F1C, prALetter}, // Lo [29] OLD SOGDIAN LETTER ALEPH..OLD SOGDIAN LETTER FINAL TAW WITH VERTICAL TAIL
{0x10F27, 0x10F27, prALetter}, // Lo OLD SOGDIAN LIGATURE AYIN-DALETH
{0x10F30, 0x10F45, prALetter}, // Lo [22] SOGDIAN LETTER ALEPH..SOGDIAN INDEPENDENT SHIN
@@ -1157,6 +1159,8 @@ var workBreakCodePoints = [][3]int{
{0x11235, 0x11235, prExtend}, // Mc KHOJKI SIGN VIRAMA
{0x11236, 0x11237, prExtend}, // Mn [2] KHOJKI SIGN NUKTA..KHOJKI SIGN SHADDA
{0x1123E, 0x1123E, prExtend}, // Mn KHOJKI SIGN SUKUN
+ {0x1123F, 0x11240, prALetter}, // Lo [2] KHOJKI LETTER QA..KHOJKI LETTER SHORT I
+ {0x11241, 0x11241, prExtend}, // Mn KHOJKI VOWEL SIGN VOCALIC R
{0x11280, 0x11286, prALetter}, // Lo [7] MULTANI LETTER A..MULTANI LETTER GA
{0x11288, 0x11288, prALetter}, // Lo MULTANI LETTER GHA
{0x1128A, 0x1128D, prALetter}, // Lo [4] MULTANI LETTER CA..MULTANI LETTER JJA
@@ -1337,13 +1341,28 @@ var workBreakCodePoints = [][3]int{
{0x11EE0, 0x11EF2, prALetter}, // Lo [19] MAKASAR LETTER KA..MAKASAR ANGKA
{0x11EF3, 0x11EF4, prExtend}, // Mn [2] MAKASAR VOWEL SIGN I..MAKASAR VOWEL SIGN U
{0x11EF5, 0x11EF6, prExtend}, // Mc [2] MAKASAR VOWEL SIGN E..MAKASAR VOWEL SIGN O
+ {0x11F00, 0x11F01, prExtend}, // Mn [2] KAWI SIGN CANDRABINDU..KAWI SIGN ANUSVARA
+ {0x11F02, 0x11F02, prALetter}, // Lo KAWI SIGN REPHA
+ {0x11F03, 0x11F03, prExtend}, // Mc KAWI SIGN VISARGA
+ {0x11F04, 0x11F10, prALetter}, // Lo [13] KAWI LETTER A..KAWI LETTER O
+ {0x11F12, 0x11F33, prALetter}, // Lo [34] KAWI LETTER KA..KAWI LETTER JNYA
+ {0x11F34, 0x11F35, prExtend}, // Mc [2] KAWI VOWEL SIGN AA..KAWI VOWEL SIGN ALTERNATE AA
+ {0x11F36, 0x11F3A, prExtend}, // Mn [5] KAWI VOWEL SIGN I..KAWI VOWEL SIGN VOCALIC R
+ {0x11F3E, 0x11F3F, prExtend}, // Mc [2] KAWI VOWEL SIGN E..KAWI VOWEL SIGN AI
+ {0x11F40, 0x11F40, prExtend}, // Mn KAWI VOWEL SIGN EU
+ {0x11F41, 0x11F41, prExtend}, // Mc KAWI SIGN KILLER
+ {0x11F42, 0x11F42, prExtend}, // Mn KAWI CONJOINER
+ {0x11F50, 0x11F59, prNumeric}, // Nd [10] KAWI DIGIT ZERO..KAWI DIGIT NINE
{0x11FB0, 0x11FB0, prALetter}, // Lo LISU LETTER YHA
{0x12000, 0x12399, prALetter}, // Lo [922] CUNEIFORM SIGN A..CUNEIFORM SIGN U U
{0x12400, 0x1246E, prALetter}, // Nl [111] CUNEIFORM NUMERIC SIGN TWO ASH..CUNEIFORM NUMERIC SIGN NINE U VARIANT FORM
{0x12480, 0x12543, prALetter}, // Lo [196] CUNEIFORM SIGN AB TIMES NUN TENU..CUNEIFORM SIGN ZU5 TIMES THREE DISH TENU
{0x12F90, 0x12FF0, prALetter}, // Lo [97] CYPRO-MINOAN SIGN CM001..CYPRO-MINOAN SIGN CM114
- {0x13000, 0x1342E, prALetter}, // Lo [1071] EGYPTIAN HIEROGLYPH A001..EGYPTIAN HIEROGLYPH AA032
- {0x13430, 0x13438, prFormat}, // Cf [9] EGYPTIAN HIEROGLYPH VERTICAL JOINER..EGYPTIAN HIEROGLYPH END SEGMENT
+ {0x13000, 0x1342F, prALetter}, // Lo [1072] EGYPTIAN HIEROGLYPH A001..EGYPTIAN HIEROGLYPH V011D
+ {0x13430, 0x1343F, prFormat}, // Cf [16] EGYPTIAN HIEROGLYPH VERTICAL JOINER..EGYPTIAN HIEROGLYPH END WALLED ENCLOSURE
+ {0x13440, 0x13440, prExtend}, // Mn EGYPTIAN HIEROGLYPH MIRROR HORIZONTALLY
+ {0x13441, 0x13446, prALetter}, // Lo [6] EGYPTIAN HIEROGLYPH FULL BLANK..EGYPTIAN HIEROGLYPH WIDE LOST SIGN
+ {0x13447, 0x13455, prExtend}, // Mn [15] EGYPTIAN HIEROGLYPH MODIFIER DAMAGED AT TOP START..EGYPTIAN HIEROGLYPH MODIFIER DAMAGED
{0x14400, 0x14646, prALetter}, // Lo [583] ANATOLIAN HIEROGLYPH A001..ANATOLIAN HIEROGLYPH A530
{0x16800, 0x16A38, prALetter}, // Lo [569] BAMUM LETTER PHASE-A NGKUE MFON..BAMUM LETTER PHASE-F VUEQ
{0x16A40, 0x16A5E, prALetter}, // Lo [31] MRO LETTER TA..MRO LETTER TEK
@@ -1374,6 +1393,7 @@ var workBreakCodePoints = [][3]int{
{0x1AFFD, 0x1AFFE, prKatakana}, // Lm [2] KATAKANA LETTER MINNAN NASALIZED TONE-7..KATAKANA LETTER MINNAN NASALIZED TONE-8
{0x1B000, 0x1B000, prKatakana}, // Lo KATAKANA LETTER ARCHAIC E
{0x1B120, 0x1B122, prKatakana}, // Lo [3] KATAKANA LETTER ARCHAIC YI..KATAKANA LETTER ARCHAIC WU
+ {0x1B155, 0x1B155, prKatakana}, // Lo KATAKANA LETTER SMALL KO
{0x1B164, 0x1B167, prKatakana}, // Lo [4] KATAKANA LETTER SMALL WI..KATAKANA LETTER SMALL N
{0x1BC00, 0x1BC6A, prALetter}, // Lo [107] DUPLOYAN LETTER H..DUPLOYAN LETTER VOCALIC M
{0x1BC70, 0x1BC7C, prALetter}, // Lo [13] DUPLOYAN AFFIX LEFT HORIZONTAL SECANT..DUPLOYAN AFFIX ATTACHED TANGENT HOOK
@@ -1431,11 +1451,14 @@ var workBreakCodePoints = [][3]int{
{0x1DF00, 0x1DF09, prALetter}, // L& [10] LATIN SMALL LETTER FENG DIGRAPH WITH TRILL..LATIN SMALL LETTER T WITH HOOK AND RETROFLEX HOOK
{0x1DF0A, 0x1DF0A, prALetter}, // Lo LATIN LETTER RETROFLEX CLICK WITH RETROFLEX HOOK
{0x1DF0B, 0x1DF1E, prALetter}, // L& [20] LATIN SMALL LETTER ESH WITH DOUBLE BAR..LATIN SMALL LETTER S WITH CURL
+ {0x1DF25, 0x1DF2A, prALetter}, // L& [6] LATIN SMALL LETTER D WITH MID-HEIGHT LEFT HOOK..LATIN SMALL LETTER T WITH MID-HEIGHT LEFT HOOK
{0x1E000, 0x1E006, prExtend}, // Mn [7] COMBINING GLAGOLITIC LETTER AZU..COMBINING GLAGOLITIC LETTER ZHIVETE
{0x1E008, 0x1E018, prExtend}, // Mn [17] COMBINING GLAGOLITIC LETTER ZEMLJA..COMBINING GLAGOLITIC LETTER HERU
{0x1E01B, 0x1E021, prExtend}, // Mn [7] COMBINING GLAGOLITIC LETTER SHTA..COMBINING GLAGOLITIC LETTER YATI
{0x1E023, 0x1E024, prExtend}, // Mn [2] COMBINING GLAGOLITIC LETTER YU..COMBINING GLAGOLITIC LETTER SMALL YUS
{0x1E026, 0x1E02A, prExtend}, // Mn [5] COMBINING GLAGOLITIC LETTER YO..COMBINING GLAGOLITIC LETTER FITA
+ {0x1E030, 0x1E06D, prALetter}, // Lm [62] MODIFIER LETTER CYRILLIC SMALL A..MODIFIER LETTER CYRILLIC SMALL STRAIGHT U WITH STROKE
+ {0x1E08F, 0x1E08F, prExtend}, // Mn COMBINING CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
{0x1E100, 0x1E12C, prALetter}, // Lo [45] NYIAKENG PUACHUE HMONG LETTER MA..NYIAKENG PUACHUE HMONG LETTER W
{0x1E130, 0x1E136, prExtend}, // Mn [7] NYIAKENG PUACHUE HMONG TONE-B..NYIAKENG PUACHUE HMONG TONE-D
{0x1E137, 0x1E13D, prALetter}, // Lm [7] NYIAKENG PUACHUE HMONG SIGN FOR PERSON..NYIAKENG PUACHUE HMONG SYLLABLE LENGTHENER
@@ -1446,6 +1469,10 @@ var workBreakCodePoints = [][3]int{
{0x1E2C0, 0x1E2EB, prALetter}, // Lo [44] WANCHO LETTER AA..WANCHO LETTER YIH
{0x1E2EC, 0x1E2EF, prExtend}, // Mn [4] WANCHO TONE TUP..WANCHO TONE KOINI
{0x1E2F0, 0x1E2F9, prNumeric}, // Nd [10] WANCHO DIGIT ZERO..WANCHO DIGIT NINE
+ {0x1E4D0, 0x1E4EA, prALetter}, // Lo [27] NAG MUNDARI LETTER O..NAG MUNDARI LETTER ELL
+ {0x1E4EB, 0x1E4EB, prALetter}, // Lm NAG MUNDARI SIGN OJOD
+ {0x1E4EC, 0x1E4EF, prExtend}, // Mn [4] NAG MUNDARI SIGN MUHOR..NAG MUNDARI SIGN SUTUH
+ {0x1E4F0, 0x1E4F9, prNumeric}, // Nd [10] NAG MUNDARI DIGIT ZERO..NAG MUNDARI DIGIT NINE
{0x1E7E0, 0x1E7E6, prALetter}, // Lo [7] ETHIOPIC SYLLABLE HHYA..ETHIOPIC SYLLABLE HHYO
{0x1E7E8, 0x1E7EB, prALetter}, // Lo [4] ETHIOPIC SYLLABLE GURAGE HHWA..ETHIOPIC SYLLABLE HHWE
{0x1E7ED, 0x1E7EE, prALetter}, // Lo [2] ETHIOPIC SYLLABLE GURAGE MWI..ETHIOPIC SYLLABLE GURAGE MWEE
@@ -1740,7 +1767,8 @@ var workBreakCodePoints = [][3]int{
{0x1F6D3, 0x1F6D4, prExtendedPictographic}, // E0.0 [2] (🛓..🛔) STUPA..PAGODA
{0x1F6D5, 0x1F6D5, prExtendedPictographic}, // E12.0 [1] (🛕) hindu temple
{0x1F6D6, 0x1F6D7, prExtendedPictographic}, // E13.0 [2] (🛖..🛗) hut..elevator
- {0x1F6D8, 0x1F6DC, prExtendedPictographic}, // E0.0 [5] (🛘..🛜) <reserved-1F6D8>..<reserved-1F6DC>
+ {0x1F6D8, 0x1F6DB, prExtendedPictographic}, // E0.0 [4] (🛘..🛛) <reserved-1F6D8>..<reserved-1F6DB>
+ {0x1F6DC, 0x1F6DC, prExtendedPictographic}, // E15.0 [1] (🛜) wireless
{0x1F6DD, 0x1F6DF, prExtendedPictographic}, // E14.0 [3] (🛝..🛟) playground slide..ring buoy
{0x1F6E0, 0x1F6E5, prExtendedPictographic}, // E0.7 [6] (🛠️..🛥️) hammer and wrench..motor boat
{0x1F6E6, 0x1F6E8, prExtendedPictographic}, // E0.0 [3] (🛦..🛨) UP-POINTING MILITARY AIRPLANE..UP-POINTING SMALL AIRPLANE
@@ -1757,7 +1785,7 @@ var workBreakCodePoints = [][3]int{
{0x1F6FA, 0x1F6FA, prExtendedPictographic}, // E12.0 [1] (🛺) auto rickshaw
{0x1F6FB, 0x1F6FC, prExtendedPictographic}, // E13.0 [2] (🛻..🛼) pickup truck..roller skate
{0x1F6FD, 0x1F6FF, prExtendedPictographic}, // E0.0 [3] (🛽..🛿) <reserved-1F6FD>..<reserved-1F6FF>
- {0x1F774, 0x1F77F, prExtendedPictographic}, // E0.0 [12] (🝴..🝿) <reserved-1F774>..<reserved-1F77F>
+ {0x1F774, 0x1F77F, prExtendedPictographic}, // E0.0 [12] (🝴..🝿) LOT OF FORTUNE..ORCUS
{0x1F7D5, 0x1F7DF, prExtendedPictographic}, // E0.0 [11] (🟕..🟟) CIRCLED TRIANGLE..<reserved-1F7DF>
{0x1F7E0, 0x1F7EB, prExtendedPictographic}, // E12.0 [12] (🟠..🟫) orange circle..brown square
{0x1F7EC, 0x1F7EF, prExtendedPictographic}, // E0.0 [4] (🟬..🟯) <reserved-1F7EC>..<reserved-1F7EF>
@@ -1816,30 +1844,37 @@ var workBreakCodePoints = [][3]int{
{0x1FA00, 0x1FA6F, prExtendedPictographic}, // E0.0 [112] (🨀..🩯) NEUTRAL CHESS KING..<reserved-1FA6F>
{0x1FA70, 0x1FA73, prExtendedPictographic}, // E12.0 [4] (🩰..🩳) ballet shoes..shorts
{0x1FA74, 0x1FA74, prExtendedPictographic}, // E13.0 [1] (🩴) thong sandal
- {0x1FA75, 0x1FA77, prExtendedPictographic}, // E0.0 [3] (🩵..🩷) <reserved-1FA75>..<reserved-1FA77>
+ {0x1FA75, 0x1FA77, prExtendedPictographic}, // E15.0 [3] (🩵..🩷) light blue heart..pink heart
{0x1FA78, 0x1FA7A, prExtendedPictographic}, // E12.0 [3] (🩸..🩺) drop of blood..stethoscope
{0x1FA7B, 0x1FA7C, prExtendedPictographic}, // E14.0 [2] (🩻..🩼) x-ray..crutch
{0x1FA7D, 0x1FA7F, prExtendedPictographic}, // E0.0 [3] (🩽..🩿) <reserved-1FA7D>..<reserved-1FA7F>
{0x1FA80, 0x1FA82, prExtendedPictographic}, // E12.0 [3] (🪀..🪂) yo-yo..parachute
{0x1FA83, 0x1FA86, prExtendedPictographic}, // E13.0 [4] (🪃..🪆) boomerang..nesting dolls
- {0x1FA87, 0x1FA8F, prExtendedPictographic}, // E0.0 [9] (🪇..🪏) <reserved-1FA87>..<reserved-1FA8F>
+ {0x1FA87, 0x1FA88, prExtendedPictographic}, // E15.0 [2] (🪇..🪈) maracas..flute
+ {0x1FA89, 0x1FA8F, prExtendedPictographic}, // E0.0 [7] (🪉..🪏) <reserved-1FA89>..<reserved-1FA8F>
{0x1FA90, 0x1FA95, prExtendedPictographic}, // E12.0 [6] (🪐..🪕) ringed planet..banjo
{0x1FA96, 0x1FAA8, prExtendedPictographic}, // E13.0 [19] (🪖..🪨) military helmet..rock
{0x1FAA9, 0x1FAAC, prExtendedPictographic}, // E14.0 [4] (🪩..🪬) mirror ball..hamsa
- {0x1FAAD, 0x1FAAF, prExtendedPictographic}, // E0.0 [3] (🪭..🪯) <reserved-1FAAD>..<reserved-1FAAF>
+ {0x1FAAD, 0x1FAAF, prExtendedPictographic}, // E15.0 [3] (🪭..🪯) folding hand fan..khanda
{0x1FAB0, 0x1FAB6, prExtendedPictographic}, // E13.0 [7] (🪰..🪶) fly..feather
{0x1FAB7, 0x1FABA, prExtendedPictographic}, // E14.0 [4] (🪷..🪺) lotus..nest with eggs
- {0x1FABB, 0x1FABF, prExtendedPictographic}, // E0.0 [5] (🪻..🪿) <reserved-1FABB>..<reserved-1FABF>
+ {0x1FABB, 0x1FABD, prExtendedPictographic}, // E15.0 [3] (🪻..🪽) hyacinth..wing
+ {0x1FABE, 0x1FABE, prExtendedPictographic}, // E0.0 [1] (🪾) <reserved-1FABE>
+ {0x1FABF, 0x1FABF, prExtendedPictographic}, // E15.0 [1] (🪿) goose
{0x1FAC0, 0x1FAC2, prExtendedPictographic}, // E13.0 [3] (🫀..🫂) anatomical heart..people hugging
{0x1FAC3, 0x1FAC5, prExtendedPictographic}, // E14.0 [3] (🫃..🫅) pregnant man..person with crown
- {0x1FAC6, 0x1FACF, prExtendedPictographic}, // E0.0 [10] (🫆..🫏) <reserved-1FAC6>..<reserved-1FACF>
+ {0x1FAC6, 0x1FACD, prExtendedPictographic}, // E0.0 [8] (🫆..🫍) <reserved-1FAC6>..<reserved-1FACD>
+ {0x1FACE, 0x1FACF, prExtendedPictographic}, // E15.0 [2] (🫎..🫏) moose..donkey
{0x1FAD0, 0x1FAD6, prExtendedPictographic}, // E13.0 [7] (🫐..🫖) blueberries..teapot
{0x1FAD7, 0x1FAD9, prExtendedPictographic}, // E14.0 [3] (🫗..🫙) pouring liquid..jar
- {0x1FADA, 0x1FADF, prExtendedPictographic}, // E0.0 [6] (🫚..🫟) <reserved-1FADA>..<reserved-1FADF>
+ {0x1FADA, 0x1FADB, prExtendedPictographic}, // E15.0 [2] (🫚..🫛) ginger root..pea pod
+ {0x1FADC, 0x1FADF, prExtendedPictographic}, // E0.0 [4] (🫜..🫟) <reserved-1FADC>..<reserved-1FADF>
{0x1FAE0, 0x1FAE7, prExtendedPictographic}, // E14.0 [8] (🫠..🫧) melting face..bubbles
- {0x1FAE8, 0x1FAEF, prExtendedPictographic}, // E0.0 [8] (🫨..🫯) <reserved-1FAE8>..<reserved-1FAEF>
+ {0x1FAE8, 0x1FAE8, prExtendedPictographic}, // E15.0 [1] (🫨) shaking face
+ {0x1FAE9, 0x1FAEF, prExtendedPictographic}, // E0.0 [7] (🫩..🫯) <reserved-1FAE9>..<reserved-1FAEF>
{0x1FAF0, 0x1FAF6, prExtendedPictographic}, // E14.0 [7] (🫰..🫶) hand with index finger and thumb crossed..heart hands
- {0x1FAF7, 0x1FAFF, prExtendedPictographic}, // E0.0 [9] (🫷..🫿) <reserved-1FAF7>..<reserved-1FAFF>
+ {0x1FAF7, 0x1FAF8, prExtendedPictographic}, // E15.0 [2] (🫷..🫸) leftwards pushing hand..rightwards pushing hand
+ {0x1FAF9, 0x1FAFF, prExtendedPictographic}, // E0.0 [7] (🫹..🫿) <reserved-1FAF9>..<reserved-1FAFF>
{0x1FBF0, 0x1FBF9, prNumeric}, // Nd [10] SEGMENTED DIGIT ZERO..SEGMENTED DIGIT NINE
{0x1FC00, 0x1FFFD, prExtendedPictographic}, // E0.0[1022] (🰀..🿽) <reserved-1FC00>..<reserved-1FFFD>
{0xE0001, 0xE0001, prFormat}, // Cf LANGUAGE TAG
diff --git a/vendor/github.com/rivo/uniseg/wordrules.go b/vendor/github.com/rivo/uniseg/wordrules.go
index 325407e40..57a8c6831 100644
--- a/vendor/github.com/rivo/uniseg/wordrules.go
+++ b/vendor/github.com/rivo/uniseg/wordrules.go
@@ -22,82 +22,121 @@ const (
wbZWJBit = 16 // This bit is set for any states followed by at least one zero-width joiner (see WB4 and WB3c).
)
-// The word break parser's breaking instructions.
-const (
- wbDontBreak = iota
- wbBreak
-)
-
-// The word break parser's state transitions. It's anologous to grTransitions,
-// see comments there for details. Unicode version 14.0.0.
-var wbTransitions = map[[2]int][3]int{
+// wbTransitions implements the word break parser's state transitions. It's
+// anologous to [grTransitions], see comments there for details.
+//
+// Unicode version 15.0.0.
+func wbTransitions(state, prop int) (newState int, wordBreak bool, rule int) {
+ switch uint64(state) | uint64(prop)<<32 {
// WB3b.
- {wbAny, prNewline}: {wbNewline, wbBreak, 32},
- {wbAny, prCR}: {wbCR, wbBreak, 32},
- {wbAny, prLF}: {wbLF, wbBreak, 32},
+ case wbAny | prNewline<<32:
+ return wbNewline, true, 32
+ case wbAny | prCR<<32:
+ return wbCR, true, 32
+ case wbAny | prLF<<32:
+ return wbLF, true, 32
// WB3a.
- {wbNewline, prAny}: {wbAny, wbBreak, 31},
- {wbCR, prAny}: {wbAny, wbBreak, 31},
- {wbLF, prAny}: {wbAny, wbBreak, 31},
+ case wbNewline | prAny<<32:
+ return wbAny, true, 31
+ case wbCR | prAny<<32:
+ return wbAny, true, 31
+ case wbLF | prAny<<32:
+ return wbAny, true, 31
// WB3.
- {wbCR, prLF}: {wbLF, wbDontBreak, 30},
+ case wbCR | prLF<<32:
+ return wbLF, false, 30
// WB3d.
- {wbAny, prWSegSpace}: {wbWSegSpace, wbBreak, 9990},
- {wbWSegSpace, prWSegSpace}: {wbWSegSpace, wbDontBreak, 34},
+ case wbAny | prWSegSpace<<32:
+ return wbWSegSpace, true, 9990
+ case wbWSegSpace | prWSegSpace<<32:
+ return wbWSegSpace, false, 34
// WB5.
- {wbAny, prALetter}: {wbALetter, wbBreak, 9990},
- {wbAny, prHebrewLetter}: {wbHebrewLetter, wbBreak, 9990},
- {wbALetter, prALetter}: {wbALetter, wbDontBreak, 50},
- {wbALetter, prHebrewLetter}: {wbHebrewLetter, wbDontBreak, 50},
- {wbHebrewLetter, prALetter}: {wbALetter, wbDontBreak, 50},
- {wbHebrewLetter, prHebrewLetter}: {wbHebrewLetter, wbDontBreak, 50},
+ case wbAny | prALetter<<32:
+ return wbALetter, true, 9990
+ case wbAny | prHebrewLetter<<32:
+ return wbHebrewLetter, true, 9990
+ case wbALetter | prALetter<<32:
+ return wbALetter, false, 50
+ case wbALetter | prHebrewLetter<<32:
+ return wbHebrewLetter, false, 50
+ case wbHebrewLetter | prALetter<<32:
+ return wbALetter, false, 50
+ case wbHebrewLetter | prHebrewLetter<<32:
+ return wbHebrewLetter, false, 50
// WB7. Transitions to wbWB7 handled by transitionWordBreakState().
- {wbWB7, prALetter}: {wbALetter, wbDontBreak, 70},
- {wbWB7, prHebrewLetter}: {wbHebrewLetter, wbDontBreak, 70},
+ case wbWB7 | prALetter<<32:
+ return wbALetter, false, 70
+ case wbWB7 | prHebrewLetter<<32:
+ return wbHebrewLetter, false, 70
// WB7a.
- {wbHebrewLetter, prSingleQuote}: {wbAny, wbDontBreak, 71},
+ case wbHebrewLetter | prSingleQuote<<32:
+ return wbAny, false, 71
// WB7c. Transitions to wbWB7c handled by transitionWordBreakState().
- {wbWB7c, prHebrewLetter}: {wbHebrewLetter, wbDontBreak, 73},
+ case wbWB7c | prHebrewLetter<<32:
+ return wbHebrewLetter, false, 73
// WB8.
- {wbAny, prNumeric}: {wbNumeric, wbBreak, 9990},
- {wbNumeric, prNumeric}: {wbNumeric, wbDontBreak, 80},
+ case wbAny | prNumeric<<32:
+ return wbNumeric, true, 9990
+ case wbNumeric | prNumeric<<32:
+ return wbNumeric, false, 80
// WB9.
- {wbALetter, prNumeric}: {wbNumeric, wbDontBreak, 90},
- {wbHebrewLetter, prNumeric}: {wbNumeric, wbDontBreak, 90},
+ case wbALetter | prNumeric<<32:
+ return wbNumeric, false, 90
+ case wbHebrewLetter | prNumeric<<32:
+ return wbNumeric, false, 90
// WB10.
- {wbNumeric, prALetter}: {wbALetter, wbDontBreak, 100},
- {wbNumeric, prHebrewLetter}: {wbHebrewLetter, wbDontBreak, 100},
+ case wbNumeric | prALetter<<32:
+ return wbALetter, false, 100
+ case wbNumeric | prHebrewLetter<<32:
+ return wbHebrewLetter, false, 100
// WB11. Transitions to wbWB11 handled by transitionWordBreakState().
- {wbWB11, prNumeric}: {wbNumeric, wbDontBreak, 110},
+ case wbWB11 | prNumeric<<32:
+ return wbNumeric, false, 110
// WB13.
- {wbAny, prKatakana}: {wbKatakana, wbBreak, 9990},
- {wbKatakana, prKatakana}: {wbKatakana, wbDontBreak, 130},
+ case wbAny | prKatakana<<32:
+ return wbKatakana, true, 9990
+ case wbKatakana | prKatakana<<32:
+ return wbKatakana, false, 130
// WB13a.
- {wbAny, prExtendNumLet}: {wbExtendNumLet, wbBreak, 9990},
- {wbALetter, prExtendNumLet}: {wbExtendNumLet, wbDontBreak, 131},
- {wbHebrewLetter, prExtendNumLet}: {wbExtendNumLet, wbDontBreak, 131},
- {wbNumeric, prExtendNumLet}: {wbExtendNumLet, wbDontBreak, 131},
- {wbKatakana, prExtendNumLet}: {wbExtendNumLet, wbDontBreak, 131},
- {wbExtendNumLet, prExtendNumLet}: {wbExtendNumLet, wbDontBreak, 131},
+ case wbAny | prExtendNumLet<<32:
+ return wbExtendNumLet, true, 9990
+ case wbALetter | prExtendNumLet<<32:
+ return wbExtendNumLet, false, 131
+ case wbHebrewLetter | prExtendNumLet<<32:
+ return wbExtendNumLet, false, 131
+ case wbNumeric | prExtendNumLet<<32:
+ return wbExtendNumLet, false, 131
+ case wbKatakana | prExtendNumLet<<32:
+ return wbExtendNumLet, false, 131
+ case wbExtendNumLet | prExtendNumLet<<32:
+ return wbExtendNumLet, false, 131
// WB13b.
- {wbExtendNumLet, prALetter}: {wbALetter, wbDontBreak, 132},
- {wbExtendNumLet, prHebrewLetter}: {wbHebrewLetter, wbDontBreak, 132},
- {wbExtendNumLet, prNumeric}: {wbNumeric, wbDontBreak, 132},
- {wbExtendNumLet, prKatakana}: {prKatakana, wbDontBreak, 132},
+ case wbExtendNumLet | prALetter<<32:
+ return wbALetter, false, 132
+ case wbExtendNumLet | prHebrewLetter<<32:
+ return wbHebrewLetter, false, 132
+ case wbExtendNumLet | prNumeric<<32:
+ return wbNumeric, false, 132
+ case wbExtendNumLet | prKatakana<<32:
+ return wbKatakana, false, 132
+
+ default:
+ return -1, false, -1
+ }
}
// transitionWordBreakState determines the new state of the word break parser
@@ -141,30 +180,27 @@ func transitionWordBreakState(state int, r rune, b []byte, str string) (newState
// Find the applicable transition in the table.
var rule int
- transition, ok := wbTransitions[[2]int{state, nextProperty}]
- if ok {
- // We have a specific transition. We'll use it.
- newState, wordBreak, rule = transition[0], transition[1] == wbBreak, transition[2]
- } else {
+ newState, wordBreak, rule = wbTransitions(state, nextProperty)
+ if newState < 0 {
// No specific transition found. Try the less specific ones.
- transAnyProp, okAnyProp := wbTransitions[[2]int{state, prAny}]
- transAnyState, okAnyState := wbTransitions[[2]int{wbAny, nextProperty}]
- if okAnyProp && okAnyState {
+ anyPropState, anyPropWordBreak, anyPropRule := wbTransitions(state, prAny)
+ anyStateState, anyStateWordBreak, anyStateRule := wbTransitions(wbAny, nextProperty)
+ if anyPropState >= 0 && anyStateState >= 0 {
// Both apply. We'll use a mix (see comments for grTransitions).
- newState, wordBreak, rule = transAnyState[0], transAnyState[1] == wbBreak, transAnyState[2]
- if transAnyProp[2] < transAnyState[2] {
- wordBreak, rule = transAnyProp[1] == wbBreak, transAnyProp[2]
+ newState, wordBreak, rule = anyStateState, anyStateWordBreak, anyStateRule
+ if anyPropRule < anyStateRule {
+ wordBreak, rule = anyPropWordBreak, anyPropRule
}
- } else if okAnyProp {
+ } else if anyPropState >= 0 {
// We only have a specific state.
- newState, wordBreak, rule = transAnyProp[0], transAnyProp[1] == wbBreak, transAnyProp[2]
+ newState, wordBreak, rule = anyPropState, anyPropWordBreak, anyPropRule
// This branch will probably never be reached because okAnyState will
// always be true given the current transition map. But we keep it here
// for future modifications to the transition map where this may not be
// true anymore.
- } else if okAnyState {
+ } else if anyStateState >= 0 {
// We only have a specific property.
- newState, wordBreak, rule = transAnyState[0], transAnyState[1] == wbBreak, transAnyState[2]
+ newState, wordBreak, rule = anyStateState, anyStateWordBreak, anyStateRule
} else {
// No known transition. WB999: Any ÷ Any.
newState, wordBreak, rule = wbAny, true, 9990
diff --git a/vendor/github.com/rogpeppe/go-internal/LICENSE b/vendor/github.com/rogpeppe/go-internal/LICENSE
new file mode 100644
index 000000000..49ea0f928
--- /dev/null
+++ b/vendor/github.com/rogpeppe/go-internal/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2018 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/mksyscall.go b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/mksyscall.go
new file mode 100644
index 000000000..a8edafb3c
--- /dev/null
+++ b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/mksyscall.go
@@ -0,0 +1,7 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package windows
+
+//go:generate go run $GOROOT/src/syscall/mksyscall_windows.go -output zsyscall_windows.go syscall_windows.go security_windows.go psapi_windows.go symlink_windows.go
diff --git a/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/psapi_windows.go b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/psapi_windows.go
new file mode 100644
index 000000000..b138e658a
--- /dev/null
+++ b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/psapi_windows.go
@@ -0,0 +1,20 @@
+// Copyright 2017 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package windows
+
+type PROCESS_MEMORY_COUNTERS struct {
+ CB uint32
+ PageFaultCount uint32
+ PeakWorkingSetSize uintptr
+ WorkingSetSize uintptr
+ QuotaPeakPagedPoolUsage uintptr
+ QuotaPagedPoolUsage uintptr
+ QuotaPeakNonPagedPoolUsage uintptr
+ QuotaNonPagedPoolUsage uintptr
+ PagefileUsage uintptr
+ PeakPagefileUsage uintptr
+}
+
+//sys GetProcessMemoryInfo(handle syscall.Handle, memCounters *PROCESS_MEMORY_COUNTERS, cb uint32) (err error) = psapi.GetProcessMemoryInfo
diff --git a/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/reparse_windows.go b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/reparse_windows.go
new file mode 100644
index 000000000..7c6ad8fb7
--- /dev/null
+++ b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/reparse_windows.go
@@ -0,0 +1,64 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package windows
+
+const (
+ FSCTL_SET_REPARSE_POINT = 0x000900A4
+ IO_REPARSE_TAG_MOUNT_POINT = 0xA0000003
+
+ SYMLINK_FLAG_RELATIVE = 1
+)
+
+// These structures are described
+// in https://msdn.microsoft.com/en-us/library/cc232007.aspx
+// and https://msdn.microsoft.com/en-us/library/cc232006.aspx.
+
+// REPARSE_DATA_BUFFER_HEADER is a common part of REPARSE_DATA_BUFFER structure.
+type REPARSE_DATA_BUFFER_HEADER struct {
+ ReparseTag uint32
+ // The size, in bytes, of the reparse data that follows
+ // the common portion of the REPARSE_DATA_BUFFER element.
+ // This value is the length of the data starting at the
+ // SubstituteNameOffset field.
+ ReparseDataLength uint16
+ Reserved uint16
+}
+
+type SymbolicLinkReparseBuffer struct {
+ // The integer that contains the offset, in bytes,
+ // of the substitute name string in the PathBuffer array,
+ // computed as an offset from byte 0 of PathBuffer. Note that
+ // this offset must be divided by 2 to get the array index.
+ SubstituteNameOffset uint16
+ // The integer that contains the length, in bytes, of the
+ // substitute name string. If this string is null-terminated,
+ // SubstituteNameLength does not include the Unicode null character.
+ SubstituteNameLength uint16
+ // PrintNameOffset is similar to SubstituteNameOffset.
+ PrintNameOffset uint16
+ // PrintNameLength is similar to SubstituteNameLength.
+ PrintNameLength uint16
+ // Flags specifies whether the substitute name is a full path name or
+ // a path name relative to the directory containing the symbolic link.
+ Flags uint32
+ PathBuffer [1]uint16
+}
+
+type MountPointReparseBuffer struct {
+ // The integer that contains the offset, in bytes,
+ // of the substitute name string in the PathBuffer array,
+ // computed as an offset from byte 0 of PathBuffer. Note that
+ // this offset must be divided by 2 to get the array index.
+ SubstituteNameOffset uint16
+ // The integer that contains the length, in bytes, of the
+ // substitute name string. If this string is null-terminated,
+ // SubstituteNameLength does not include the Unicode null character.
+ SubstituteNameLength uint16
+ // PrintNameOffset is similar to SubstituteNameOffset.
+ PrintNameOffset uint16
+ // PrintNameLength is similar to SubstituteNameLength.
+ PrintNameLength uint16
+ PathBuffer [1]uint16
+}
diff --git a/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/security_windows.go b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/security_windows.go
new file mode 100644
index 000000000..4a2dfc0c7
--- /dev/null
+++ b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/security_windows.go
@@ -0,0 +1,128 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package windows
+
+import (
+ "syscall"
+ "unsafe"
+)
+
+const (
+ SecurityAnonymous = 0
+ SecurityIdentification = 1
+ SecurityImpersonation = 2
+ SecurityDelegation = 3
+)
+
+//sys ImpersonateSelf(impersonationlevel uint32) (err error) = advapi32.ImpersonateSelf
+//sys RevertToSelf() (err error) = advapi32.RevertToSelf
+
+const (
+ TOKEN_ADJUST_PRIVILEGES = 0x0020
+ SE_PRIVILEGE_ENABLED = 0x00000002
+)
+
+type LUID struct {
+ LowPart uint32
+ HighPart int32
+}
+
+type LUID_AND_ATTRIBUTES struct {
+ Luid LUID
+ Attributes uint32
+}
+
+type TOKEN_PRIVILEGES struct {
+ PrivilegeCount uint32
+ Privileges [1]LUID_AND_ATTRIBUTES
+}
+
+//sys OpenThreadToken(h syscall.Handle, access uint32, openasself bool, token *syscall.Token) (err error) = advapi32.OpenThreadToken
+//sys LookupPrivilegeValue(systemname *uint16, name *uint16, luid *LUID) (err error) = advapi32.LookupPrivilegeValueW
+//sys adjustTokenPrivileges(token syscall.Token, disableAllPrivileges bool, newstate *TOKEN_PRIVILEGES, buflen uint32, prevstate *TOKEN_PRIVILEGES, returnlen *uint32) (ret uint32, err error) [true] = advapi32.AdjustTokenPrivileges
+
+func AdjustTokenPrivileges(token syscall.Token, disableAllPrivileges bool, newstate *TOKEN_PRIVILEGES, buflen uint32, prevstate *TOKEN_PRIVILEGES, returnlen *uint32) error {
+ ret, err := adjustTokenPrivileges(token, disableAllPrivileges, newstate, buflen, prevstate, returnlen)
+ if ret == 0 {
+ // AdjustTokenPrivileges call failed
+ return err
+ }
+ // AdjustTokenPrivileges call succeeded
+ if err == syscall.EINVAL {
+ // GetLastError returned ERROR_SUCCESS
+ return nil
+ }
+ return err
+}
+
+//sys DuplicateTokenEx(hExistingToken syscall.Token, dwDesiredAccess uint32, lpTokenAttributes *syscall.SecurityAttributes, impersonationLevel uint32, tokenType TokenType, phNewToken *syscall.Token) (err error) = advapi32.DuplicateTokenEx
+//sys SetTokenInformation(tokenHandle syscall.Token, tokenInformationClass uint32, tokenInformation uintptr, tokenInformationLength uint32) (err error) = advapi32.SetTokenInformation
+
+type SID_AND_ATTRIBUTES struct {
+ Sid *syscall.SID
+ Attributes uint32
+}
+
+type TOKEN_MANDATORY_LABEL struct {
+ Label SID_AND_ATTRIBUTES
+}
+
+func (tml *TOKEN_MANDATORY_LABEL) Size() uint32 {
+ return uint32(unsafe.Sizeof(TOKEN_MANDATORY_LABEL{})) + syscall.GetLengthSid(tml.Label.Sid)
+}
+
+const SE_GROUP_INTEGRITY = 0x00000020
+
+type TokenType uint32
+
+const (
+ TokenPrimary TokenType = 1
+ TokenImpersonation TokenType = 2
+)
+
+//sys GetProfilesDirectory(dir *uint16, dirLen *uint32) (err error) = userenv.GetProfilesDirectoryW
+
+const (
+ LG_INCLUDE_INDIRECT = 0x1
+ MAX_PREFERRED_LENGTH = 0xFFFFFFFF
+)
+
+type LocalGroupUserInfo0 struct {
+ Name *uint16
+}
+
+type UserInfo4 struct {
+ Name *uint16
+ Password *uint16
+ PasswordAge uint32
+ Priv uint32
+ HomeDir *uint16
+ Comment *uint16
+ Flags uint32
+ ScriptPath *uint16
+ AuthFlags uint32
+ FullName *uint16
+ UsrComment *uint16
+ Parms *uint16
+ Workstations *uint16
+ LastLogon uint32
+ LastLogoff uint32
+ AcctExpires uint32
+ MaxStorage uint32
+ UnitsPerWeek uint32
+ LogonHours *byte
+ BadPwCount uint32
+ NumLogons uint32
+ LogonServer *uint16
+ CountryCode uint32
+ CodePage uint32
+ UserSid *syscall.SID
+ PrimaryGroupID uint32
+ Profile *uint16
+ HomeDirDrive *uint16
+ PasswordExpired uint32
+}
+
+//sys NetUserGetLocalGroups(serverName *uint16, userName *uint16, level uint32, flags uint32, buf **byte, prefMaxLen uint32, entriesRead *uint32, totalEntries *uint32) (neterr error) = netapi32.NetUserGetLocalGroups
diff --git a/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/symlink_windows.go b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/symlink_windows.go
new file mode 100644
index 000000000..b64d058d1
--- /dev/null
+++ b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/symlink_windows.go
@@ -0,0 +1,39 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package windows
+
+import "syscall"
+
+const (
+ ERROR_INVALID_PARAMETER syscall.Errno = 87
+
+ // symlink support for CreateSymbolicLink() starting with Windows 10 (1703, v10.0.14972)
+ SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE = 0x2
+
+ // FileInformationClass values
+ FileBasicInfo = 0 // FILE_BASIC_INFO
+ FileStandardInfo = 1 // FILE_STANDARD_INFO
+ FileNameInfo = 2 // FILE_NAME_INFO
+ FileStreamInfo = 7 // FILE_STREAM_INFO
+ FileCompressionInfo = 8 // FILE_COMPRESSION_INFO
+ FileAttributeTagInfo = 9 // FILE_ATTRIBUTE_TAG_INFO
+ FileIdBothDirectoryInfo = 0xa // FILE_ID_BOTH_DIR_INFO
+ FileIdBothDirectoryRestartInfo = 0xb // FILE_ID_BOTH_DIR_INFO
+ FileRemoteProtocolInfo = 0xd // FILE_REMOTE_PROTOCOL_INFO
+ FileFullDirectoryInfo = 0xe // FILE_FULL_DIR_INFO
+ FileFullDirectoryRestartInfo = 0xf // FILE_FULL_DIR_INFO
+ FileStorageInfo = 0x10 // FILE_STORAGE_INFO
+ FileAlignmentInfo = 0x11 // FILE_ALIGNMENT_INFO
+ FileIdInfo = 0x12 // FILE_ID_INFO
+ FileIdExtdDirectoryInfo = 0x13 // FILE_ID_EXTD_DIR_INFO
+ FileIdExtdDirectoryRestartInfo = 0x14 // FILE_ID_EXTD_DIR_INFO
+)
+
+type FILE_ATTRIBUTE_TAG_INFO struct {
+ FileAttributes uint32
+ ReparseTag uint32
+}
+
+//sys GetFileInformationByHandleEx(handle syscall.Handle, class uint32, info *byte, bufsize uint32) (err error)
diff --git a/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/syscall_windows.go b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/syscall_windows.go
new file mode 100644
index 000000000..121132f6f
--- /dev/null
+++ b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/syscall_windows.go
@@ -0,0 +1,307 @@
+// Copyright 2014 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package windows
+
+import (
+ "sync"
+ "syscall"
+ "unsafe"
+)
+
+const (
+ ERROR_SHARING_VIOLATION syscall.Errno = 32
+ ERROR_LOCK_VIOLATION syscall.Errno = 33
+ ERROR_NOT_SUPPORTED syscall.Errno = 50
+ ERROR_CALL_NOT_IMPLEMENTED syscall.Errno = 120
+ ERROR_INVALID_NAME syscall.Errno = 123
+ ERROR_LOCK_FAILED syscall.Errno = 167
+ ERROR_NO_UNICODE_TRANSLATION syscall.Errno = 1113
+)
+
+const GAA_FLAG_INCLUDE_PREFIX = 0x00000010
+
+const (
+ IF_TYPE_OTHER = 1
+ IF_TYPE_ETHERNET_CSMACD = 6
+ IF_TYPE_ISO88025_TOKENRING = 9
+ IF_TYPE_PPP = 23
+ IF_TYPE_SOFTWARE_LOOPBACK = 24
+ IF_TYPE_ATM = 37
+ IF_TYPE_IEEE80211 = 71
+ IF_TYPE_TUNNEL = 131
+ IF_TYPE_IEEE1394 = 144
+)
+
+type SocketAddress struct {
+ Sockaddr *syscall.RawSockaddrAny
+ SockaddrLength int32
+}
+
+type IpAdapterUnicastAddress struct {
+ Length uint32
+ Flags uint32
+ Next *IpAdapterUnicastAddress
+ Address SocketAddress
+ PrefixOrigin int32
+ SuffixOrigin int32
+ DadState int32
+ ValidLifetime uint32
+ PreferredLifetime uint32
+ LeaseLifetime uint32
+ OnLinkPrefixLength uint8
+}
+
+type IpAdapterAnycastAddress struct {
+ Length uint32
+ Flags uint32
+ Next *IpAdapterAnycastAddress
+ Address SocketAddress
+}
+
+type IpAdapterMulticastAddress struct {
+ Length uint32
+ Flags uint32
+ Next *IpAdapterMulticastAddress
+ Address SocketAddress
+}
+
+type IpAdapterDnsServerAdapter struct {
+ Length uint32
+ Reserved uint32
+ Next *IpAdapterDnsServerAdapter
+ Address SocketAddress
+}
+
+type IpAdapterPrefix struct {
+ Length uint32
+ Flags uint32
+ Next *IpAdapterPrefix
+ Address SocketAddress
+ PrefixLength uint32
+}
+
+type IpAdapterAddresses struct {
+ Length uint32
+ IfIndex uint32
+ Next *IpAdapterAddresses
+ AdapterName *byte
+ FirstUnicastAddress *IpAdapterUnicastAddress
+ FirstAnycastAddress *IpAdapterAnycastAddress
+ FirstMulticastAddress *IpAdapterMulticastAddress
+ FirstDnsServerAddress *IpAdapterDnsServerAdapter
+ DnsSuffix *uint16
+ Description *uint16
+ FriendlyName *uint16
+ PhysicalAddress [syscall.MAX_ADAPTER_ADDRESS_LENGTH]byte
+ PhysicalAddressLength uint32
+ Flags uint32
+ Mtu uint32
+ IfType uint32
+ OperStatus uint32
+ Ipv6IfIndex uint32
+ ZoneIndices [16]uint32
+ FirstPrefix *IpAdapterPrefix
+ /* more fields might be present here. */
+}
+
+const (
+ IfOperStatusUp = 1
+ IfOperStatusDown = 2
+ IfOperStatusTesting = 3
+ IfOperStatusUnknown = 4
+ IfOperStatusDormant = 5
+ IfOperStatusNotPresent = 6
+ IfOperStatusLowerLayerDown = 7
+)
+
+//sys GetAdaptersAddresses(family uint32, flags uint32, reserved uintptr, adapterAddresses *IpAdapterAddresses, sizePointer *uint32) (errcode error) = iphlpapi.GetAdaptersAddresses
+//sys GetComputerNameEx(nameformat uint32, buf *uint16, n *uint32) (err error) = GetComputerNameExW
+//sys MoveFileEx(from *uint16, to *uint16, flags uint32) (err error) = MoveFileExW
+//sys GetModuleFileName(module syscall.Handle, fn *uint16, len uint32) (n uint32, err error) = kernel32.GetModuleFileNameW
+
+const (
+ WSA_FLAG_OVERLAPPED = 0x01
+ WSA_FLAG_NO_HANDLE_INHERIT = 0x80
+
+ WSAEMSGSIZE syscall.Errno = 10040
+
+ MSG_PEEK = 0x2
+ MSG_TRUNC = 0x0100
+ MSG_CTRUNC = 0x0200
+
+ socket_error = uintptr(^uint32(0))
+)
+
+var WSAID_WSASENDMSG = syscall.GUID{
+ Data1: 0xa441e712,
+ Data2: 0x754f,
+ Data3: 0x43ca,
+ Data4: [8]byte{0x84, 0xa7, 0x0d, 0xee, 0x44, 0xcf, 0x60, 0x6d},
+}
+
+var WSAID_WSARECVMSG = syscall.GUID{
+ Data1: 0xf689d7c8,
+ Data2: 0x6f1f,
+ Data3: 0x436b,
+ Data4: [8]byte{0x8a, 0x53, 0xe5, 0x4f, 0xe3, 0x51, 0xc3, 0x22},
+}
+
+var sendRecvMsgFunc struct {
+ once sync.Once
+ sendAddr uintptr
+ recvAddr uintptr
+ err error
+}
+
+type WSAMsg struct {
+ Name *syscall.RawSockaddrAny
+ Namelen int32
+ Buffers *syscall.WSABuf
+ BufferCount uint32
+ Control syscall.WSABuf
+ Flags uint32
+}
+
+//sys WSASocket(af int32, typ int32, protocol int32, protinfo *syscall.WSAProtocolInfo, group uint32, flags uint32) (handle syscall.Handle, err error) [failretval==syscall.InvalidHandle] = ws2_32.WSASocketW
+
+func loadWSASendRecvMsg() error {
+ sendRecvMsgFunc.once.Do(func() {
+ var s syscall.Handle
+ s, sendRecvMsgFunc.err = syscall.Socket(syscall.AF_INET, syscall.SOCK_DGRAM, syscall.IPPROTO_UDP)
+ if sendRecvMsgFunc.err != nil {
+ return
+ }
+ defer syscall.CloseHandle(s)
+ var n uint32
+ sendRecvMsgFunc.err = syscall.WSAIoctl(s,
+ syscall.SIO_GET_EXTENSION_FUNCTION_POINTER,
+ (*byte)(unsafe.Pointer(&WSAID_WSARECVMSG)),
+ uint32(unsafe.Sizeof(WSAID_WSARECVMSG)),
+ (*byte)(unsafe.Pointer(&sendRecvMsgFunc.recvAddr)),
+ uint32(unsafe.Sizeof(sendRecvMsgFunc.recvAddr)),
+ &n, nil, 0)
+ if sendRecvMsgFunc.err != nil {
+ return
+ }
+ sendRecvMsgFunc.err = syscall.WSAIoctl(s,
+ syscall.SIO_GET_EXTENSION_FUNCTION_POINTER,
+ (*byte)(unsafe.Pointer(&WSAID_WSASENDMSG)),
+ uint32(unsafe.Sizeof(WSAID_WSASENDMSG)),
+ (*byte)(unsafe.Pointer(&sendRecvMsgFunc.sendAddr)),
+ uint32(unsafe.Sizeof(sendRecvMsgFunc.sendAddr)),
+ &n, nil, 0)
+ })
+ return sendRecvMsgFunc.err
+}
+
+func WSASendMsg(fd syscall.Handle, msg *WSAMsg, flags uint32, bytesSent *uint32, overlapped *syscall.Overlapped, croutine *byte) error {
+ err := loadWSASendRecvMsg()
+ if err != nil {
+ return err
+ }
+ r1, _, e1 := syscall.Syscall6(sendRecvMsgFunc.sendAddr, 6, uintptr(fd), uintptr(unsafe.Pointer(msg)), uintptr(flags), uintptr(unsafe.Pointer(bytesSent)), uintptr(unsafe.Pointer(overlapped)), uintptr(unsafe.Pointer(croutine)))
+ if r1 == socket_error {
+ if e1 != 0 {
+ err = errnoErr(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return err
+}
+
+func WSARecvMsg(fd syscall.Handle, msg *WSAMsg, bytesReceived *uint32, overlapped *syscall.Overlapped, croutine *byte) error {
+ err := loadWSASendRecvMsg()
+ if err != nil {
+ return err
+ }
+ r1, _, e1 := syscall.Syscall6(sendRecvMsgFunc.recvAddr, 5, uintptr(fd), uintptr(unsafe.Pointer(msg)), uintptr(unsafe.Pointer(bytesReceived)), uintptr(unsafe.Pointer(overlapped)), uintptr(unsafe.Pointer(croutine)), 0)
+ if r1 == socket_error {
+ if e1 != 0 {
+ err = errnoErr(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return err
+}
+
+const (
+ ComputerNameNetBIOS = 0
+ ComputerNameDnsHostname = 1
+ ComputerNameDnsDomain = 2
+ ComputerNameDnsFullyQualified = 3
+ ComputerNamePhysicalNetBIOS = 4
+ ComputerNamePhysicalDnsHostname = 5
+ ComputerNamePhysicalDnsDomain = 6
+ ComputerNamePhysicalDnsFullyQualified = 7
+ ComputerNameMax = 8
+
+ MOVEFILE_REPLACE_EXISTING = 0x1
+ MOVEFILE_COPY_ALLOWED = 0x2
+ MOVEFILE_DELAY_UNTIL_REBOOT = 0x4
+ MOVEFILE_WRITE_THROUGH = 0x8
+ MOVEFILE_CREATE_HARDLINK = 0x10
+ MOVEFILE_FAIL_IF_NOT_TRACKABLE = 0x20
+)
+
+func Rename(oldpath, newpath string) error {
+ from, err := syscall.UTF16PtrFromString(oldpath)
+ if err != nil {
+ return err
+ }
+ to, err := syscall.UTF16PtrFromString(newpath)
+ if err != nil {
+ return err
+ }
+ return MoveFileEx(from, to, MOVEFILE_REPLACE_EXISTING)
+}
+
+//sys LockFileEx(file syscall.Handle, flags uint32, reserved uint32, bytesLow uint32, bytesHigh uint32, overlapped *syscall.Overlapped) (err error) = kernel32.LockFileEx
+//sys UnlockFileEx(file syscall.Handle, reserved uint32, bytesLow uint32, bytesHigh uint32, overlapped *syscall.Overlapped) (err error) = kernel32.UnlockFileEx
+
+const (
+ LOCKFILE_FAIL_IMMEDIATELY = 0x00000001
+ LOCKFILE_EXCLUSIVE_LOCK = 0x00000002
+)
+
+const MB_ERR_INVALID_CHARS = 8
+
+//sys GetACP() (acp uint32) = kernel32.GetACP
+//sys GetConsoleCP() (ccp uint32) = kernel32.GetConsoleCP
+//sys MultiByteToWideChar(codePage uint32, dwFlags uint32, str *byte, nstr int32, wchar *uint16, nwchar int32) (nwrite int32, err error) = kernel32.MultiByteToWideChar
+//sys GetCurrentThread() (pseudoHandle syscall.Handle, err error) = kernel32.GetCurrentThread
+
+const STYPE_DISKTREE = 0x00
+
+type SHARE_INFO_2 struct {
+ Netname *uint16
+ Type uint32
+ Remark *uint16
+ Permissions uint32
+ MaxUses uint32
+ CurrentUses uint32
+ Path *uint16
+ Passwd *uint16
+}
+
+//sys NetShareAdd(serverName *uint16, level uint32, buf *byte, parmErr *uint16) (neterr error) = netapi32.NetShareAdd
+//sys NetShareDel(serverName *uint16, netName *uint16, reserved uint32) (neterr error) = netapi32.NetShareDel
+
+const (
+ FILE_NAME_NORMALIZED = 0x0
+ FILE_NAME_OPENED = 0x8
+
+ VOLUME_NAME_DOS = 0x0
+ VOLUME_NAME_GUID = 0x1
+ VOLUME_NAME_NONE = 0x4
+ VOLUME_NAME_NT = 0x2
+)
+
+//sys GetFinalPathNameByHandle(file syscall.Handle, filePath *uint16, filePathSize uint32, flags uint32) (n uint32, err error) = kernel32.GetFinalPathNameByHandleW
+
+func LoadGetFinalPathNameByHandle() error {
+ return procGetFinalPathNameByHandleW.Find()
+}
diff --git a/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/sysdll/sysdll.go b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/sysdll/sysdll.go
new file mode 100644
index 000000000..4e0018f38
--- /dev/null
+++ b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/sysdll/sysdll.go
@@ -0,0 +1,28 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package sysdll is an internal leaf package that records and reports
+// which Windows DLL names are used by Go itself. These DLLs are then
+// only loaded from the System32 directory. See Issue 14959.
+package sysdll
+
+// IsSystemDLL reports whether the named dll key (a base name, like
+// "foo.dll") is a system DLL which should only be loaded from the
+// Windows SYSTEM32 directory.
+//
+// Filenames are case sensitive, but that doesn't matter because
+// the case registered with Add is also the same case used with
+// LoadDLL later.
+//
+// It has no associated mutex and should only be mutated serially
+// (currently: during init), and not concurrent with DLL loading.
+var IsSystemDLL = map[string]bool{}
+
+// Add notes that dll is a system32 DLL which should only be loaded
+// from the Windows SYSTEM32 directory. It returns its argument back,
+// for ease of use in generated code.
+func Add(dll string) string {
+ IsSystemDLL[dll] = true
+ return dll
+}
diff --git a/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/zsyscall_windows.go b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/zsyscall_windows.go
new file mode 100644
index 000000000..3ed2d9fe0
--- /dev/null
+++ b/vendor/github.com/rogpeppe/go-internal/internal/syscall/windows/zsyscall_windows.go
@@ -0,0 +1,363 @@
+// Code generated by 'go generate'; DO NOT EDIT.
+
+package windows
+
+import (
+ "syscall"
+ "unsafe"
+
+ "github.com/rogpeppe/go-internal/internal/syscall/windows/sysdll"
+)
+
+var _ unsafe.Pointer
+
+// Do the interface allocations only once for common
+// Errno values.
+const (
+ errnoERROR_IO_PENDING = 997
+)
+
+var (
+ errERROR_IO_PENDING error = syscall.Errno(errnoERROR_IO_PENDING)
+)
+
+// errnoErr returns common boxed Errno values, to prevent
+// allocations at runtime.
+func errnoErr(e syscall.Errno) error {
+ switch e {
+ case 0:
+ return nil
+ case errnoERROR_IO_PENDING:
+ return errERROR_IO_PENDING
+ }
+ // TODO: add more here, after collecting data on the common
+ // error values see on Windows. (perhaps when running
+ // all.bat?)
+ return e
+}
+
+var (
+ modiphlpapi = syscall.NewLazyDLL(sysdll.Add("iphlpapi.dll"))
+ modkernel32 = syscall.NewLazyDLL(sysdll.Add("kernel32.dll"))
+ modws2_32 = syscall.NewLazyDLL(sysdll.Add("ws2_32.dll"))
+ modnetapi32 = syscall.NewLazyDLL(sysdll.Add("netapi32.dll"))
+ modadvapi32 = syscall.NewLazyDLL(sysdll.Add("advapi32.dll"))
+ moduserenv = syscall.NewLazyDLL(sysdll.Add("userenv.dll"))
+ modpsapi = syscall.NewLazyDLL(sysdll.Add("psapi.dll"))
+
+ procGetAdaptersAddresses = modiphlpapi.NewProc("GetAdaptersAddresses")
+ procGetComputerNameExW = modkernel32.NewProc("GetComputerNameExW")
+ procMoveFileExW = modkernel32.NewProc("MoveFileExW")
+ procGetModuleFileNameW = modkernel32.NewProc("GetModuleFileNameW")
+ procWSASocketW = modws2_32.NewProc("WSASocketW")
+ procLockFileEx = modkernel32.NewProc("LockFileEx")
+ procUnlockFileEx = modkernel32.NewProc("UnlockFileEx")
+ procGetACP = modkernel32.NewProc("GetACP")
+ procGetConsoleCP = modkernel32.NewProc("GetConsoleCP")
+ procMultiByteToWideChar = modkernel32.NewProc("MultiByteToWideChar")
+ procGetCurrentThread = modkernel32.NewProc("GetCurrentThread")
+ procNetShareAdd = modnetapi32.NewProc("NetShareAdd")
+ procNetShareDel = modnetapi32.NewProc("NetShareDel")
+ procGetFinalPathNameByHandleW = modkernel32.NewProc("GetFinalPathNameByHandleW")
+ procImpersonateSelf = modadvapi32.NewProc("ImpersonateSelf")
+ procRevertToSelf = modadvapi32.NewProc("RevertToSelf")
+ procOpenThreadToken = modadvapi32.NewProc("OpenThreadToken")
+ procLookupPrivilegeValueW = modadvapi32.NewProc("LookupPrivilegeValueW")
+ procAdjustTokenPrivileges = modadvapi32.NewProc("AdjustTokenPrivileges")
+ procDuplicateTokenEx = modadvapi32.NewProc("DuplicateTokenEx")
+ procSetTokenInformation = modadvapi32.NewProc("SetTokenInformation")
+ procGetProfilesDirectoryW = moduserenv.NewProc("GetProfilesDirectoryW")
+ procNetUserGetLocalGroups = modnetapi32.NewProc("NetUserGetLocalGroups")
+ procGetProcessMemoryInfo = modpsapi.NewProc("GetProcessMemoryInfo")
+ procGetFileInformationByHandleEx = modkernel32.NewProc("GetFileInformationByHandleEx")
+)
+
+func GetAdaptersAddresses(family uint32, flags uint32, reserved uintptr, adapterAddresses *IpAdapterAddresses, sizePointer *uint32) (errcode error) {
+ r0, _, _ := syscall.Syscall6(procGetAdaptersAddresses.Addr(), 5, uintptr(family), uintptr(flags), uintptr(reserved), uintptr(unsafe.Pointer(adapterAddresses)), uintptr(unsafe.Pointer(sizePointer)), 0)
+ if r0 != 0 {
+ errcode = syscall.Errno(r0)
+ }
+ return
+}
+
+func GetComputerNameEx(nameformat uint32, buf *uint16, n *uint32) (err error) {
+ r1, _, e1 := syscall.Syscall(procGetComputerNameExW.Addr(), 3, uintptr(nameformat), uintptr(unsafe.Pointer(buf)), uintptr(unsafe.Pointer(n)))
+ if r1 == 0 {
+ if e1 != 0 {
+ err = errnoErr(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func MoveFileEx(from *uint16, to *uint16, flags uint32) (err error) {
+ r1, _, e1 := syscall.Syscall(procMoveFileExW.Addr(), 3, uintptr(unsafe.Pointer(from)), uintptr(unsafe.Pointer(to)), uintptr(flags))
+ if r1 == 0 {
+ if e1 != 0 {
+ err = errnoErr(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func GetModuleFileName(module syscall.Handle, fn *uint16, len uint32) (n uint32, err error) {
+ r0, _, e1 := syscall.Syscall(procGetModuleFileNameW.Addr(), 3, uintptr(module), uintptr(unsafe.Pointer(fn)), uintptr(len))
+ n = uint32(r0)
+ if n == 0 {
+ if e1 != 0 {
+ err = errnoErr(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func WSASocket(af int32, typ int32, protocol int32, protinfo *syscall.WSAProtocolInfo, group uint32, flags uint32) (handle syscall.Handle, err error) {
+ r0, _, e1 := syscall.Syscall6(procWSASocketW.Addr(), 6, uintptr(af), uintptr(typ), uintptr(protocol), uintptr(unsafe.Pointer(protinfo)), uintptr(group), uintptr(flags))
+ handle = syscall.Handle(r0)
+ if handle == syscall.InvalidHandle {
+ if e1 != 0 {
+ err = errnoErr(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func LockFileEx(file syscall.Handle, flags uint32, reserved uint32, bytesLow uint32, bytesHigh uint32, overlapped *syscall.Overlapped) (err error) {
+ r1, _, e1 := syscall.Syscall6(procLockFileEx.Addr(), 6, uintptr(file), uintptr(flags), uintptr(reserved), uintptr(bytesLow), uintptr(bytesHigh), uintptr(unsafe.Pointer(overlapped)))
+ if r1 == 0 {
+ if e1 != 0 {
+ err = errnoErr(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func UnlockFileEx(file syscall.Handle, reserved uint32, bytesLow uint32, bytesHigh uint32, overlapped *syscall.Overlapped) (err error) {
+ r1, _, e1 := syscall.Syscall6(procUnlockFileEx.Addr(), 5, uintptr(file), uintptr(reserved), uintptr(bytesLow), uintptr(bytesHigh), uintptr(unsafe.Pointer(overlapped)), 0)
+ if r1 == 0 {
+ if e1 != 0 {
+ err = errnoErr(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func GetACP() (acp uint32) {
+ r0, _, _ := syscall.Syscall(procGetACP.Addr(), 0, 0, 0, 0)
+ acp = uint32(r0)
+ return
+}
+
+func GetConsoleCP() (ccp uint32) {
+ r0, _, _ := syscall.Syscall(procGetConsoleCP.Addr(), 0, 0, 0, 0)
+ ccp = uint32(r0)
+ return
+}
+
+func MultiByteToWideChar(codePage uint32, dwFlags uint32, str *byte, nstr int32, wchar *uint16, nwchar int32) (nwrite int32, err error) {
+ r0, _, e1 := syscall.Syscall6(procMultiByteToWideChar.Addr(), 6, uintptr(codePage), uintptr(dwFlags), uintptr(unsafe.Pointer(str)), uintptr(nstr), uintptr(unsafe.Pointer(wchar)), uintptr(nwchar))
+ nwrite = int32(r0)
+ if nwrite == 0 {
+ if e1 != 0 {
+ err = errnoErr(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func GetCurrentThread() (pseudoHandle syscall.Handle, err error) {
+ r0, _, e1 := syscall.Syscall(procGetCurrentThread.Addr(), 0, 0, 0, 0)
+ pseudoHandle = syscall.Handle(r0)
+ if pseudoHandle == 0 {
+ if e1 != 0 {
+ err = errnoErr(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func NetShareAdd(serverName *uint16, level uint32, buf *byte, parmErr *uint16) (neterr error) {
+ r0, _, _ := syscall.Syscall6(procNetShareAdd.Addr(), 4, uintptr(unsafe.Pointer(serverName)), uintptr(level), uintptr(unsafe.Pointer(buf)), uintptr(unsafe.Pointer(parmErr)), 0, 0)
+ if r0 != 0 {
+ neterr = syscall.Errno(r0)
+ }
+ return
+}
+
+func NetShareDel(serverName *uint16, netName *uint16, reserved uint32) (neterr error) {
+ r0, _, _ := syscall.Syscall(procNetShareDel.Addr(), 3, uintptr(unsafe.Pointer(serverName)), uintptr(unsafe.Pointer(netName)), uintptr(reserved))
+ if r0 != 0 {
+ neterr = syscall.Errno(r0)
+ }
+ return
+}
+
+func GetFinalPathNameByHandle(file syscall.Handle, filePath *uint16, filePathSize uint32, flags uint32) (n uint32, err error) {
+ r0, _, e1 := syscall.Syscall6(procGetFinalPathNameByHandleW.Addr(), 4, uintptr(file), uintptr(unsafe.Pointer(filePath)), uintptr(filePathSize), uintptr(flags), 0, 0)
+ n = uint32(r0)
+ if n == 0 {
+ if e1 != 0 {
+ err = errnoErr(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func ImpersonateSelf(impersonationlevel uint32) (err error) {
+ r1, _, e1 := syscall.Syscall(procImpersonateSelf.Addr(), 1, uintptr(impersonationlevel), 0, 0)
+ if r1 == 0 {
+ if e1 != 0 {
+ err = errnoErr(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func RevertToSelf() (err error) {
+ r1, _, e1 := syscall.Syscall(procRevertToSelf.Addr(), 0, 0, 0, 0)
+ if r1 == 0 {
+ if e1 != 0 {
+ err = errnoErr(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func OpenThreadToken(h syscall.Handle, access uint32, openasself bool, token *syscall.Token) (err error) {
+ var _p0 uint32
+ if openasself {
+ _p0 = 1
+ } else {
+ _p0 = 0
+ }
+ r1, _, e1 := syscall.Syscall6(procOpenThreadToken.Addr(), 4, uintptr(h), uintptr(access), uintptr(_p0), uintptr(unsafe.Pointer(token)), 0, 0)
+ if r1 == 0 {
+ if e1 != 0 {
+ err = errnoErr(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func LookupPrivilegeValue(systemname *uint16, name *uint16, luid *LUID) (err error) {
+ r1, _, e1 := syscall.Syscall(procLookupPrivilegeValueW.Addr(), 3, uintptr(unsafe.Pointer(systemname)), uintptr(unsafe.Pointer(name)), uintptr(unsafe.Pointer(luid)))
+ if r1 == 0 {
+ if e1 != 0 {
+ err = errnoErr(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func adjustTokenPrivileges(token syscall.Token, disableAllPrivileges bool, newstate *TOKEN_PRIVILEGES, buflen uint32, prevstate *TOKEN_PRIVILEGES, returnlen *uint32) (ret uint32, err error) {
+ var _p0 uint32
+ if disableAllPrivileges {
+ _p0 = 1
+ } else {
+ _p0 = 0
+ }
+ r0, _, e1 := syscall.Syscall6(procAdjustTokenPrivileges.Addr(), 6, uintptr(token), uintptr(_p0), uintptr(unsafe.Pointer(newstate)), uintptr(buflen), uintptr(unsafe.Pointer(prevstate)), uintptr(unsafe.Pointer(returnlen)))
+ ret = uint32(r0)
+ if true {
+ if e1 != 0 {
+ err = errnoErr(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func DuplicateTokenEx(hExistingToken syscall.Token, dwDesiredAccess uint32, lpTokenAttributes *syscall.SecurityAttributes, impersonationLevel uint32, tokenType TokenType, phNewToken *syscall.Token) (err error) {
+ r1, _, e1 := syscall.Syscall6(procDuplicateTokenEx.Addr(), 6, uintptr(hExistingToken), uintptr(dwDesiredAccess), uintptr(unsafe.Pointer(lpTokenAttributes)), uintptr(impersonationLevel), uintptr(tokenType), uintptr(unsafe.Pointer(phNewToken)))
+ if r1 == 0 {
+ if e1 != 0 {
+ err = errnoErr(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func SetTokenInformation(tokenHandle syscall.Token, tokenInformationClass uint32, tokenInformation uintptr, tokenInformationLength uint32) (err error) {
+ r1, _, e1 := syscall.Syscall6(procSetTokenInformation.Addr(), 4, uintptr(tokenHandle), uintptr(tokenInformationClass), uintptr(tokenInformation), uintptr(tokenInformationLength), 0, 0)
+ if r1 == 0 {
+ if e1 != 0 {
+ err = errnoErr(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func GetProfilesDirectory(dir *uint16, dirLen *uint32) (err error) {
+ r1, _, e1 := syscall.Syscall(procGetProfilesDirectoryW.Addr(), 2, uintptr(unsafe.Pointer(dir)), uintptr(unsafe.Pointer(dirLen)), 0)
+ if r1 == 0 {
+ if e1 != 0 {
+ err = errnoErr(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func NetUserGetLocalGroups(serverName *uint16, userName *uint16, level uint32, flags uint32, buf **byte, prefMaxLen uint32, entriesRead *uint32, totalEntries *uint32) (neterr error) {
+ r0, _, _ := syscall.Syscall9(procNetUserGetLocalGroups.Addr(), 8, uintptr(unsafe.Pointer(serverName)), uintptr(unsafe.Pointer(userName)), uintptr(level), uintptr(flags), uintptr(unsafe.Pointer(buf)), uintptr(prefMaxLen), uintptr(unsafe.Pointer(entriesRead)), uintptr(unsafe.Pointer(totalEntries)), 0)
+ if r0 != 0 {
+ neterr = syscall.Errno(r0)
+ }
+ return
+}
+
+func GetProcessMemoryInfo(handle syscall.Handle, memCounters *PROCESS_MEMORY_COUNTERS, cb uint32) (err error) {
+ r1, _, e1 := syscall.Syscall(procGetProcessMemoryInfo.Addr(), 3, uintptr(handle), uintptr(unsafe.Pointer(memCounters)), uintptr(cb))
+ if r1 == 0 {
+ if e1 != 0 {
+ err = errnoErr(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
+
+func GetFileInformationByHandleEx(handle syscall.Handle, class uint32, info *byte, bufsize uint32) (err error) {
+ r1, _, e1 := syscall.Syscall6(procGetFileInformationByHandleEx.Addr(), 4, uintptr(handle), uintptr(class), uintptr(unsafe.Pointer(info)), uintptr(bufsize), 0, 0)
+ if r1 == 0 {
+ if e1 != 0 {
+ err = errnoErr(e1)
+ } else {
+ err = syscall.EINVAL
+ }
+ }
+ return
+}
diff --git a/vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock.go b/vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock.go
new file mode 100644
index 000000000..05f27c321
--- /dev/null
+++ b/vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock.go
@@ -0,0 +1,99 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package filelock provides a platform-independent API for advisory file
+// locking. Calls to functions in this package on platforms that do not support
+// advisory locks will return errors for which IsNotSupported returns true.
+package filelock
+
+import (
+ "errors"
+ "io/fs"
+ "os"
+)
+
+// A File provides the minimal set of methods required to lock an open file.
+// File implementations must be usable as map keys.
+// The usual implementation is *os.File.
+type File interface {
+ // Name returns the name of the file.
+ Name() string
+
+ // Fd returns a valid file descriptor.
+ // (If the File is an *os.File, it must not be closed.)
+ Fd() uintptr
+
+ // Stat returns the FileInfo structure describing file.
+ Stat() (fs.FileInfo, error)
+}
+
+// Lock places an advisory write lock on the file, blocking until it can be
+// locked.
+//
+// If Lock returns nil, no other process will be able to place a read or write
+// lock on the file until this process exits, closes f, or calls Unlock on it.
+//
+// If f's descriptor is already read- or write-locked, the behavior of Lock is
+// unspecified.
+//
+// Closing the file may or may not release the lock promptly. Callers should
+// ensure that Unlock is always called when Lock succeeds.
+func Lock(f File) error {
+ return lock(f, writeLock)
+}
+
+// RLock places an advisory read lock on the file, blocking until it can be locked.
+//
+// If RLock returns nil, no other process will be able to place a write lock on
+// the file until this process exits, closes f, or calls Unlock on it.
+//
+// If f is already read- or write-locked, the behavior of RLock is unspecified.
+//
+// Closing the file may or may not release the lock promptly. Callers should
+// ensure that Unlock is always called if RLock succeeds.
+func RLock(f File) error {
+ return lock(f, readLock)
+}
+
+// Unlock removes an advisory lock placed on f by this process.
+//
+// The caller must not attempt to unlock a file that is not locked.
+func Unlock(f File) error {
+ return unlock(f)
+}
+
+// String returns the name of the function corresponding to lt
+// (Lock, RLock, or Unlock).
+func (lt lockType) String() string {
+ switch lt {
+ case readLock:
+ return "RLock"
+ case writeLock:
+ return "Lock"
+ default:
+ return "Unlock"
+ }
+}
+
+// IsNotSupported returns a boolean indicating whether the error is known to
+// report that a function is not supported (possibly for a specific input).
+// It is satisfied by ErrNotSupported as well as some syscall errors.
+func IsNotSupported(err error) bool {
+ return isNotSupported(underlyingError(err))
+}
+
+var ErrNotSupported = errors.New("operation not supported")
+
+// underlyingError returns the underlying error for known os error types.
+func underlyingError(err error) error {
+ switch err := err.(type) {
+ case *fs.PathError:
+ return err.Err
+ case *os.LinkError:
+ return err.Err
+ case *os.SyscallError:
+ return err.Err
+ }
+ return err
+}
diff --git a/vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock_fcntl.go b/vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock_fcntl.go
new file mode 100644
index 000000000..856804850
--- /dev/null
+++ b/vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock_fcntl.go
@@ -0,0 +1,214 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build aix || (solaris && !illumos)
+
+// This code implements the filelock API using POSIX 'fcntl' locks, which attach
+// to an (inode, process) pair rather than a file descriptor. To avoid unlocking
+// files prematurely when the same file is opened through different descriptors,
+// we allow only one read-lock at a time.
+//
+// Most platforms provide some alternative API, such as an 'flock' system call
+// or an F_OFD_SETLK command for 'fcntl', that allows for better concurrency and
+// does not require per-inode bookkeeping in the application.
+
+package filelock
+
+import (
+ "errors"
+ "io"
+ "io/fs"
+ "math/rand"
+ "sync"
+ "syscall"
+ "time"
+)
+
+type lockType int16
+
+const (
+ readLock lockType = syscall.F_RDLCK
+ writeLock lockType = syscall.F_WRLCK
+)
+
+type inode = uint64 // type of syscall.Stat_t.Ino
+
+type inodeLock struct {
+ owner File
+ queue []<-chan File
+}
+
+var (
+ mu sync.Mutex
+ inodes = map[File]inode{}
+ locks = map[inode]inodeLock{}
+)
+
+func lock(f File, lt lockType) (err error) {
+ // POSIX locks apply per inode and process, and the lock for an inode is
+ // released when *any* descriptor for that inode is closed. So we need to
+ // synchronize access to each inode internally, and must serialize lock and
+ // unlock calls that refer to the same inode through different descriptors.
+ fi, err := f.Stat()
+ if err != nil {
+ return err
+ }
+ ino := fi.Sys().(*syscall.Stat_t).Ino
+
+ mu.Lock()
+ if i, dup := inodes[f]; dup && i != ino {
+ mu.Unlock()
+ return &fs.PathError{
+ Op: lt.String(),
+ Path: f.Name(),
+ Err: errors.New("inode for file changed since last Lock or RLock"),
+ }
+ }
+ inodes[f] = ino
+
+ var wait chan File
+ l := locks[ino]
+ if l.owner == f {
+ // This file already owns the lock, but the call may change its lock type.
+ } else if l.owner == nil {
+ // No owner: it's ours now.
+ l.owner = f
+ } else {
+ // Already owned: add a channel to wait on.
+ wait = make(chan File)
+ l.queue = append(l.queue, wait)
+ }
+ locks[ino] = l
+ mu.Unlock()
+
+ if wait != nil {
+ wait <- f
+ }
+
+ // Spurious EDEADLK errors arise on platforms that compute deadlock graphs at
+ // the process, rather than thread, level. Consider processes P and Q, with
+ // threads P.1, P.2, and Q.3. The following trace is NOT a deadlock, but will be
+ // reported as a deadlock on systems that consider only process granularity:
+ //
+ // P.1 locks file A.
+ // Q.3 locks file B.
+ // Q.3 blocks on file A.
+ // P.2 blocks on file B. (This is erroneously reported as a deadlock.)
+ // P.1 unlocks file A.
+ // Q.3 unblocks and locks file A.
+ // Q.3 unlocks files A and B.
+ // P.2 unblocks and locks file B.
+ // P.2 unlocks file B.
+ //
+ // These spurious errors were observed in practice on AIX and Solaris in
+ // cmd/go: see https://golang.org/issue/32817.
+ //
+ // We work around this bug by treating EDEADLK as always spurious. If there
+ // really is a lock-ordering bug between the interacting processes, it will
+ // become a livelock instead, but that's not appreciably worse than if we had
+ // a proper flock implementation (which generally does not even attempt to
+ // diagnose deadlocks).
+ //
+ // In the above example, that changes the trace to:
+ //
+ // P.1 locks file A.
+ // Q.3 locks file B.
+ // Q.3 blocks on file A.
+ // P.2 spuriously fails to lock file B and goes to sleep.
+ // P.1 unlocks file A.
+ // Q.3 unblocks and locks file A.
+ // Q.3 unlocks files A and B.
+ // P.2 wakes up and locks file B.
+ // P.2 unlocks file B.
+ //
+ // We know that the retry loop will not introduce a *spurious* livelock
+ // because, according to the POSIX specification, EDEADLK is only to be
+ // returned when “the lock is blocked by a lock from another process”.
+ // If that process is blocked on some lock that we are holding, then the
+ // resulting livelock is due to a real deadlock (and would manifest as such
+ // when using, for example, the flock implementation of this package).
+ // If the other process is *not* blocked on some other lock that we are
+ // holding, then it will eventually release the requested lock.
+
+ nextSleep := 1 * time.Millisecond
+ const maxSleep = 500 * time.Millisecond
+ for {
+ err = setlkw(f.Fd(), lt)
+ if err != syscall.EDEADLK {
+ break
+ }
+ time.Sleep(nextSleep)
+
+ nextSleep += nextSleep
+ if nextSleep > maxSleep {
+ nextSleep = maxSleep
+ }
+ // Apply 10% jitter to avoid synchronizing collisions when we finally unblock.
+ nextSleep += time.Duration((0.1*rand.Float64() - 0.05) * float64(nextSleep))
+ }
+
+ if err != nil {
+ unlock(f)
+ return &fs.PathError{
+ Op: lt.String(),
+ Path: f.Name(),
+ Err: err,
+ }
+ }
+
+ return nil
+}
+
+func unlock(f File) error {
+ var owner File
+
+ mu.Lock()
+ ino, ok := inodes[f]
+ if ok {
+ owner = locks[ino].owner
+ }
+ mu.Unlock()
+
+ if owner != f {
+ panic("unlock called on a file that is not locked")
+ }
+
+ err := setlkw(f.Fd(), syscall.F_UNLCK)
+
+ mu.Lock()
+ l := locks[ino]
+ if len(l.queue) == 0 {
+ // No waiters: remove the map entry.
+ delete(locks, ino)
+ } else {
+ // The first waiter is sending us their file now.
+ // Receive it and update the queue.
+ l.owner = <-l.queue[0]
+ l.queue = l.queue[1:]
+ locks[ino] = l
+ }
+ delete(inodes, f)
+ mu.Unlock()
+
+ return err
+}
+
+// setlkw calls FcntlFlock with F_SETLKW for the entire file indicated by fd.
+func setlkw(fd uintptr, lt lockType) error {
+ for {
+ err := syscall.FcntlFlock(fd, syscall.F_SETLKW, &syscall.Flock_t{
+ Type: int16(lt),
+ Whence: io.SeekStart,
+ Start: 0,
+ Len: 0, // All bytes.
+ })
+ if err != syscall.EINTR {
+ return err
+ }
+ }
+}
+
+func isNotSupported(err error) bool {
+ return err == syscall.ENOSYS || err == syscall.ENOTSUP || err == syscall.EOPNOTSUPP || err == ErrNotSupported
+}
diff --git a/vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock_other.go b/vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock_other.go
new file mode 100644
index 000000000..7bdd62bd9
--- /dev/null
+++ b/vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock_other.go
@@ -0,0 +1,36 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !unix && !windows
+
+package filelock
+
+import "io/fs"
+
+type lockType int8
+
+const (
+ readLock = iota + 1
+ writeLock
+)
+
+func lock(f File, lt lockType) error {
+ return &fs.PathError{
+ Op: lt.String(),
+ Path: f.Name(),
+ Err: ErrNotSupported,
+ }
+}
+
+func unlock(f File) error {
+ return &fs.PathError{
+ Op: "Unlock",
+ Path: f.Name(),
+ Err: ErrNotSupported,
+ }
+}
+
+func isNotSupported(err error) bool {
+ return err == ErrNotSupported
+}
diff --git a/vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock_unix.go b/vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock_unix.go
new file mode 100644
index 000000000..d7778d05d
--- /dev/null
+++ b/vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock_unix.go
@@ -0,0 +1,44 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build darwin || dragonfly || freebsd || illumos || linux || netbsd || openbsd
+
+package filelock
+
+import (
+ "io/fs"
+ "syscall"
+)
+
+type lockType int16
+
+const (
+ readLock lockType = syscall.LOCK_SH
+ writeLock lockType = syscall.LOCK_EX
+)
+
+func lock(f File, lt lockType) (err error) {
+ for {
+ err = syscall.Flock(int(f.Fd()), int(lt))
+ if err != syscall.EINTR {
+ break
+ }
+ }
+ if err != nil {
+ return &fs.PathError{
+ Op: lt.String(),
+ Path: f.Name(),
+ Err: err,
+ }
+ }
+ return nil
+}
+
+func unlock(f File) error {
+ return lock(f, syscall.LOCK_UN)
+}
+
+func isNotSupported(err error) bool {
+ return err == syscall.ENOSYS || err == syscall.ENOTSUP || err == syscall.EOPNOTSUPP || err == ErrNotSupported
+}
diff --git a/vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock_windows.go b/vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock_windows.go
new file mode 100644
index 000000000..ceab65b02
--- /dev/null
+++ b/vendor/github.com/rogpeppe/go-internal/lockedfile/internal/filelock/filelock_windows.go
@@ -0,0 +1,67 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build windows
+
+package filelock
+
+import (
+ "io/fs"
+ "syscall"
+
+ "github.com/rogpeppe/go-internal/internal/syscall/windows"
+)
+
+type lockType uint32
+
+const (
+ readLock lockType = 0
+ writeLock lockType = windows.LOCKFILE_EXCLUSIVE_LOCK
+)
+
+const (
+ reserved = 0
+ allBytes = ^uint32(0)
+)
+
+func lock(f File, lt lockType) error {
+ // Per https://golang.org/issue/19098, “Programs currently expect the Fd
+ // method to return a handle that uses ordinary synchronous I/O.”
+ // However, LockFileEx still requires an OVERLAPPED structure,
+ // which contains the file offset of the beginning of the lock range.
+ // We want to lock the entire file, so we leave the offset as zero.
+ ol := new(syscall.Overlapped)
+
+ err := windows.LockFileEx(syscall.Handle(f.Fd()), uint32(lt), reserved, allBytes, allBytes, ol)
+ if err != nil {
+ return &fs.PathError{
+ Op: lt.String(),
+ Path: f.Name(),
+ Err: err,
+ }
+ }
+ return nil
+}
+
+func unlock(f File) error {
+ ol := new(syscall.Overlapped)
+ err := windows.UnlockFileEx(syscall.Handle(f.Fd()), reserved, allBytes, allBytes, ol)
+ if err != nil {
+ return &fs.PathError{
+ Op: "Unlock",
+ Path: f.Name(),
+ Err: err,
+ }
+ }
+ return nil
+}
+
+func isNotSupported(err error) bool {
+ switch err {
+ case windows.ERROR_NOT_SUPPORTED, windows.ERROR_CALL_NOT_IMPLEMENTED, ErrNotSupported:
+ return true
+ default:
+ return false
+ }
+}
diff --git a/vendor/github.com/rogpeppe/go-internal/lockedfile/lockedfile.go b/vendor/github.com/rogpeppe/go-internal/lockedfile/lockedfile.go
new file mode 100644
index 000000000..82e1a8967
--- /dev/null
+++ b/vendor/github.com/rogpeppe/go-internal/lockedfile/lockedfile.go
@@ -0,0 +1,187 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package lockedfile creates and manipulates files whose contents should only
+// change atomically.
+package lockedfile
+
+import (
+ "fmt"
+ "io"
+ "io/fs"
+ "os"
+ "runtime"
+)
+
+// A File is a locked *os.File.
+//
+// Closing the file releases the lock.
+//
+// If the program exits while a file is locked, the operating system releases
+// the lock but may not do so promptly: callers must ensure that all locked
+// files are closed before exiting.
+type File struct {
+ osFile
+ closed bool
+}
+
+// osFile embeds a *os.File while keeping the pointer itself unexported.
+// (When we close a File, it must be the same file descriptor that we opened!)
+type osFile struct {
+ *os.File
+}
+
+// OpenFile is like os.OpenFile, but returns a locked file.
+// If flag includes os.O_WRONLY or os.O_RDWR, the file is write-locked;
+// otherwise, it is read-locked.
+func OpenFile(name string, flag int, perm fs.FileMode) (*File, error) {
+ var (
+ f = new(File)
+ err error
+ )
+ f.osFile.File, err = openFile(name, flag, perm)
+ if err != nil {
+ return nil, err
+ }
+
+ // Although the operating system will drop locks for open files when the go
+ // command exits, we want to hold locks for as little time as possible, and we
+ // especially don't want to leave a file locked after we're done with it. Our
+ // Close method is what releases the locks, so use a finalizer to report
+ // missing Close calls on a best-effort basis.
+ runtime.SetFinalizer(f, func(f *File) {
+ panic(fmt.Sprintf("lockedfile.File %s became unreachable without a call to Close", f.Name()))
+ })
+
+ return f, nil
+}
+
+// Open is like os.Open, but returns a read-locked file.
+func Open(name string) (*File, error) {
+ return OpenFile(name, os.O_RDONLY, 0)
+}
+
+// Create is like os.Create, but returns a write-locked file.
+func Create(name string) (*File, error) {
+ return OpenFile(name, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0666)
+}
+
+// Edit creates the named file with mode 0666 (before umask),
+// but does not truncate existing contents.
+//
+// If Edit succeeds, methods on the returned File can be used for I/O.
+// The associated file descriptor has mode O_RDWR and the file is write-locked.
+func Edit(name string) (*File, error) {
+ return OpenFile(name, os.O_RDWR|os.O_CREATE, 0666)
+}
+
+// Close unlocks and closes the underlying file.
+//
+// Close may be called multiple times; all calls after the first will return a
+// non-nil error.
+func (f *File) Close() error {
+ if f.closed {
+ return &fs.PathError{
+ Op: "close",
+ Path: f.Name(),
+ Err: fs.ErrClosed,
+ }
+ }
+ f.closed = true
+
+ err := closeFile(f.osFile.File)
+ runtime.SetFinalizer(f, nil)
+ return err
+}
+
+// Read opens the named file with a read-lock and returns its contents.
+func Read(name string) ([]byte, error) {
+ f, err := Open(name)
+ if err != nil {
+ return nil, err
+ }
+ defer f.Close()
+
+ return io.ReadAll(f)
+}
+
+// Write opens the named file (creating it with the given permissions if needed),
+// then write-locks it and overwrites it with the given content.
+func Write(name string, content io.Reader, perm fs.FileMode) (err error) {
+ f, err := OpenFile(name, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, perm)
+ if err != nil {
+ return err
+ }
+
+ _, err = io.Copy(f, content)
+ if closeErr := f.Close(); err == nil {
+ err = closeErr
+ }
+ return err
+}
+
+// Transform invokes t with the result of reading the named file, with its lock
+// still held.
+//
+// If t returns a nil error, Transform then writes the returned contents back to
+// the file, making a best effort to preserve existing contents on error.
+//
+// t must not modify the slice passed to it.
+func Transform(name string, t func([]byte) ([]byte, error)) (err error) {
+ f, err := Edit(name)
+ if err != nil {
+ return err
+ }
+ defer f.Close()
+
+ old, err := io.ReadAll(f)
+ if err != nil {
+ return err
+ }
+
+ new, err := t(old)
+ if err != nil {
+ return err
+ }
+
+ if len(new) > len(old) {
+ // The overall file size is increasing, so write the tail first: if we're
+ // about to run out of space on the disk, we would rather detect that
+ // failure before we have overwritten the original contents.
+ if _, err := f.WriteAt(new[len(old):], int64(len(old))); err != nil {
+ // Make a best effort to remove the incomplete tail.
+ f.Truncate(int64(len(old)))
+ return err
+ }
+ }
+
+ // We're about to overwrite the old contents. In case of failure, make a best
+ // effort to roll back before we close the file.
+ defer func() {
+ if err != nil {
+ if _, err := f.WriteAt(old, 0); err == nil {
+ f.Truncate(int64(len(old)))
+ }
+ }
+ }()
+
+ if len(new) >= len(old) {
+ if _, err := f.WriteAt(new[:len(old)], 0); err != nil {
+ return err
+ }
+ } else {
+ if _, err := f.WriteAt(new, 0); err != nil {
+ return err
+ }
+ // The overall file size is decreasing, so shrink the file to its final size
+ // after writing. We do this after writing (instead of before) so that if
+ // the write fails, enough filesystem space will likely still be reserved
+ // to contain the previous contents.
+ if err := f.Truncate(int64(len(new))); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
diff --git a/vendor/github.com/rogpeppe/go-internal/lockedfile/lockedfile_filelock.go b/vendor/github.com/rogpeppe/go-internal/lockedfile/lockedfile_filelock.go
new file mode 100644
index 000000000..454c3a42c
--- /dev/null
+++ b/vendor/github.com/rogpeppe/go-internal/lockedfile/lockedfile_filelock.go
@@ -0,0 +1,65 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !plan9
+
+package lockedfile
+
+import (
+ "io/fs"
+ "os"
+
+ "github.com/rogpeppe/go-internal/lockedfile/internal/filelock"
+)
+
+func openFile(name string, flag int, perm fs.FileMode) (*os.File, error) {
+ // On BSD systems, we could add the O_SHLOCK or O_EXLOCK flag to the OpenFile
+ // call instead of locking separately, but we have to support separate locking
+ // calls for Linux and Windows anyway, so it's simpler to use that approach
+ // consistently.
+
+ f, err := os.OpenFile(name, flag&^os.O_TRUNC, perm)
+ if err != nil {
+ return nil, err
+ }
+
+ switch flag & (os.O_RDONLY | os.O_WRONLY | os.O_RDWR) {
+ case os.O_WRONLY, os.O_RDWR:
+ err = filelock.Lock(f)
+ default:
+ err = filelock.RLock(f)
+ }
+ if err != nil {
+ f.Close()
+ return nil, err
+ }
+
+ if flag&os.O_TRUNC == os.O_TRUNC {
+ if err := f.Truncate(0); err != nil {
+ // The documentation for os.O_TRUNC says “if possible, truncate file when
+ // opened”, but doesn't define “possible” (golang.org/issue/28699).
+ // We'll treat regular files (and symlinks to regular files) as “possible”
+ // and ignore errors for the rest.
+ if fi, statErr := f.Stat(); statErr != nil || fi.Mode().IsRegular() {
+ filelock.Unlock(f)
+ f.Close()
+ return nil, err
+ }
+ }
+ }
+
+ return f, nil
+}
+
+func closeFile(f *os.File) error {
+ // Since locking syscalls operate on file descriptors, we must unlock the file
+ // while the descriptor is still valid — that is, before the file is closed —
+ // and avoid unlocking files that are already closed.
+ err := filelock.Unlock(f)
+
+ if closeErr := f.Close(); err == nil {
+ err = closeErr
+ }
+ return err
+}
diff --git a/vendor/github.com/rogpeppe/go-internal/lockedfile/lockedfile_plan9.go b/vendor/github.com/rogpeppe/go-internal/lockedfile/lockedfile_plan9.go
new file mode 100644
index 000000000..a2ce794b9
--- /dev/null
+++ b/vendor/github.com/rogpeppe/go-internal/lockedfile/lockedfile_plan9.go
@@ -0,0 +1,94 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build plan9
+
+package lockedfile
+
+import (
+ "io/fs"
+ "math/rand"
+ "os"
+ "strings"
+ "time"
+)
+
+// Opening an exclusive-use file returns an error.
+// The expected error strings are:
+//
+// - "open/create -- file is locked" (cwfs, kfs)
+// - "exclusive lock" (fossil)
+// - "exclusive use file already open" (ramfs)
+var lockedErrStrings = [...]string{
+ "file is locked",
+ "exclusive lock",
+ "exclusive use file already open",
+}
+
+// Even though plan9 doesn't support the Lock/RLock/Unlock functions to
+// manipulate already-open files, IsLocked is still meaningful: os.OpenFile
+// itself may return errors that indicate that a file with the ModeExclusive bit
+// set is already open.
+func isLocked(err error) bool {
+ s := err.Error()
+
+ for _, frag := range lockedErrStrings {
+ if strings.Contains(s, frag) {
+ return true
+ }
+ }
+
+ return false
+}
+
+func openFile(name string, flag int, perm fs.FileMode) (*os.File, error) {
+ // Plan 9 uses a mode bit instead of explicit lock/unlock syscalls.
+ //
+ // Per http://man.cat-v.org/plan_9/5/stat: “Exclusive use files may be open
+ // for I/O by only one fid at a time across all clients of the server. If a
+ // second open is attempted, it draws an error.”
+ //
+ // So we can try to open a locked file, but if it fails we're on our own to
+ // figure out when it becomes available. We'll use exponential backoff with
+ // some jitter and an arbitrary limit of 500ms.
+
+ // If the file was unpacked or created by some other program, it might not
+ // have the ModeExclusive bit set. Set it before we call OpenFile, so that we
+ // can be confident that a successful OpenFile implies exclusive use.
+ if fi, err := os.Stat(name); err == nil {
+ if fi.Mode()&fs.ModeExclusive == 0 {
+ if err := os.Chmod(name, fi.Mode()|fs.ModeExclusive); err != nil {
+ return nil, err
+ }
+ }
+ } else if !os.IsNotExist(err) {
+ return nil, err
+ }
+
+ nextSleep := 1 * time.Millisecond
+ const maxSleep = 500 * time.Millisecond
+ for {
+ f, err := os.OpenFile(name, flag, perm|fs.ModeExclusive)
+ if err == nil {
+ return f, nil
+ }
+
+ if !isLocked(err) {
+ return nil, err
+ }
+
+ time.Sleep(nextSleep)
+
+ nextSleep += nextSleep
+ if nextSleep > maxSleep {
+ nextSleep = maxSleep
+ }
+ // Apply 10% jitter to avoid synchronizing collisions.
+ nextSleep += time.Duration((0.1*rand.Float64() - 0.05) * float64(nextSleep))
+ }
+}
+
+func closeFile(f *os.File) error {
+ return f.Close()
+}
diff --git a/vendor/github.com/rogpeppe/go-internal/lockedfile/mutex.go b/vendor/github.com/rogpeppe/go-internal/lockedfile/mutex.go
new file mode 100644
index 000000000..180a36c62
--- /dev/null
+++ b/vendor/github.com/rogpeppe/go-internal/lockedfile/mutex.go
@@ -0,0 +1,67 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package lockedfile
+
+import (
+ "fmt"
+ "os"
+ "sync"
+)
+
+// A Mutex provides mutual exclusion within and across processes by locking a
+// well-known file. Such a file generally guards some other part of the
+// filesystem: for example, a Mutex file in a directory might guard access to
+// the entire tree rooted in that directory.
+//
+// Mutex does not implement sync.Locker: unlike a sync.Mutex, a lockedfile.Mutex
+// can fail to lock (e.g. if there is a permission error in the filesystem).
+//
+// Like a sync.Mutex, a Mutex may be included as a field of a larger struct but
+// must not be copied after first use. The Path field must be set before first
+// use and must not be changed thereafter.
+type Mutex struct {
+ Path string // The path to the well-known lock file. Must be non-empty.
+ mu sync.Mutex // A redundant mutex. The race detector doesn't know about file locking, so in tests we may need to lock something that it understands.
+}
+
+// MutexAt returns a new Mutex with Path set to the given non-empty path.
+func MutexAt(path string) *Mutex {
+ if path == "" {
+ panic("lockedfile.MutexAt: path must be non-empty")
+ }
+ return &Mutex{Path: path}
+}
+
+func (mu *Mutex) String() string {
+ return fmt.Sprintf("lockedfile.Mutex(%s)", mu.Path)
+}
+
+// Lock attempts to lock the Mutex.
+//
+// If successful, Lock returns a non-nil unlock function: it is provided as a
+// return-value instead of a separate method to remind the caller to check the
+// accompanying error. (See https://golang.org/issue/20803.)
+func (mu *Mutex) Lock() (unlock func(), err error) {
+ if mu.Path == "" {
+ panic("lockedfile.Mutex: missing Path during Lock")
+ }
+
+ // We could use either O_RDWR or O_WRONLY here. If we choose O_RDWR and the
+ // file at mu.Path is write-only, the call to OpenFile will fail with a
+ // permission error. That's actually what we want: if we add an RLock method
+ // in the future, it should call OpenFile with O_RDONLY and will require the
+ // files must be readable, so we should not let the caller make any
+ // assumptions about Mutex working with write-only files.
+ f, err := OpenFile(mu.Path, os.O_RDWR|os.O_CREATE, 0666)
+ if err != nil {
+ return nil, err
+ }
+ mu.mu.Lock()
+
+ return func() {
+ mu.mu.Unlock()
+ f.Close()
+ }, nil
+}
diff --git a/vendor/github.com/securego/gosec/v2/action.yml b/vendor/github.com/securego/gosec/v2/action.yml
index d4bc351c8..2b2deaab7 100644
--- a/vendor/github.com/securego/gosec/v2/action.yml
+++ b/vendor/github.com/securego/gosec/v2/action.yml
@@ -10,7 +10,7 @@ inputs:
runs:
using: 'docker'
- image: 'docker://securego/gosec:2.21.1'
+ image: 'docker://securego/gosec:2.21.3'
args:
- ${{ inputs.args }}
diff --git a/vendor/github.com/securego/gosec/v2/analyzers/analyzerslist.go b/vendor/github.com/securego/gosec/v2/analyzers/analyzerslist.go
index f2157442f..8d222384a 100644
--- a/vendor/github.com/securego/gosec/v2/analyzers/analyzerslist.go
+++ b/vendor/github.com/securego/gosec/v2/analyzers/analyzerslist.go
@@ -51,7 +51,7 @@ func (al *AnalyzerList) AnalyzersInfo() (map[string]AnalyzerDefinition, map[stri
type AnalyzerFilter func(string) bool
// NewAnalyzerFilter is a closure that will include/exclude the analyzer ID's based on
-// the supplied boolean value.
+// the supplied boolean value (false means include the listed analyzers, true means exclude them).
func NewAnalyzerFilter(action bool, analyzerIDs ...string) AnalyzerFilter {
analyzerlist := make(map[string]bool)
for _, analyzer := range analyzerIDs {
diff --git a/vendor/github.com/securego/gosec/v2/analyzers/conversion_overflow.go b/vendor/github.com/securego/gosec/v2/analyzers/conversion_overflow.go
index 3ef4825af..bebe9b834 100644
--- a/vendor/github.com/securego/gosec/v2/analyzers/conversion_overflow.go
+++ b/vendor/github.com/securego/gosec/v2/analyzers/conversion_overflow.go
@@ -40,7 +40,7 @@ type integer struct {
type rangeResult struct {
minValue int
maxValue uint
- explixitPositiveVals []uint
+ explicitPositiveVals []uint
explicitNegativeVals []int
isRangeCheck bool
convertFound bool
@@ -271,7 +271,7 @@ func hasExplicitRangeCheck(instr *ssa.Convert, dstType string) bool {
if result.isRangeCheck {
minValue = max(minValue, &result.minValue)
maxValue = min(maxValue, &result.maxValue)
- explicitPositiveVals = append(explicitPositiveVals, result.explixitPositiveVals...)
+ explicitPositiveVals = append(explicitPositiveVals, result.explicitPositiveVals...)
explicitNegativeVals = append(explicitNegativeVals, result.explicitNegativeVals...)
}
case *ssa.Call:
@@ -325,16 +325,17 @@ func getResultRange(ifInstr *ssa.If, instr *ssa.Convert, visitedIfs map[*ssa.If]
result.convertFound = true
result.minValue = max(result.minValue, thenBounds.minValue)
result.maxValue = min(result.maxValue, thenBounds.maxValue)
- result.explixitPositiveVals = append(result.explixitPositiveVals, thenBounds.explixitPositiveVals...)
- result.explicitNegativeVals = append(result.explicitNegativeVals, thenBounds.explicitNegativeVals...)
} else if elseBounds.convertFound {
result.convertFound = true
result.minValue = max(result.minValue, elseBounds.minValue)
result.maxValue = min(result.maxValue, elseBounds.maxValue)
- result.explixitPositiveVals = append(result.explixitPositiveVals, elseBounds.explixitPositiveVals...)
- result.explicitNegativeVals = append(result.explicitNegativeVals, elseBounds.explicitNegativeVals...)
}
+ result.explicitPositiveVals = append(result.explicitPositiveVals, thenBounds.explixitPositiveVals...)
+ result.explicitNegativeVals = append(result.explicitNegativeVals, thenBounds.explicitNegativeVals...)
+ result.explicitPositiveVals = append(result.explicitPositiveVals, elseBounds.explixitPositiveVals...)
+ result.explicitNegativeVals = append(result.explicitNegativeVals, elseBounds.explicitNegativeVals...)
+
return result
}
@@ -344,15 +345,26 @@ func updateResultFromBinOp(result *rangeResult, binOp *ssa.BinOp, instr *ssa.Con
operandsFlipped := false
compareVal, op := getRealValueFromOperation(instr.X)
- if x != compareVal {
- y, operandsFlipped = x, true
+
+ // Handle FieldAddr
+ if fieldAddr, ok := compareVal.(*ssa.FieldAddr); ok {
+ compareVal = fieldAddr
+ }
+
+ if !isSameOrRelated(x, compareVal) {
+ y = x
+ operandsFlipped = true
}
constVal, ok := y.(*ssa.Const)
if !ok {
return
}
-
+ // TODO: constVal.Value nil check avoids #1229 panic but seems to be hiding a bug in the code above or in x/tools/go/ssa.
+ if constVal.Value == nil {
+ // log.Fatalf("[gosec] constVal.Value is nil flipped=%t, constVal=%#v, binOp=%#v", operandsFlipped, constVal, binOp)
+ return
+ }
switch binOp.Op {
case token.LEQ, token.LSS:
updateMinMaxForLessOrEqual(result, constVal, binOp.Op, operandsFlipped, successPathConvert)
@@ -362,25 +374,12 @@ func updateResultFromBinOp(result *rangeResult, binOp *ssa.BinOp, instr *ssa.Con
if !successPathConvert {
break
}
-
- // Determine if the constant value is positive or negative.
- if strings.Contains(constVal.String(), "-") {
- result.explicitNegativeVals = append(result.explicitNegativeVals, int(constVal.Int64()))
- } else {
- result.explixitPositiveVals = append(result.explixitPositiveVals, uint(constVal.Uint64()))
- }
-
+ updateExplicitValues(result, constVal)
case token.NEQ:
if successPathConvert {
break
}
-
- // Determine if the constant value is positive or negative.
- if strings.Contains(constVal.String(), "-") {
- result.explicitNegativeVals = append(result.explicitNegativeVals, int(constVal.Int64()))
- } else {
- result.explixitPositiveVals = append(result.explixitPositiveVals, uint(constVal.Uint64()))
- }
+ updateExplicitValues(result, constVal)
}
if op == "neg" {
@@ -391,11 +390,19 @@ func updateResultFromBinOp(result *rangeResult, binOp *ssa.BinOp, instr *ssa.Con
result.maxValue = uint(min)
}
if max <= math.MaxInt {
- result.minValue = int(max) //nolint:gosec
+ result.minValue = int(max)
}
}
}
+func updateExplicitValues(result *rangeResult, constVal *ssa.Const) {
+ if strings.Contains(constVal.String(), "-") {
+ result.explicitNegativeVals = append(result.explicitNegativeVals, int(constVal.Int64()))
+ } else {
+ result.explicitPositiveVals = append(result.explicitPositiveVals, uint(constVal.Uint64()))
+ }
+}
+
func updateMinMaxForLessOrEqual(result *rangeResult, constVal *ssa.Const, op token.Token, operandsFlipped bool, successPathConvert bool) {
// If the success path has a conversion and the operands are not flipped, then the constant value is the maximum value.
if successPathConvert && !operandsFlipped {
@@ -439,6 +446,8 @@ func walkBranchForConvert(block *ssa.BasicBlock, instr *ssa.Convert, visitedIfs
if result.isRangeCheck {
bounds.minValue = toPtr(max(result.minValue, bounds.minValue))
bounds.maxValue = toPtr(min(result.maxValue, bounds.maxValue))
+ bounds.explixitPositiveVals = append(bounds.explixitPositiveVals, result.explicitPositiveVals...)
+ bounds.explicitNegativeVals = append(bounds.explicitNegativeVals, result.explicitNegativeVals...)
}
case *ssa.Call:
if v == instr.X {
@@ -463,9 +472,10 @@ func isRangeCheck(v ssa.Value, x ssa.Value) bool {
switch op := v.(type) {
case *ssa.BinOp:
switch op.Op {
- case token.LSS, token.LEQ, token.GTR, token.GEQ,
- token.EQL, token.NEQ:
- return op.X == compareVal || op.Y == compareVal
+ case token.LSS, token.LEQ, token.GTR, token.GEQ, token.EQL, token.NEQ:
+ leftMatch := isSameOrRelated(op.X, compareVal)
+ rightMatch := isSameOrRelated(op.Y, compareVal)
+ return leftMatch || rightMatch
}
}
return false
@@ -475,12 +485,36 @@ func getRealValueFromOperation(v ssa.Value) (ssa.Value, string) {
switch v := v.(type) {
case *ssa.UnOp:
if v.Op == token.SUB {
- return v.X, "neg"
+ val, _ := getRealValueFromOperation(v.X)
+ return val, "neg"
}
+ return getRealValueFromOperation(v.X)
+ case *ssa.FieldAddr:
+ return v, "field"
+ case *ssa.Alloc:
+ return v, "alloc"
}
return v, ""
}
+func isSameOrRelated(a, b ssa.Value) bool {
+ aVal, _ := getRealValueFromOperation(a)
+ bVal, _ := getRealValueFromOperation(b)
+
+ if aVal == bVal {
+ return true
+ }
+
+ // Check if both are FieldAddr operations referring to the same field of the same struct
+ if aField, aOk := aVal.(*ssa.FieldAddr); aOk {
+ if bField, bOk := bVal.(*ssa.FieldAddr); bOk {
+ return aField.X == bField.X && aField.Field == bField.Field
+ }
+ }
+
+ return false
+}
+
func explicitValsInRange(explicitPosVals []uint, explicitNegVals []int, dstInt integer) bool {
if len(explicitPosVals) == 0 && len(explicitNegVals) == 0 {
return false
diff --git a/vendor/github.com/sivchari/tenv/goreleaser.yaml b/vendor/github.com/sivchari/tenv/goreleaser.yaml
new file mode 100644
index 000000000..56db218d7
--- /dev/null
+++ b/vendor/github.com/sivchari/tenv/goreleaser.yaml
@@ -0,0 +1,26 @@
+project_name: tenv
+
+env:
+ - GO111MODULE=on
+
+builds:
+ - id: tenv
+ main: ./cmd/tenv/main.go
+ binary: tenv
+ env:
+ - CGO_ENABLED=0
+ goos:
+ - linux
+ - darwin
+ goarch:
+ - amd64
+ - arm64
+
+archives:
+ - id: tenv
+ builds:
+ - tenv
+ name_template: '{{ .Binary }}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}'
+ format_overrides:
+ - goos: windows
+ format: zip
diff --git a/vendor/github.com/sivchari/tenv/tenv.go b/vendor/github.com/sivchari/tenv/tenv.go
index 999c5289d..657e60e4e 100644
--- a/vendor/github.com/sivchari/tenv/tenv.go
+++ b/vendor/github.com/sivchari/tenv/tenv.go
@@ -2,6 +2,8 @@ package tenv
import (
"go/ast"
+ "go/token"
+ "go/types"
"strings"
"golang.org/x/tools/go/analysis"
@@ -51,7 +53,7 @@ func run(pass *analysis.Pass) (interface{}, error) {
}
func checkFuncDecl(pass *analysis.Pass, f *ast.FuncDecl, fileName string) {
- argName, ok := targetRunner(f.Type.Params.List, fileName)
+ argName, ok := targetRunner(pass, f.Type.Params.List, fileName)
if !ok {
return
}
@@ -59,7 +61,7 @@ func checkFuncDecl(pass *analysis.Pass, f *ast.FuncDecl, fileName string) {
}
func checkFuncLit(pass *analysis.Pass, f *ast.FuncLit, fileName string) {
- argName, ok := targetRunner(f.Type.Params.List, fileName)
+ argName, ok := targetRunner(pass, f.Type.Params.List, fileName)
if !ok {
return
}
@@ -70,45 +72,33 @@ func checkStmts(pass *analysis.Pass, stmts []ast.Stmt, funcName, argName string)
for _, stmt := range stmts {
switch stmt := stmt.(type) {
case *ast.ExprStmt:
- if !checkExprStmt(pass, stmt, funcName, argName) {
- continue
- }
+ checkExprStmt(pass, stmt, funcName, argName)
case *ast.IfStmt:
- if !checkIfStmt(pass, stmt, funcName, argName) {
- continue
- }
+ checkIfStmt(pass, stmt, funcName, argName)
case *ast.AssignStmt:
- if !checkAssignStmt(pass, stmt, funcName, argName) {
- continue
- }
+ checkAssignStmt(pass, stmt, funcName, argName)
case *ast.ForStmt:
checkForStmt(pass, stmt, funcName, argName)
}
}
}
-func checkExprStmt(pass *analysis.Pass, stmt *ast.ExprStmt, funcName, argName string) bool {
+func checkExprStmt(pass *analysis.Pass, stmt *ast.ExprStmt, funcName, argName string) {
callExpr, ok := stmt.X.(*ast.CallExpr)
if !ok {
- return false
+ return
}
checkArgs(pass, callExpr.Args, funcName, argName)
- fun, ok := callExpr.Fun.(*ast.SelectorExpr)
- if !ok {
- return false
- }
- x, ok := fun.X.(*ast.Ident)
- if !ok {
- return false
+ ident, ok := callExpr.Fun.(*ast.Ident)
+ if ok {
+ obj := pass.TypesInfo.ObjectOf(ident)
+ checkObj(pass, obj, stmt.Pos(), funcName, argName)
}
- targetName := x.Name + "." + fun.Sel.Name
- if targetName == "os.Setenv" {
- if argName == "" {
- argName = "testing"
- }
- pass.Reportf(stmt.Pos(), "os.Setenv() can be replaced by `%s.Setenv()` in %s", argName, funcName)
+ fun, ok := callExpr.Fun.(*ast.SelectorExpr)
+ if ok {
+ obj := pass.TypesInfo.ObjectOf(fun.Sel)
+ checkObj(pass, obj, stmt.Pos(), funcName, argName)
}
- return true
}
func checkArgs(pass *analysis.Pass, args []ast.Expr, funcName, argName string) {
@@ -117,83 +107,82 @@ func checkArgs(pass *analysis.Pass, args []ast.Expr, funcName, argName string) {
if !ok {
continue
}
- fun, ok := callExpr.Fun.(*ast.SelectorExpr)
- if !ok {
- continue
- }
- x, ok := fun.X.(*ast.Ident)
- if !ok {
- continue
+ ident, ok := callExpr.Fun.(*ast.Ident)
+ if ok {
+ obj := pass.TypesInfo.ObjectOf(ident)
+ checkObj(pass, obj, arg.Pos(), funcName, argName)
}
- targetName := x.Name + "." + fun.Sel.Name
- if targetName == "os.Setenv" {
- if argName == "" {
- argName = "testing"
- }
- pass.Reportf(arg.Pos(), "os.Setenv() can be replaced by `%s.Setenv()` in %s", argName, funcName)
+ fun, ok := callExpr.Fun.(*ast.SelectorExpr)
+ if ok {
+ obj := pass.TypesInfo.ObjectOf(fun.Sel)
+ checkObj(pass, obj, arg.Pos(), funcName, argName)
}
}
}
-func checkIfStmt(pass *analysis.Pass, stmt *ast.IfStmt, funcName, argName string) bool {
+func checkIfStmt(pass *analysis.Pass, stmt *ast.IfStmt, funcName, argName string) {
assignStmt, ok := stmt.Init.(*ast.AssignStmt)
if !ok {
- return false
+ return
}
rhs, ok := assignStmt.Rhs[0].(*ast.CallExpr)
if !ok {
- return false
- }
- fun, ok := rhs.Fun.(*ast.SelectorExpr)
- if !ok {
- return false
+ return
}
- x, ok := fun.X.(*ast.Ident)
- if !ok {
- return false
+ ident, ok := rhs.Fun.(*ast.Ident)
+ if ok {
+ obj := pass.TypesInfo.ObjectOf(ident)
+ checkObj(pass, obj, stmt.Pos(), funcName, argName)
}
- targetName := x.Name + "." + fun.Sel.Name
- if targetName == "os.Setenv" {
- if argName == "" {
- argName = "testing"
- }
- pass.Reportf(stmt.Pos(), "os.Setenv() can be replaced by `%s.Setenv()` in %s", argName, funcName)
+ fun, ok := rhs.Fun.(*ast.SelectorExpr)
+ if ok {
+ obj := pass.TypesInfo.ObjectOf(fun.Sel)
+ checkObj(pass, obj, stmt.Pos(), funcName, argName)
}
- return true
}
-func checkAssignStmt(pass *analysis.Pass, stmt *ast.AssignStmt, funcName, argName string) bool {
+func checkAssignStmt(pass *analysis.Pass, stmt *ast.AssignStmt, funcName, argName string) {
rhs, ok := stmt.Rhs[0].(*ast.CallExpr)
if !ok {
- return false
+ return
+ }
+ ident, ok := rhs.Fun.(*ast.Ident)
+ if ok {
+ obj := pass.TypesInfo.ObjectOf(ident)
+ checkObj(pass, obj, stmt.Pos(), funcName, argName)
}
fun, ok := rhs.Fun.(*ast.SelectorExpr)
- if !ok {
- return false
+ if ok {
+ obj := pass.TypesInfo.ObjectOf(fun.Sel)
+ checkObj(pass, obj, stmt.Pos(), funcName, argName)
}
- x, ok := fun.X.(*ast.Ident)
- if !ok {
- return false
+}
+
+func checkObj(pass *analysis.Pass, obj types.Object, pos token.Pos, funcName, argName string) {
+ // For built-in objects, obj.Pkg() returns nil.
+ var pkgPrefix string
+ if pkg := obj.Pkg(); pkg != nil {
+ pkgPrefix = pkg.Name() + "."
}
- targetName := x.Name + "." + fun.Sel.Name
+
+ targetName := pkgPrefix + obj.Name()
if targetName == "os.Setenv" {
if argName == "" {
argName = "testing"
}
- pass.Reportf(stmt.Pos(), "os.Setenv() can be replaced by `%s.Setenv()` in %s", argName, funcName)
+ pass.Reportf(pos, "os.Setenv() can be replaced by `%s.Setenv()` in %s", argName, funcName)
}
- return true
}
func checkForStmt(pass *analysis.Pass, stmt *ast.ForStmt, funcName, argName string) {
checkStmts(pass, stmt.Body.List, funcName, argName)
}
-func targetRunner(params []*ast.Field, fileName string) (string, bool) {
+func targetRunner(pass *analysis.Pass, params []*ast.Field, fileName string) (string, bool) {
for _, p := range params {
switch typ := p.Type.(type) {
case *ast.StarExpr:
- if checkStarExprTarget(typ) {
+ if checkStarExprTarget(pass, typ) {
if len(p.Names) == 0 {
return "", false
}
@@ -201,7 +190,7 @@ func targetRunner(params []*ast.Field, fileName string) (string, bool) {
return argName, true
}
case *ast.SelectorExpr:
- if checkSelectorExprTarget(typ) {
+ if checkSelectorExprTarget(pass, typ) {
if len(p.Names) == 0 {
return "", false
}
@@ -216,17 +205,12 @@ func targetRunner(params []*ast.Field, fileName string) (string, bool) {
return "", false
}
-func checkStarExprTarget(typ *ast.StarExpr) bool {
+func checkStarExprTarget(pass *analysis.Pass, typ *ast.StarExpr) bool {
selector, ok := typ.X.(*ast.SelectorExpr)
if !ok {
return false
}
- x, ok := selector.X.(*ast.Ident)
- if !ok {
- return false
- }
- targetName := x.Name + "." + selector.Sel.Name
- switch targetName {
+ switch pass.TypesInfo.TypeOf(selector).String() {
case "testing.T", "testing.B":
return true
default:
@@ -234,11 +218,6 @@ func checkStarExprTarget(typ *ast.StarExpr) bool {
}
}
-func checkSelectorExprTarget(typ *ast.SelectorExpr) bool {
- x, ok := typ.X.(*ast.Ident)
- if !ok {
- return false
- }
- targetName := x.Name + "." + typ.Sel.Name
- return targetName == "testing.TB"
+func checkSelectorExprTarget(pass *analysis.Pass, typ *ast.SelectorExpr) bool {
+ return pass.TypesInfo.TypeOf(typ).String() == "testing.TB"
}
diff --git a/vendor/github.com/sonatard/noctx/.gitignore b/vendor/github.com/sonatard/noctx/.gitignore
index 2d830686d..8dfb40ff1 100644
--- a/vendor/github.com/sonatard/noctx/.gitignore
+++ b/vendor/github.com/sonatard/noctx/.gitignore
@@ -1 +1,2 @@
coverage.out
+/noctx
diff --git a/vendor/github.com/sonatard/noctx/.golangci.yml b/vendor/github.com/sonatard/noctx/.golangci.yml
index 55ebeebdb..726cb7c89 100644
--- a/vendor/github.com/sonatard/noctx/.golangci.yml
+++ b/vendor/github.com/sonatard/noctx/.golangci.yml
@@ -1,14 +1,38 @@
-run:
- linters-settings:
- govet:
- enable-all: true
- linters:
- enable-all: true
- disable:
- - gochecknoglobals
- - gomnd
- - gocognit
- - nestif
- - nilnil
- - paralleltest
- - varnamelen \ No newline at end of file
+linters:
+ enable-all: true
+ disable:
+ - execinquery # deprecated
+ - exportloopref # deprecated
+ - gomnd # deprecated
+ - gochecknoglobals
+ - exhaustruct
+ - mnd
+ - gocognit
+ - nestif
+ - nilnil
+ - paralleltest
+ - varnamelen
+
+linters-settings:
+ govet:
+ enable-all: true
+ perfsprint:
+ err-error: true
+ errorf: true
+ sprintf1: true
+ strconcat: false
+ depguard:
+ rules:
+ main:
+ deny:
+ - pkg: "github.com/instana/testify"
+ desc: not allowed
+ - pkg: "github.com/pkg/errors"
+ desc: Should be replaced by standard lib errors package
+
+output:
+ show-stats: true
+ sort-results: true
+ sort-order:
+ - linter
+ - file
diff --git a/vendor/github.com/sonatard/noctx/.goreleaser.yml b/vendor/github.com/sonatard/noctx/.goreleaser.yml
new file mode 100644
index 000000000..9000b50a1
--- /dev/null
+++ b/vendor/github.com/sonatard/noctx/.goreleaser.yml
@@ -0,0 +1,37 @@
+version: 2
+project_name: noctx
+
+builds:
+ - binary: noctx
+
+ main: ./cmd/noctx/main.go
+ env:
+ - CGO_ENABLED=0
+ flags:
+ - -trimpath
+ goos:
+ - windows
+ - darwin
+ - linux
+ goarch:
+ - amd64
+ - 386
+ - arm
+ - arm64
+ goarm:
+ - 7
+ - 6
+ - 5
+ ignore:
+ - goos: darwin
+ goarch: 386
+
+archives:
+ - id: noctx
+ name_template: '{{ .ProjectName }}_v{{ .Version }}_{{ .Os }}_{{ .Arch }}{{ if .Arm }}v{{ .Arm }}{{ end }}{{ if .Mips }}_{{ .Mips }}{{ end }}'
+ format: tar.gz
+ format_overrides:
+ - goos: windows
+ format: zip
+ files:
+ - LICENSE
diff --git a/vendor/github.com/sonatard/noctx/Makefile b/vendor/github.com/sonatard/noctx/Makefile
index 1a27f6b59..585a9d7e2 100644
--- a/vendor/github.com/sonatard/noctx/Makefile
+++ b/vendor/github.com/sonatard/noctx/Makefile
@@ -1,6 +1,6 @@
-.PHONY: all imports test lint
+.PHONY: all imports test lint build
-all: imports test lint
+all: imports test lint build
imports:
goimports -w ./
@@ -14,3 +14,5 @@ test_coverage:
lint:
golangci-lint run ./...
+build:
+ go build -ldflags "-s -w" -trimpath ./cmd/noctx/
diff --git a/vendor/github.com/sonatard/noctx/README.md b/vendor/github.com/sonatard/noctx/README.md
index b3793fc96..00d0024c5 100644
--- a/vendor/github.com/sonatard/noctx/README.md
+++ b/vendor/github.com/sonatard/noctx/README.md
@@ -52,10 +52,11 @@ linters:
$ golangci-lint run
# Only noctx execute
-golangci-lint run --disable-all -E noctx
+golangci-lint run --enable-only noctx
```
## Detection rules
+
- Executing following functions
- `net/http.Get`
- `net/http.Head`
@@ -68,6 +69,7 @@ golangci-lint run --disable-all -E noctx
- `http.Request` returned by `http.NewRequest` function and passes it to other function.
## How to fix
+
- Send http request using `(*http.Client).Do(*http.Request)` method.
- In Go 1.13 and later, use `http.NewRequestWithContext` function instead of using `http.NewRequest` function.
- In Go 1.12 and earlier, call `(http.Request).WithContext(ctx)` after `http.NewRequest`.
@@ -107,7 +109,7 @@ func SendWithContext(ctx context.Context, body io.Reader) error {
// Change NewRequest to NewRequestWithContext and pass context it
req, err := http.NewRequestWithContext(ctx, http.MethodPost, "http://example.com", body)
if err != nil {
- return nil
+ return err
}
_, err = http.DefaultClient.Do(req)
if err != nil {
@@ -168,6 +170,7 @@ func main() {
```
## Reference
+
- [net/http - NewRequest](https://golang.org/pkg/net/http/#NewRequest)
- [net/http - NewRequestWithContext](https://golang.org/pkg/net/http/#NewRequestWithContext)
- [net/http - Request.WithContext](https://golang.org/pkg/net/http/#Request.WithContext)
diff --git a/vendor/github.com/sonatard/noctx/ngfunc/main.go b/vendor/github.com/sonatard/noctx/ngfunc/main.go
index 46306218d..8d8d97d93 100644
--- a/vendor/github.com/sonatard/noctx/ngfunc/main.go
+++ b/vendor/github.com/sonatard/noctx/ngfunc/main.go
@@ -39,6 +39,7 @@ func ngCalledFuncs(pass *analysis.Pass, ngFuncs []*types.Func) []*Report {
if !ok {
panic(fmt.Sprintf("%T is not *buildssa.SSA", pass.ResultOf[buildssa.Analyzer]))
}
+
for _, sf := range ssa.SrcFuncs {
for _, b := range sf.Blocks {
for _, instr := range b.Instrs {
diff --git a/vendor/github.com/sonatard/noctx/ngfunc/report.go b/vendor/github.com/sonatard/noctx/ngfunc/report.go
index e50051798..735aa1cf9 100644
--- a/vendor/github.com/sonatard/noctx/ngfunc/report.go
+++ b/vendor/github.com/sonatard/noctx/ngfunc/report.go
@@ -24,6 +24,6 @@ func (n *Report) Message() string {
func report(pass *analysis.Pass, reports []*Report) {
for _, report := range reports {
- pass.Reportf(report.Pos(), report.Message())
+ pass.Reportf(report.Pos(), "%s", report.Message())
}
}
diff --git a/vendor/github.com/sonatard/noctx/ngfunc/types.go b/vendor/github.com/sonatard/noctx/ngfunc/types.go
index f1877386c..8f81c6aa2 100644
--- a/vendor/github.com/sonatard/noctx/ngfunc/types.go
+++ b/vendor/github.com/sonatard/noctx/ngfunc/types.go
@@ -1,7 +1,7 @@
package ngfunc
import (
- "fmt"
+ "errors"
"go/types"
"strings"
@@ -9,7 +9,7 @@ import (
"golang.org/x/tools/go/analysis"
)
-var errNotFound = fmt.Errorf("function not found")
+var errNotFound = errors.New("function not found")
func typeFuncs(pass *analysis.Pass, funcs []string) []*types.Func {
fs := make([]*types.Func, 0, len(funcs))
diff --git a/vendor/github.com/sonatard/noctx/noctx.go b/vendor/github.com/sonatard/noctx/noctx.go
index 89e0446ec..fbffb66e7 100644
--- a/vendor/github.com/sonatard/noctx/noctx.go
+++ b/vendor/github.com/sonatard/noctx/noctx.go
@@ -2,6 +2,7 @@ package noctx
import (
"fmt"
+
"github.com/sonatard/noctx/ngfunc"
"github.com/sonatard/noctx/reqwithoutctx"
"golang.org/x/tools/go/analysis"
diff --git a/vendor/github.com/sonatard/noctx/reqwithoutctx/report.go b/vendor/github.com/sonatard/noctx/reqwithoutctx/report.go
index 1c94e3148..3bf2ea01f 100644
--- a/vendor/github.com/sonatard/noctx/reqwithoutctx/report.go
+++ b/vendor/github.com/sonatard/noctx/reqwithoutctx/report.go
@@ -21,6 +21,6 @@ func (n *Report) Message() string {
func report(pass *analysis.Pass, reports []*Report) {
for _, report := range reports {
- pass.Reportf(report.Pos(), report.Message())
+ pass.Reportf(report.Pos(), "%s", report.Message())
}
}
diff --git a/vendor/github.com/sonatard/noctx/reqwithoutctx/ssa.go b/vendor/github.com/sonatard/noctx/reqwithoutctx/ssa.go
index d7e0f5084..62707c6b5 100644
--- a/vendor/github.com/sonatard/noctx/reqwithoutctx/ssa.go
+++ b/vendor/github.com/sonatard/noctx/reqwithoutctx/ssa.go
@@ -5,7 +5,6 @@ import (
"go/types"
"github.com/gostaticanalysis/analysisutil"
-
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/buildssa"
"golang.org/x/tools/go/ssa"
@@ -97,7 +96,7 @@ func (a *Analyzer) usedReqByCall(call *ssa.Call) []*ssa.Extract {
exts := make([]*ssa.Extract, 0, len(args))
// skip net/http.Request method call
- if call.Common().Signature().Recv() != nil && types.Identical(call.Value().Type(), a.requestType) {
+ if recv := call.Common().Signature().Recv(); recv != nil && types.Identical(recv.Type(), a.requestType) {
return exts
}
diff --git a/vendor/github.com/tetafro/godot/checks.go b/vendor/github.com/tetafro/godot/checks.go
index df5019f6c..d30766358 100644
--- a/vendor/github.com/tetafro/godot/checks.go
+++ b/vendor/github.com/tetafro/godot/checks.go
@@ -230,6 +230,8 @@ func isSpecialBlock(comment string) bool {
strings.Contains(comment, "#define")) {
return true
}
+ // This should only be skipped in test files, but we don't have this
+ // information here, so - always skip
if strings.HasPrefix(comment, "// Output:") ||
strings.HasPrefix(comment, "// Unordered output:") {
return true
diff --git a/vendor/github.com/tetafro/godot/getters.go b/vendor/github.com/tetafro/godot/getters.go
index 7d3d22fb1..de3d06e10 100644
--- a/vendor/github.com/tetafro/godot/getters.go
+++ b/vendor/github.com/tetafro/godot/getters.go
@@ -209,12 +209,8 @@ func (pf *parsedFile) getAllComments(exclude []*regexp.Regexp) []comment {
// special lines (e.g., tags or indented code examples), they are replaced
// with `specialReplacer` to skip checks for them.
// The result can be multiline.
-//
-//nolint:cyclop
func getText(comment *ast.CommentGroup, exclude []*regexp.Regexp) (s string) {
- if len(comment.List) == 1 &&
- strings.HasPrefix(comment.List[0].Text, "/*") &&
- isSpecialBlock(comment.List[0].Text) {
+ if len(comment.List) > 0 && isSpecialBlock(comment.List[0].Text) {
return ""
}
diff --git a/vendor/github.com/timonwong/loggercheck/.golangci.yml b/vendor/github.com/timonwong/loggercheck/.golangci.yml
index 287327893..6d5e17bef 100644
--- a/vendor/github.com/timonwong/loggercheck/.golangci.yml
+++ b/vendor/github.com/timonwong/loggercheck/.golangci.yml
@@ -20,7 +20,7 @@ linters-settings:
min-complexity: 15
goimports:
local-prefixes: github.com/timonwong/loggercheck
- gomnd:
+ mnd:
# don't include the "operation" and "assign"
checks:
- argument
@@ -36,7 +36,7 @@ linters-settings:
- strings.SplitN
- strconv.ParseInt
govet:
- check-shadowing: true
+ shadow: true
lll:
line-length: 140
misspell:
@@ -52,7 +52,7 @@ linters:
- dogsled
- dupl
- errcheck
- - exportloopref
+ - copyloopvar
- funlen
- gochecknoinits
- goconst
@@ -60,7 +60,7 @@ linters:
- gocyclo
- gofumpt
- goimports
- - gomnd
+ - mnd
- goprintffuncname
- gosec
- gosimple
@@ -85,10 +85,10 @@ issues:
exclude-rules:
- path: _test\.go
linters:
- - gomnd
+ - mnd
+ exclude-dirs:
+ - testdata
run:
timeout: 5m
- go: '1.17'
- skip-dirs:
- - testdata \ No newline at end of file
+ go: '1.23' \ No newline at end of file
diff --git a/vendor/github.com/timonwong/loggercheck/README.md b/vendor/github.com/timonwong/loggercheck/README.md
index 14aeca371..d8f86fc41 100644
--- a/vendor/github.com/timonwong/loggercheck/README.md
+++ b/vendor/github.com/timonwong/loggercheck/README.md
@@ -7,6 +7,9 @@ A linter checks the odd number of key and value pairs for common logger librarie
- [klog](https://github.com/kubernetes/klog)
- [logr](https://github.com/go-logr/logr)
- [zap](https://github.com/uber-go/zap)
+- [log/slog](https://pkg.go.dev/log/slog)
+
+It's recommended to use loggercheck with [golangci-lint](https://golangci-lint.run/usage/linters/#loggercheck).
## Badges
@@ -80,14 +83,14 @@ import (
func Example() {
log := logr.Discard()
- log = log.WithValues("key")
- log.Info("message", "key1", "value1", "key2", "value2", "key3")
+ log = log.WithValues("key")
+ log.Info("message", "key1", "value1", "key2", "value2", "key3")
log.Error(fmt.Errorf("error"), "message", "key1", "value1", "key2")
log.Error(fmt.Errorf("error"), "message", "key1", "value1", "key2", "value2")
var log2 logr.Logger
log2 = log
- log2.Info("message", "key1")
+ log2.Info("message", "key1")
log3 := logr.FromContextOrDiscard(context.TODO())
log3.Error(fmt.Errorf("error"), "message", "key1")
diff --git a/vendor/github.com/timonwong/loggercheck/internal/bytebufferpool/pool.go b/vendor/github.com/timonwong/loggercheck/internal/bytebufferpool/pool.go
deleted file mode 100644
index 9d88d21c4..000000000
--- a/vendor/github.com/timonwong/loggercheck/internal/bytebufferpool/pool.go
+++ /dev/null
@@ -1,22 +0,0 @@
-package bytebufferpool
-
-import (
- "bytes"
- "sync"
-)
-
-var pool = sync.Pool{
- New: func() interface{} {
- return new(bytes.Buffer)
- },
-}
-
-func Get() *bytes.Buffer {
- buf := pool.Get().(*bytes.Buffer)
- buf.Reset()
- return buf
-}
-
-func Put(buf *bytes.Buffer) {
- pool.Put(buf)
-}
diff --git a/vendor/github.com/timonwong/loggercheck/internal/checkers/checker.go b/vendor/github.com/timonwong/loggercheck/internal/checkers/checker.go
index 5615636ef..5fa1cfb2c 100644
--- a/vendor/github.com/timonwong/loggercheck/internal/checkers/checker.go
+++ b/vendor/github.com/timonwong/loggercheck/internal/checkers/checker.go
@@ -29,8 +29,7 @@ func ExecuteChecker(c Checker, pass *analysis.Pass, call CallContext, cfg Config
nparams := params.Len() // variadic => nonzero
startIndex := nparams - 1
- lastArg := params.At(nparams - 1)
- iface, ok := lastArg.Type().(*types.Slice).Elem().(*types.Interface)
+ iface, ok := types.Unalias(params.At(startIndex).Type().(*types.Slice).Elem()).(*types.Interface)
if !ok || !iface.Empty() {
return // final (args) param is not ...interface{}
}
diff --git a/vendor/github.com/timonwong/loggercheck/internal/checkers/common.go b/vendor/github.com/timonwong/loggercheck/internal/checkers/common.go
index 42cbd0193..977f5d70c 100644
--- a/vendor/github.com/timonwong/loggercheck/internal/checkers/common.go
+++ b/vendor/github.com/timonwong/loggercheck/internal/checkers/common.go
@@ -6,11 +6,10 @@ import (
"go/printer"
"go/token"
"go/types"
+ "strings"
"unicode/utf8"
"golang.org/x/tools/go/analysis"
-
- "github.com/timonwong/loggercheck/internal/bytebufferpool"
)
const (
@@ -31,9 +30,7 @@ func extractValueFromStringArg(pass *analysis.Pass, arg ast.Expr) (value string,
func renderNodeEllipsis(fset *token.FileSet, v interface{}) string {
const maxLen = 20
- buf := bytebufferpool.Get()
- defer bytebufferpool.Put(buf)
-
+ buf := &strings.Builder{}
_ = printer.Fprint(buf, fset, v)
s := buf.String()
if utf8.RuneCountInString(s) > maxLen {
diff --git a/vendor/github.com/timonwong/loggercheck/internal/checkers/filter.go b/vendor/github.com/timonwong/loggercheck/internal/checkers/filter.go
new file mode 100644
index 000000000..a09a54f99
--- /dev/null
+++ b/vendor/github.com/timonwong/loggercheck/internal/checkers/filter.go
@@ -0,0 +1,35 @@
+package checkers
+
+import (
+ "go/ast"
+ "go/types"
+
+ "golang.org/x/tools/go/analysis"
+)
+
+func filterKeyAndValues(pass *analysis.Pass, keyAndValues []ast.Expr, objName string) []ast.Expr {
+ // Check the argument count
+ filtered := make([]ast.Expr, 0, len(keyAndValues))
+ for _, arg := range keyAndValues {
+ // Skip any object type field we found
+ switch arg := arg.(type) {
+ case *ast.CallExpr, *ast.Ident:
+ typ := types.Unalias(pass.TypesInfo.TypeOf(arg))
+
+ switch typ := typ.(type) {
+ case *types.Named:
+ obj := typ.Obj()
+ if obj != nil && obj.Name() == objName {
+ continue
+ }
+
+ default:
+ // pass
+ }
+ }
+
+ filtered = append(filtered, arg)
+ }
+
+ return filtered
+}
diff --git a/vendor/github.com/timonwong/loggercheck/internal/checkers/printf/printf.go b/vendor/github.com/timonwong/loggercheck/internal/checkers/printf/printf.go
index b38f46f20..926b57e04 100644
--- a/vendor/github.com/timonwong/loggercheck/internal/checkers/printf/printf.go
+++ b/vendor/github.com/timonwong/loggercheck/internal/checkers/printf/printf.go
@@ -157,7 +157,7 @@ func (s *formatState) parsePrecision() bool {
func parsePrintfVerb(format string) *formatState {
state := &formatState{
format: format,
- flags: make([]byte, 0, 5), //nolint:gomnd
+ flags: make([]byte, 0, 5), //nolint:mnd
nbytes: 1, // There's guaranteed to be a percent sign.
}
diff --git a/vendor/github.com/timonwong/loggercheck/internal/checkers/slog.go b/vendor/github.com/timonwong/loggercheck/internal/checkers/slog.go
new file mode 100644
index 000000000..5812e6660
--- /dev/null
+++ b/vendor/github.com/timonwong/loggercheck/internal/checkers/slog.go
@@ -0,0 +1,19 @@
+package checkers
+
+import (
+ "go/ast"
+
+ "golang.org/x/tools/go/analysis"
+)
+
+type Slog struct {
+ General
+}
+
+func (z Slog) FilterKeyAndValues(pass *analysis.Pass, keyAndValues []ast.Expr) []ast.Expr {
+ // check slog.Group() constructed group slog.Attr
+ // since we also check `slog.Group` so it is OK skip here
+ return filterKeyAndValues(pass, keyAndValues, "Attr")
+}
+
+var _ Checker = (*Slog)(nil)
diff --git a/vendor/github.com/timonwong/loggercheck/internal/checkers/zap.go b/vendor/github.com/timonwong/loggercheck/internal/checkers/zap.go
index 2356f8348..4dac21f78 100644
--- a/vendor/github.com/timonwong/loggercheck/internal/checkers/zap.go
+++ b/vendor/github.com/timonwong/loggercheck/internal/checkers/zap.go
@@ -2,7 +2,6 @@ package checkers
import (
"go/ast"
- "go/types"
"golang.org/x/tools/go/analysis"
)
@@ -12,31 +11,11 @@ type Zap struct {
}
func (z Zap) FilterKeyAndValues(pass *analysis.Pass, keyAndValues []ast.Expr) []ast.Expr {
- // Check the argument count
- filtered := make([]ast.Expr, 0, len(keyAndValues))
- for _, arg := range keyAndValues {
- // Skip any zapcore.Field we found
- switch arg := arg.(type) {
- case *ast.CallExpr, *ast.Ident:
- typ := pass.TypesInfo.TypeOf(arg)
- switch typ := typ.(type) {
- case *types.Named:
- obj := typ.Obj()
- // This is a strongly-typed field. Consume it and move on.
- // Actually it's go.uber.org/zap/zapcore.Field, however for simplicity
- // we don't check the import path
- if obj != nil && obj.Name() == "Field" {
- continue
- }
- default:
- // pass
- }
- }
-
- filtered = append(filtered, arg)
- }
-
- return filtered
+ // Skip any zapcore.Field we found
+ // This is a strongly-typed field. Consume it and move on.
+ // Actually it's go.uber.org/zap/zapcore.Field, however for simplicity
+ // we don't check the import path
+ return filterKeyAndValues(pass, keyAndValues, "Field")
}
var _ Checker = (*Zap)(nil)
diff --git a/vendor/github.com/timonwong/loggercheck/internal/rules/rules.go b/vendor/github.com/timonwong/loggercheck/internal/rules/rules.go
index 27d6ebb27..3ed69b5bd 100644
--- a/vendor/github.com/timonwong/loggercheck/internal/rules/rules.go
+++ b/vendor/github.com/timonwong/loggercheck/internal/rules/rules.go
@@ -7,8 +7,6 @@ import (
"go/types"
"io"
"strings"
-
- "github.com/timonwong/loggercheck/internal/bytebufferpool"
)
var ErrInvalidRule = errors.New("invalid rule format")
@@ -44,8 +42,7 @@ func (rs *Ruleset) Match(fn *types.Func) bool {
}
func receiverTypeOf(recvType types.Type) string {
- buf := bytebufferpool.Get()
- defer bytebufferpool.Put(buf)
+ buf := &strings.Builder{}
var recvNamed *types.Named
switch recvType := recvType.(type) {
diff --git a/vendor/github.com/timonwong/loggercheck/loggercheck.go b/vendor/github.com/timonwong/loggercheck/loggercheck.go
index 8bd10aee8..d418c7629 100644
--- a/vendor/github.com/timonwong/loggercheck/loggercheck.go
+++ b/vendor/github.com/timonwong/loggercheck/loggercheck.go
@@ -54,7 +54,7 @@ func newLoggerCheck(opts ...Option) *loggercheck {
}
fs.StringVar(&l.ruleFile, "rulefile", "", "path to a file contains a list of rules")
- fs.Var(&l.disable, "disable", "comma-separated list of disabled logger checker (kitlog,klog,logr,zap)")
+ fs.Var(&l.disable, "disable", "comma-separated list of disabled logger checker (kitlog,klog,logr,zap,slog)")
fs.BoolVar(&l.requireStringKey, "requirestringkey", false, "require all logging keys to be inlined constant strings")
fs.BoolVar(&l.noPrintfLike, "noprintflike", false, "require printf-like format specifier not present in args")
diff --git a/vendor/github.com/timonwong/loggercheck/staticrules.go b/vendor/github.com/timonwong/loggercheck/staticrules.go
index f955b3434..1398e47b2 100644
--- a/vendor/github.com/timonwong/loggercheck/staticrules.go
+++ b/vendor/github.com/timonwong/loggercheck/staticrules.go
@@ -39,10 +39,38 @@ var (
"github.com/go-kit/log.WithSuffix",
"(github.com/go-kit/log.Logger).Log",
}),
+ mustNewStaticRuleSet("slog", []string{
+ "log/slog.Group",
+
+ "log/slog.With",
+
+ "log/slog.Debug",
+ "log/slog.Info",
+ "log/slog.Warn",
+ "log/slog.Error",
+
+ "log/slog.DebugContext",
+ "log/slog.InfoContext",
+ "log/slog.WarnContext",
+ "log/slog.ErrorContext",
+
+ "(*log/slog.Logger).With",
+
+ "(*log/slog.Logger).Debug",
+ "(*log/slog.Logger).Info",
+ "(*log/slog.Logger).Warn",
+ "(*log/slog.Logger).Error",
+
+ "(*log/slog.Logger).DebugContext",
+ "(*log/slog.Logger).InfoContext",
+ "(*log/slog.Logger).WarnContext",
+ "(*log/slog.Logger).ErrorContext",
+ }),
}
checkerByRulesetName = map[string]checkers.Checker{
// by default, checkers.General will be used.
- "zap": checkers.Zap{},
+ "zap": checkers.Zap{},
+ "slog": checkers.Slog{},
}
)
diff --git a/vendor/github.com/uudashr/iface/LICENSE b/vendor/github.com/uudashr/iface/LICENSE
new file mode 100644
index 000000000..261eeb9e9
--- /dev/null
+++ b/vendor/github.com/uudashr/iface/LICENSE
@@ -0,0 +1,201 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vendor/github.com/uudashr/iface/identical/doc.go b/vendor/github.com/uudashr/iface/identical/doc.go
new file mode 100644
index 000000000..5b848a913
--- /dev/null
+++ b/vendor/github.com/uudashr/iface/identical/doc.go
@@ -0,0 +1,3 @@
+// Package identical defines an Analyzer that identifies interfaces in the same
+// package with identical methods or constraints.
+package identical
diff --git a/vendor/github.com/uudashr/iface/identical/identical.go b/vendor/github.com/uudashr/iface/identical/identical.go
new file mode 100644
index 000000000..bd573cfc4
--- /dev/null
+++ b/vendor/github.com/uudashr/iface/identical/identical.go
@@ -0,0 +1,138 @@
+package identical
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+ "reflect"
+
+ "github.com/uudashr/iface/internal/directive"
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/inspector"
+)
+
+// Analyzer is the identical interface analyzer.
+var Analyzer = newAnalyzer()
+
+func newAnalyzer() *analysis.Analyzer {
+ r := runner{}
+
+ analyzer := &analysis.Analyzer{
+ Name: "identical",
+ Doc: "Identifies interfaces in the same package that have identical method sets",
+ URL: "https://pkg.go.dev/github.com/uudashr/iface/duplicate",
+ Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Run: r.run,
+ }
+
+ analyzer.Flags.BoolVar(&r.debug, "debug", false, "enable debug mode")
+
+ return analyzer
+}
+
+type runner struct {
+ debug bool
+}
+
+func (r *runner) run(pass *analysis.Pass) (interface{}, error) {
+ inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
+ // Collect interface type declarations
+ ifaceDecls := make(map[string]token.Pos)
+ ifaceTypes := make(map[string]*types.Interface)
+
+ nodeFilter := []ast.Node{
+ (*ast.GenDecl)(nil),
+ }
+
+ inspect.Preorder(nodeFilter, func(n ast.Node) {
+ decl, ok := n.(*ast.GenDecl)
+ if !ok {
+ return
+ }
+
+ if r.debug {
+ fmt.Printf("GenDecl: %v specs=%d\n", decl.Tok, len(decl.Specs))
+ }
+
+ if decl.Tok != token.TYPE {
+ return
+ }
+
+ for i, spec := range decl.Specs {
+ if r.debug {
+ fmt.Printf(" spec[%d]: %v %v\n", i, spec, reflect.TypeOf(spec))
+ }
+
+ ts, ok := spec.(*ast.TypeSpec)
+ if !ok {
+ return
+ }
+
+ ifaceType, ok := ts.Type.(*ast.InterfaceType)
+ if !ok {
+ return
+ }
+
+ if r.debug {
+ fmt.Println("Interface declaration:", ts.Name.Name, ts.Pos(), len(ifaceType.Methods.List))
+
+ for i, field := range ifaceType.Methods.List {
+ switch ft := field.Type.(type) {
+ case *ast.FuncType:
+ fmt.Printf(" [%d] Field: func %s %v %v\n", i, field.Names[0].Name, reflect.TypeOf(field.Type), field.Pos())
+ case *ast.Ident:
+ fmt.Printf(" [%d] Field: iface %s %v %v\n", i, ft.Name, reflect.TypeOf(field.Type), field.Pos())
+ default:
+ fmt.Printf(" [%d] Field: unknown %v\n", i, reflect.TypeOf(ft))
+ }
+ }
+ }
+
+ dir := directive.ParseIgnore(decl.Doc)
+ if dir != nil && dir.ShouldIgnore(pass.Analyzer.Name) {
+ // skip due to ignore directive
+ continue
+ }
+
+ ifaceDecls[ts.Name.Name] = ts.Pos()
+
+ obj := pass.TypesInfo.Defs[ts.Name]
+ if obj == nil {
+ return
+ }
+
+ iface, ok := obj.Type().Underlying().(*types.Interface)
+ if !ok {
+ return
+ }
+
+ ifaceTypes[ts.Name.Name] = iface
+ }
+ })
+
+Loop:
+ for name, typ := range ifaceTypes {
+ for otherName, otherTyp := range ifaceTypes {
+ if name == otherName {
+ continue
+ }
+
+ if !types.Identical(typ, otherTyp) {
+ continue
+ }
+
+ if r.debug {
+ fmt.Println("Identical interface:", name, "and", otherName)
+ }
+
+ pass.Reportf(ifaceDecls[name], "interface %s contains identical methods or type constraints from another interface, causing redundancy", name)
+
+ continue Loop
+ }
+ }
+
+ return nil, nil
+}
diff --git a/vendor/github.com/uudashr/iface/internal/directive/directive.go b/vendor/github.com/uudashr/iface/internal/directive/directive.go
new file mode 100644
index 000000000..05c62928e
--- /dev/null
+++ b/vendor/github.com/uudashr/iface/internal/directive/directive.go
@@ -0,0 +1,76 @@
+package directive
+
+import (
+ "go/ast"
+ "slices"
+ "strings"
+)
+
+// Ignore represents a special instruction embedded in the source code.
+//
+// The directive can be as simple as
+//
+// //iface:ignore
+//
+// or consist of name
+//
+// //iface:ignore=unused
+//
+// or multiple names
+//
+// //iface:ignore=unused,identical
+type Ignore struct {
+ Names []string
+}
+
+// ParseIgnore parses the ignore directive from the comments.
+func ParseIgnore(doc *ast.CommentGroup) *Ignore {
+ if doc == nil {
+ return nil
+ }
+
+ for _, comment := range doc.List {
+ text := strings.TrimSpace(comment.Text)
+ if text == "//iface:ignore" {
+ return &Ignore{}
+ }
+
+ // parse the Names if exists
+ if val, found := strings.CutPrefix(text, "//iface:ignore="); found {
+ val = strings.TrimSpace(val)
+ if val == "" {
+ return &Ignore{}
+ }
+
+ names := strings.Split(val, ",")
+ if len(names) == 0 {
+ continue
+ }
+
+ for i, name := range names {
+ names[i] = strings.TrimSpace(name)
+ }
+
+ if len(names) > 0 {
+ return &Ignore{Names: names}
+ }
+
+ return &Ignore{}
+ }
+ }
+
+ return nil
+}
+
+func (i *Ignore) hasName(name string) bool {
+ return slices.Contains(i.Names, name)
+}
+
+// ShouldIgnore returns true if the name should be ignored.
+func (i *Ignore) ShouldIgnore(name string) bool {
+ if len(i.Names) == 0 {
+ return true
+ }
+
+ return i.hasName(name)
+}
diff --git a/vendor/github.com/uudashr/iface/opaque/doc.go b/vendor/github.com/uudashr/iface/opaque/doc.go
new file mode 100644
index 000000000..1e5a53897
--- /dev/null
+++ b/vendor/github.com/uudashr/iface/opaque/doc.go
@@ -0,0 +1,3 @@
+// Package opaque defines an Analyzer that detects interfaces that are used
+// to abstract a single concrete implementation only.
+package opaque
diff --git a/vendor/github.com/uudashr/iface/opaque/opaque.go b/vendor/github.com/uudashr/iface/opaque/opaque.go
new file mode 100644
index 000000000..fda0f001b
--- /dev/null
+++ b/vendor/github.com/uudashr/iface/opaque/opaque.go
@@ -0,0 +1,321 @@
+package opaque
+
+import (
+ "fmt"
+ "go/ast"
+ "go/types"
+ "reflect"
+ "strings"
+
+ "github.com/uudashr/iface/internal/directive"
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/inspector"
+)
+
+// Analyzer is the opaque interface analyzer.
+var Analyzer = newAnalyzer()
+
+func newAnalyzer() *analysis.Analyzer {
+ r := runner{}
+
+ analyzer := &analysis.Analyzer{
+ Name: "opaque",
+ Doc: "Identifies functions that return interfaces, but the actual returned value is always a single concrete implementation.",
+ URL: "https://pkg.go.dev/github.com/uudashr/iface/opaque",
+ Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Run: r.run,
+ }
+
+ analyzer.Flags.BoolVar(&r.debug, "debug", false, "enable debug mode")
+
+ return analyzer
+}
+
+type runner struct {
+ debug bool
+}
+
+func (r *runner) run(pass *analysis.Pass) (interface{}, error) {
+ inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
+ // Find function declarations that return an interface
+
+ nodeFilter := []ast.Node{
+ (*ast.FuncDecl)(nil),
+ }
+
+ inspect.Preorder(nodeFilter, func(n ast.Node) {
+ funcDecl := n.(*ast.FuncDecl)
+
+ if funcDecl.Recv != nil {
+ // skip methods
+ return
+ }
+
+ if funcDecl.Type.Results == nil {
+ // skip functions without return values
+ return
+ }
+
+ if r.debug {
+ fmt.Printf("Function declaration %s\n", funcDecl.Name.Name)
+ fmt.Printf(" Results len=%d\n", len(funcDecl.Type.Results.List))
+ }
+
+ dir := directive.ParseIgnore(funcDecl.Doc)
+ if dir != nil && dir.ShouldIgnore(pass.Analyzer.Name) {
+ // skip ignored function
+ return
+ }
+
+ // Pre-check, only function that has interface return type will be processed
+ var hasInterfaceReturnType bool
+
+ var outCount int
+
+ for i, result := range funcDecl.Type.Results.List {
+ outInc := 1
+ if namesLen := len(result.Names); namesLen > 0 {
+ outInc = namesLen
+ }
+
+ outCount += outInc
+
+ resType := result.Type
+ typ := pass.TypesInfo.TypeOf(resType)
+
+ if r.debug {
+ fmt.Printf(" [%d] len=%d %v %v %v | %v %v interface=%t\n", i, len(result.Names), result.Names, resType, reflect.TypeOf(resType), typ, reflect.TypeOf(typ), types.IsInterface(typ))
+ }
+
+ if types.IsInterface(typ) && !hasInterfaceReturnType {
+ hasInterfaceReturnType = true
+ }
+ }
+
+ if r.debug {
+ fmt.Printf(" hasInterface=%t outCount=%d\n", hasInterfaceReturnType, outCount)
+ }
+
+ if !hasInterfaceReturnType {
+ // skip, since it has no interface return type
+ return
+ }
+
+ // Collect types on every return statement
+ retStmtTypes := make([]map[types.Type]struct{}, outCount)
+ for i := range retStmtTypes {
+ retStmtTypes[i] = make(map[types.Type]struct{})
+ }
+
+ ast.Inspect(funcDecl.Body, func(n ast.Node) bool {
+ switch n := n.(type) {
+ case *ast.FuncLit:
+ // ignore nested functions
+ return false
+ case *ast.ReturnStmt:
+ if r.debug {
+ fmt.Printf(" Return statements %v len=%d\n", n.Results, len(n.Results))
+ }
+
+ for i, result := range n.Results {
+ if r.debug {
+ fmt.Printf(" [%d] %v %v\n", i, result, reflect.TypeOf(result))
+ }
+
+ switch res := result.(type) {
+ case *ast.CallExpr:
+ if r.debug {
+ fmt.Printf(" CallExpr Fun: %v %v\n", res.Fun, reflect.TypeOf(res.Fun))
+ }
+
+ typ := pass.TypesInfo.TypeOf(res)
+ switch typ := typ.(type) {
+ case *types.Tuple:
+ for i := 0; i < typ.Len(); i++ {
+ v := typ.At(i)
+ vTyp := v.Type()
+ retStmtTypes[i][vTyp] = struct{}{}
+
+ if r.debug {
+ fmt.Printf(" Tuple [%d]: %v %v | %v %v interface=%t\n", i, v, reflect.TypeOf(v), vTyp, reflect.TypeOf(vTyp), types.IsInterface(vTyp))
+ }
+ }
+ default:
+ retStmtTypes[i][typ] = struct{}{}
+ }
+
+ case *ast.Ident:
+ if r.debug {
+ fmt.Printf(" Ident: %v %v\n", res, reflect.TypeOf(res))
+ }
+
+ typ := pass.TypesInfo.TypeOf(res)
+
+ if r.debug {
+ fmt.Printf(" Ident type: %v %v interface=%t\n", typ, reflect.TypeOf(typ), types.IsInterface(typ))
+ }
+
+ retStmtTypes[i][typ] = struct{}{}
+ case *ast.UnaryExpr:
+ if r.debug {
+ fmt.Printf(" UnaryExpr X: %v \n", res.X)
+ }
+
+ typ := pass.TypesInfo.TypeOf(res)
+
+ if r.debug {
+ fmt.Printf(" UnaryExpr type: %v %v interface=%t\n", typ, reflect.TypeOf(typ), types.IsInterface(typ))
+ }
+
+ retStmtTypes[i][typ] = struct{}{}
+ default:
+ if r.debug {
+ fmt.Printf(" Unknown: %v %v\n", res, reflect.TypeOf(res))
+ }
+
+ typ := pass.TypesInfo.TypeOf(res)
+ retStmtTypes[i][typ] = struct{}{}
+ }
+ }
+
+ return false
+ default:
+ return true
+ }
+ })
+
+ // Compare func return types with the return statement types
+ var nextIdx int
+
+ for _, result := range funcDecl.Type.Results.List {
+ resType := result.Type
+ typ := pass.TypesInfo.TypeOf(resType)
+
+ consumeCount := 1
+ if len(result.Names) > 0 {
+ consumeCount = len(result.Names)
+ }
+
+ currentIdx := nextIdx
+ nextIdx += consumeCount
+
+ // Check return type
+ if !types.IsInterface(typ) {
+ // it is a concrete type
+ continue
+ }
+
+ if typ.String() == "error" {
+ // very common case to have return type error
+ continue
+ }
+
+ if !fromSamePackage(pass, typ) {
+ // ignore interface from other package
+ continue
+ }
+
+ // Check statement type
+ stmtTyps := retStmtTypes[currentIdx]
+
+ stmtTypsSize := len(stmtTyps)
+ if stmtTypsSize > 1 {
+ // it has multiple implementation
+ continue
+ }
+
+ if stmtTypsSize == 0 {
+ // function use named return value, while return statement is empty
+ continue
+ }
+
+ var stmtTyp types.Type
+ for t := range stmtTyps {
+ // expect only one, we don't have to break it
+ stmtTyp = t
+ }
+
+ if types.IsInterface(stmtTyp) {
+ // not concrete type, skip
+ continue
+ }
+
+ if r.debug {
+ fmt.Printf("stmtType: %v %v | %v %v\n", stmtTyp, reflect.TypeOf(stmtTyp), stmtTyp.Underlying(), reflect.TypeOf(stmtTyp.Underlying()))
+ }
+
+ switch stmtTyp := stmtTyp.(type) {
+ case *types.Basic:
+ if stmtTyp.Kind() == types.UntypedNil {
+ // ignore nil
+ continue
+ }
+ case *types.Named:
+ if _, ok := stmtTyp.Underlying().(*types.Signature); ok {
+ // skip function type
+ continue
+ }
+ }
+
+ retTypeName := typ.String()
+ if fromSamePackage(pass, typ) {
+ retTypeName = removePkgPrefix(retTypeName)
+ }
+
+ stmtTypName := stmtTyp.String()
+ if fromSamePackage(pass, stmtTyp) {
+ stmtTypName = removePkgPrefix(stmtTypName)
+ }
+
+ pass.Reportf(result.Pos(),
+ "%s function return %s interface at the %s result, abstract a single concrete implementation of %s",
+ funcDecl.Name.Name,
+ retTypeName,
+ positionStr(currentIdx),
+ stmtTypName)
+ }
+ })
+
+ return nil, nil
+}
+
+func positionStr(idx int) string {
+ switch idx {
+ case 0:
+ return "1st"
+ case 1:
+ return "2nd"
+ case 2:
+ return "3rd"
+ default:
+ return fmt.Sprintf("%dth", idx+1)
+ }
+}
+
+func fromSamePackage(pass *analysis.Pass, typ types.Type) bool {
+ switch typ := typ.(type) {
+ case *types.Named:
+ currentPkg := pass.Pkg
+ ifacePkg := typ.Obj().Pkg()
+
+ return currentPkg == ifacePkg
+ case *types.Pointer:
+ return fromSamePackage(pass, typ.Elem())
+ default:
+ return false
+ }
+}
+
+func removePkgPrefix(typeStr string) string {
+ if typeStr[0] == '*' {
+ return "*" + removePkgPrefix(typeStr[1:])
+ }
+
+ if lastDot := strings.LastIndex(typeStr, "."); lastDot != -1 {
+ return typeStr[lastDot+1:]
+ }
+
+ return typeStr
+}
diff --git a/vendor/github.com/uudashr/iface/unused/doc.go b/vendor/github.com/uudashr/iface/unused/doc.go
new file mode 100644
index 000000000..d5b6f24bb
--- /dev/null
+++ b/vendor/github.com/uudashr/iface/unused/doc.go
@@ -0,0 +1,3 @@
+// Package unused defines an Analyzer that identifies interfaces that are not
+// used anywhere in the same package where the interface is defined.
+package unused
diff --git a/vendor/github.com/uudashr/iface/unused/unused.go b/vendor/github.com/uudashr/iface/unused/unused.go
new file mode 100644
index 000000000..c2efbf52c
--- /dev/null
+++ b/vendor/github.com/uudashr/iface/unused/unused.go
@@ -0,0 +1,138 @@
+package unused
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "reflect"
+ "slices"
+ "strings"
+
+ "github.com/uudashr/iface/internal/directive"
+ "golang.org/x/tools/go/analysis"
+ "golang.org/x/tools/go/analysis/passes/inspect"
+ "golang.org/x/tools/go/ast/inspector"
+)
+
+// Analyzer is the unused interface analyzer.
+var Analyzer = newAnalyzer()
+
+func newAnalyzer() *analysis.Analyzer {
+ r := runner{}
+
+ analyzer := &analysis.Analyzer{
+ Name: "unused",
+ Doc: "Identifies interfaces that are not used anywhere in the same package where the interface is defined",
+ URL: "https://pkg.go.dev/github.com/uudashr/iface/unused",
+ Requires: []*analysis.Analyzer{inspect.Analyzer},
+ Run: r.run,
+ }
+
+ analyzer.Flags.BoolVar(&r.debug, "debug", false, "enable debug mode")
+ analyzer.Flags.StringVar(&r.exclude, "exclude", "", "comma-separated list of packages to exclude from the check")
+
+ return analyzer
+}
+
+type runner struct {
+ debug bool
+ exclude string
+}
+
+func (r *runner) run(pass *analysis.Pass) (interface{}, error) {
+ excludes := strings.Split(r.exclude, ",")
+ if slices.Contains(excludes, pass.Pkg.Path()) {
+ return nil, nil
+ }
+
+ inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
+
+ // Collect all interface type declarations
+ ifaceDecls := make(map[string]token.Pos)
+
+ nodeFilter := []ast.Node{
+ (*ast.GenDecl)(nil),
+ }
+
+ inspect.Preorder(nodeFilter, func(n ast.Node) {
+ decl, ok := n.(*ast.GenDecl)
+ if !ok {
+ return
+ }
+
+ if r.debug {
+ fmt.Printf("GenDecl: %v specs=%d\n", decl.Tok, len(decl.Specs))
+ }
+
+ if decl.Tok != token.TYPE {
+ return
+ }
+
+ for i, spec := range decl.Specs {
+ if r.debug {
+ fmt.Printf(" spec[%d]: %v %v\n", i, spec, reflect.TypeOf(spec))
+ }
+
+ ts, ok := spec.(*ast.TypeSpec)
+ if !ok {
+ continue
+ }
+
+ _, ok = ts.Type.(*ast.InterfaceType)
+ if !ok {
+ return
+ }
+
+ if r.debug {
+ fmt.Println(" Interface type declaration:", ts.Name.Name, ts.Pos())
+ }
+
+ dir := directive.ParseIgnore(decl.Doc)
+ if dir != nil && dir.ShouldIgnore(pass.Analyzer.Name) {
+ // skip due to ignore directive
+ continue
+ }
+
+ ifaceDecls[ts.Name.Name] = ts.Pos()
+ }
+ })
+
+ if r.debug {
+ var ifaceNames []string
+ for name := range ifaceDecls {
+ ifaceNames = append(ifaceNames, name)
+ }
+
+ fmt.Println("Declared interfaces:", ifaceNames)
+ }
+
+ // Inspect whether the interface is used within the package
+ nodeFilter = []ast.Node{
+ (*ast.Ident)(nil),
+ }
+
+ inspect.Preorder(nodeFilter, func(n ast.Node) {
+ ident, ok := n.(*ast.Ident)
+ if !ok {
+ return
+ }
+
+ pos := ifaceDecls[ident.Name]
+ if pos == ident.Pos() {
+ // The identifier is the interface type declaration
+ return
+ }
+
+ delete(ifaceDecls, ident.Name)
+ })
+
+ if r.debug {
+ fmt.Printf("Package %s %s\n", pass.Pkg.Path(), pass.Pkg.Name())
+ }
+
+ for name, pos := range ifaceDecls {
+ pass.Reportf(pos, "interface %s is declared but not used within the package", name)
+ }
+
+ return nil, nil
+}
diff --git a/vendor/go-simpler.org/musttag/builtins.go b/vendor/go-simpler.org/musttag/builtins.go
index 3305513f8..60fa89413 100644
--- a/vendor/go-simpler.org/musttag/builtins.go
+++ b/vendor/go-simpler.org/musttag/builtins.go
@@ -3,131 +3,65 @@ package musttag
// builtins is a set of functions supported out of the box.
var builtins = []Func{
// https://pkg.go.dev/encoding/json
- {
- Name: "encoding/json.Marshal", Tag: "json", ArgPos: 0,
- ifaceWhitelist: []string{"encoding/json.Marshaler", "encoding.TextMarshaler"},
- },
- {
- Name: "encoding/json.MarshalIndent", Tag: "json", ArgPos: 0,
- ifaceWhitelist: []string{"encoding/json.Marshaler", "encoding.TextMarshaler"},
- },
- {
- Name: "encoding/json.Unmarshal", Tag: "json", ArgPos: 1,
- ifaceWhitelist: []string{"encoding/json.Unmarshaler", "encoding.TextUnmarshaler"},
- },
- {
- Name: "(*encoding/json.Encoder).Encode", Tag: "json", ArgPos: 0,
- ifaceWhitelist: []string{"encoding/json.Marshaler", "encoding.TextMarshaler"},
- },
- {
- Name: "(*encoding/json.Decoder).Decode", Tag: "json", ArgPos: 0,
- ifaceWhitelist: []string{"encoding/json.Unmarshaler", "encoding.TextUnmarshaler"},
- },
+ {"encoding/json.Marshal", "json", 0, []string{"encoding/json.Marshaler", "encoding.TextMarshaler"}},
+ {"encoding/json.MarshalIndent", "json", 0, []string{"encoding/json.Marshaler", "encoding.TextMarshaler"}},
+ {"encoding/json.Unmarshal", "json", 1, []string{"encoding/json.Unmarshaler", "encoding.TextUnmarshaler"}},
+ {"(*encoding/json.Encoder).Encode", "json", 0, []string{"encoding/json.Marshaler", "encoding.TextMarshaler"}},
+ {"(*encoding/json.Decoder).Decode", "json", 0, []string{"encoding/json.Unmarshaler", "encoding.TextUnmarshaler"}},
// https://pkg.go.dev/encoding/xml
- {
- Name: "encoding/xml.Marshal", Tag: "xml", ArgPos: 0,
- ifaceWhitelist: []string{"encoding/xml.Marshaler", "encoding.TextMarshaler"},
- },
- {
- Name: "encoding/xml.MarshalIndent", Tag: "xml", ArgPos: 0,
- ifaceWhitelist: []string{"encoding/xml.Marshaler", "encoding.TextMarshaler"},
- },
- {
- Name: "encoding/xml.Unmarshal", Tag: "xml", ArgPos: 1,
- ifaceWhitelist: []string{"encoding/xml.Unmarshaler", "encoding.TextUnmarshaler"},
- },
- {
- Name: "(*encoding/xml.Encoder).Encode", Tag: "xml", ArgPos: 0,
- ifaceWhitelist: []string{"encoding/xml.Marshaler", "encoding.TextMarshaler"},
- },
- {
- Name: "(*encoding/xml.Decoder).Decode", Tag: "xml", ArgPos: 0,
- ifaceWhitelist: []string{"encoding/xml.Unmarshaler", "encoding.TextUnmarshaler"},
- },
- {
- Name: "(*encoding/xml.Encoder).EncodeElement", Tag: "xml", ArgPos: 0,
- ifaceWhitelist: []string{"encoding/xml.Marshaler", "encoding.TextMarshaler"},
- },
- {
- Name: "(*encoding/xml.Decoder).DecodeElement", Tag: "xml", ArgPos: 0,
- ifaceWhitelist: []string{"encoding/xml.Unmarshaler", "encoding.TextUnmarshaler"},
- },
+ {"encoding/xml.Marshal", "xml", 0, []string{"encoding/xml.Marshaler", "encoding.TextMarshaler"}},
+ {"encoding/xml.MarshalIndent", "xml", 0, []string{"encoding/xml.Marshaler", "encoding.TextMarshaler"}},
+ {"encoding/xml.Unmarshal", "xml", 1, []string{"encoding/xml.Unmarshaler", "encoding.TextUnmarshaler"}},
+ {"(*encoding/xml.Encoder).Encode", "xml", 0, []string{"encoding/xml.Marshaler", "encoding.TextMarshaler"}},
+ {"(*encoding/xml.Decoder).Decode", "xml", 0, []string{"encoding/xml.Unmarshaler", "encoding.TextUnmarshaler"}},
+ {"(*encoding/xml.Encoder).EncodeElement", "xml", 0, []string{"encoding/xml.Marshaler", "encoding.TextMarshaler"}},
+ {"(*encoding/xml.Decoder).DecodeElement", "xml", 0, []string{"encoding/xml.Unmarshaler", "encoding.TextUnmarshaler"}},
// https://pkg.go.dev/gopkg.in/yaml.v3
- {
- Name: "gopkg.in/yaml.v3.Marshal", Tag: "yaml", ArgPos: 0,
- ifaceWhitelist: []string{"gopkg.in/yaml.v3.Marshaler"},
- },
- {
- Name: "gopkg.in/yaml.v3.Unmarshal", Tag: "yaml", ArgPos: 1,
- ifaceWhitelist: []string{"gopkg.in/yaml.v3.Unmarshaler"},
- },
- {
- Name: "(*gopkg.in/yaml.v3.Encoder).Encode", Tag: "yaml", ArgPos: 0,
- ifaceWhitelist: []string{"gopkg.in/yaml.v3.Marshaler"},
- },
- {
- Name: "(*gopkg.in/yaml.v3.Decoder).Decode", Tag: "yaml", ArgPos: 0,
- ifaceWhitelist: []string{"gopkg.in/yaml.v3.Unmarshaler"},
- },
+ {"gopkg.in/yaml.v3.Marshal", "yaml", 0, []string{"gopkg.in/yaml.v3.Marshaler"}},
+ {"gopkg.in/yaml.v3.Unmarshal", "yaml", 1, []string{"gopkg.in/yaml.v3.Unmarshaler"}},
+ {"(*gopkg.in/yaml.v3.Encoder).Encode", "yaml", 0, []string{"gopkg.in/yaml.v3.Marshaler"}},
+ {"(*gopkg.in/yaml.v3.Decoder).Decode", "yaml", 0, []string{"gopkg.in/yaml.v3.Unmarshaler"}},
// https://pkg.go.dev/github.com/BurntSushi/toml
- {
- Name: "github.com/BurntSushi/toml.Unmarshal", Tag: "toml", ArgPos: 1,
- ifaceWhitelist: []string{"github.com/BurntSushi/toml.Unmarshaler", "encoding.TextUnmarshaler"},
- },
- {
- Name: "github.com/BurntSushi/toml.Decode", Tag: "toml", ArgPos: 1,
- ifaceWhitelist: []string{"github.com/BurntSushi/toml.Unmarshaler", "encoding.TextUnmarshaler"},
- },
- {
- Name: "github.com/BurntSushi/toml.DecodeFS", Tag: "toml", ArgPos: 2,
- ifaceWhitelist: []string{"github.com/BurntSushi/toml.Unmarshaler", "encoding.TextUnmarshaler"},
- },
- {
- Name: "github.com/BurntSushi/toml.DecodeFile", Tag: "toml", ArgPos: 1,
- ifaceWhitelist: []string{"github.com/BurntSushi/toml.Unmarshaler", "encoding.TextUnmarshaler"},
- },
- {
- Name: "(*github.com/BurntSushi/toml.Encoder).Encode", Tag: "toml", ArgPos: 0,
- ifaceWhitelist: []string{"encoding.TextMarshaler"},
- },
- {
- Name: "(*github.com/BurntSushi/toml.Decoder).Decode", Tag: "toml", ArgPos: 0,
- ifaceWhitelist: []string{"github.com/BurntSushi/toml.Unmarshaler", "encoding.TextUnmarshaler"},
- },
+ {"github.com/BurntSushi/toml.Unmarshal", "toml", 1, []string{"github.com/BurntSushi/toml.Unmarshaler", "encoding.TextUnmarshaler"}},
+ {"github.com/BurntSushi/toml.Decode", "toml", 1, []string{"github.com/BurntSushi/toml.Unmarshaler", "encoding.TextUnmarshaler"}},
+ {"github.com/BurntSushi/toml.DecodeFS", "toml", 2, []string{"github.com/BurntSushi/toml.Unmarshaler", "encoding.TextUnmarshaler"}},
+ {"github.com/BurntSushi/toml.DecodeFile", "toml", 1, []string{"github.com/BurntSushi/toml.Unmarshaler", "encoding.TextUnmarshaler"}},
+ {"(*github.com/BurntSushi/toml.Encoder).Encode", "toml", 0, []string{"encoding.TextMarshaler"}},
+ {"(*github.com/BurntSushi/toml.Decoder).Decode", "toml", 0, []string{"github.com/BurntSushi/toml.Unmarshaler", "encoding.TextUnmarshaler"}},
// https://pkg.go.dev/github.com/mitchellh/mapstructure
- {Name: "github.com/mitchellh/mapstructure.Decode", Tag: "mapstructure", ArgPos: 1},
- {Name: "github.com/mitchellh/mapstructure.DecodeMetadata", Tag: "mapstructure", ArgPos: 1},
- {Name: "github.com/mitchellh/mapstructure.WeakDecode", Tag: "mapstructure", ArgPos: 1},
- {Name: "github.com/mitchellh/mapstructure.WeakDecodeMetadata", Tag: "mapstructure", ArgPos: 1},
+ {"github.com/mitchellh/mapstructure.Decode", "mapstructure", 1, nil},
+ {"github.com/mitchellh/mapstructure.DecodeMetadata", "mapstructure", 1, nil},
+ {"github.com/mitchellh/mapstructure.WeakDecode", "mapstructure", 1, nil},
+ {"github.com/mitchellh/mapstructure.WeakDecodeMetadata", "mapstructure", 1, nil},
// https://pkg.go.dev/github.com/jmoiron/sqlx
- {Name: "github.com/jmoiron/sqlx.Get", Tag: "db", ArgPos: 1},
- {Name: "github.com/jmoiron/sqlx.GetContext", Tag: "db", ArgPos: 2},
- {Name: "github.com/jmoiron/sqlx.Select", Tag: "db", ArgPos: 1},
- {Name: "github.com/jmoiron/sqlx.SelectContext", Tag: "db", ArgPos: 2},
- {Name: "github.com/jmoiron/sqlx.StructScan", Tag: "db", ArgPos: 1},
- {Name: "(*github.com/jmoiron/sqlx.Conn).GetContext", Tag: "db", ArgPos: 1},
- {Name: "(*github.com/jmoiron/sqlx.Conn).SelectContext", Tag: "db", ArgPos: 1},
- {Name: "(*github.com/jmoiron/sqlx.DB).Get", Tag: "db", ArgPos: 0},
- {Name: "(*github.com/jmoiron/sqlx.DB).GetContext", Tag: "db", ArgPos: 1},
- {Name: "(*github.com/jmoiron/sqlx.DB).Select", Tag: "db", ArgPos: 0},
- {Name: "(*github.com/jmoiron/sqlx.DB).SelectContext", Tag: "db", ArgPos: 1},
- {Name: "(*github.com/jmoiron/sqlx.NamedStmt).Get", Tag: "db", ArgPos: 0},
- {Name: "(*github.com/jmoiron/sqlx.NamedStmt).GetContext", Tag: "db", ArgPos: 1},
- {Name: "(*github.com/jmoiron/sqlx.NamedStmt).Select", Tag: "db", ArgPos: 0},
- {Name: "(*github.com/jmoiron/sqlx.NamedStmt).SelectContext", Tag: "db", ArgPos: 1},
- {Name: "(*github.com/jmoiron/sqlx.Row).StructScan", Tag: "db", ArgPos: 0},
- {Name: "(*github.com/jmoiron/sqlx.Rows).StructScan", Tag: "db", ArgPos: 0},
- {Name: "(*github.com/jmoiron/sqlx.Stmt).Get", Tag: "db", ArgPos: 0},
- {Name: "(*github.com/jmoiron/sqlx.Stmt).GetContext", Tag: "db", ArgPos: 1},
- {Name: "(*github.com/jmoiron/sqlx.Stmt).Select", Tag: "db", ArgPos: 0},
- {Name: "(*github.com/jmoiron/sqlx.Stmt).SelectContext", Tag: "db", ArgPos: 1},
- {Name: "(*github.com/jmoiron/sqlx.Tx).Get", Tag: "db", ArgPos: 0},
- {Name: "(*github.com/jmoiron/sqlx.Tx).GetContext", Tag: "db", ArgPos: 1},
- {Name: "(*github.com/jmoiron/sqlx.Tx).Select", Tag: "db", ArgPos: 0},
- {Name: "(*github.com/jmoiron/sqlx.Tx).SelectContext", Tag: "db", ArgPos: 1},
+ {"github.com/jmoiron/sqlx.Get", "db", 1, []string{"database/sql.Scanner"}},
+ {"github.com/jmoiron/sqlx.GetContext", "db", 2, []string{"database/sql.Scanner"}},
+ {"github.com/jmoiron/sqlx.Select", "db", 1, []string{"database/sql.Scanner"}},
+ {"github.com/jmoiron/sqlx.SelectContext", "db", 2, []string{"database/sql.Scanner"}},
+ {"github.com/jmoiron/sqlx.StructScan", "db", 1, []string{"database/sql.Scanner"}},
+ {"(*github.com/jmoiron/sqlx.Conn).GetContext", "db", 1, []string{"database/sql.Scanner"}},
+ {"(*github.com/jmoiron/sqlx.Conn).SelectContext", "db", 1, []string{"database/sql.Scanner"}},
+ {"(*github.com/jmoiron/sqlx.DB).Get", "db", 0, []string{"database/sql.Scanner"}},
+ {"(*github.com/jmoiron/sqlx.DB).GetContext", "db", 1, []string{"database/sql.Scanner"}},
+ {"(*github.com/jmoiron/sqlx.DB).Select", "db", 0, []string{"database/sql.Scanner"}},
+ {"(*github.com/jmoiron/sqlx.DB).SelectContext", "db", 1, []string{"database/sql.Scanner"}},
+ {"(*github.com/jmoiron/sqlx.NamedStmt).Get", "db", 0, []string{"database/sql.Scanner"}},
+ {"(*github.com/jmoiron/sqlx.NamedStmt).GetContext", "db", 1, []string{"database/sql.Scanner"}},
+ {"(*github.com/jmoiron/sqlx.NamedStmt).Select", "db", 0, []string{"database/sql.Scanner"}},
+ {"(*github.com/jmoiron/sqlx.NamedStmt).SelectContext", "db", 1, []string{"database/sql.Scanner"}},
+ {"(*github.com/jmoiron/sqlx.Row).StructScan", "db", 0, []string{"database/sql.Scanner"}},
+ {"(*github.com/jmoiron/sqlx.Rows).StructScan", "db", 0, []string{"database/sql.Scanner"}},
+ {"(*github.com/jmoiron/sqlx.Stmt).Get", "db", 0, []string{"database/sql.Scanner"}},
+ {"(*github.com/jmoiron/sqlx.Stmt).GetContext", "db", 1, []string{"database/sql.Scanner"}},
+ {"(*github.com/jmoiron/sqlx.Stmt).Select", "db", 0, []string{"database/sql.Scanner"}},
+ {"(*github.com/jmoiron/sqlx.Stmt).SelectContext", "db", 1, []string{"database/sql.Scanner"}},
+ {"(*github.com/jmoiron/sqlx.Tx).Get", "db", 0, []string{"database/sql.Scanner"}},
+ {"(*github.com/jmoiron/sqlx.Tx).GetContext", "db", 1, []string{"database/sql.Scanner"}},
+ {"(*github.com/jmoiron/sqlx.Tx).Select", "db", 0, []string{"database/sql.Scanner"}},
+ {"(*github.com/jmoiron/sqlx.Tx).SelectContext", "db", 1, []string{"database/sql.Scanner"}},
}
diff --git a/vendor/go-simpler.org/musttag/musttag.go b/vendor/go-simpler.org/musttag/musttag.go
index 70c84201b..f8c0352d2 100644
--- a/vendor/go-simpler.org/musttag/musttag.go
+++ b/vendor/go-simpler.org/musttag/musttag.go
@@ -91,17 +91,17 @@ func run(pass *analysis.Pass, mainModule string, funcs map[string]Func) (_ any,
call, ok := node.(*ast.CallExpr)
if !ok {
- return // not a function call.
+ return
}
callee := typeutil.StaticCallee(pass.TypesInfo, call)
if callee == nil {
- return // not a static call.
+ return
}
fn, ok := funcs[cutVendor(callee.FullName())]
if !ok {
- return // unsupported function.
+ return
}
if len(call.Args) <= fn.ArgPos {
@@ -116,7 +116,7 @@ func run(pass *analysis.Pass, mainModule string, funcs map[string]Func) (_ any,
typ := pass.TypesInfo.TypeOf(arg)
if typ == nil {
- return // no type info found.
+ return
}
checker := checker{
@@ -125,9 +125,8 @@ func run(pass *analysis.Pass, mainModule string, funcs map[string]Func) (_ any,
ifaceWhitelist: fn.ifaceWhitelist,
imports: pass.Pkg.Imports(),
}
-
- if valid := checker.checkType(typ, fn.Tag); valid {
- return // nothing to report.
+ if checker.isValidType(typ, fn.Tag) {
+ return
}
pass.Reportf(arg.Pos(), "the given struct should be annotated with the `%s` tag", fn.Tag)
@@ -143,43 +142,32 @@ type checker struct {
imports []*types.Package
}
-func (c *checker) checkType(typ types.Type, tag string) bool {
+func (c *checker) isValidType(typ types.Type, tag string) bool {
if _, ok := c.seenTypes[typ.String()]; ok {
- return true // already checked.
+ return true
}
c.seenTypes[typ.String()] = struct{}{}
styp, ok := c.parseStruct(typ)
if !ok {
- return true // not a struct.
+ return true
}
- return c.checkStruct(styp, tag)
+ return c.isValidStruct(styp, tag)
}
-// recursively unwrap a type until we get to an underlying
-// raw struct type that should have its fields checked
-//
-// SomeStruct -> struct{SomeStructField: ... }
-// []*SomeStruct -> struct{SomeStructField: ... }
-// ...
-//
-// exits early if it hits a type that implements a whitelisted interface
func (c *checker) parseStruct(typ types.Type) (*types.Struct, bool) {
if implementsInterface(typ, c.ifaceWhitelist, c.imports) {
- return nil, false // the type implements a Marshaler interface; see issue #64.
+ return nil, false
}
switch typ := typ.(type) {
case *types.Pointer:
return c.parseStruct(typ.Elem())
-
case *types.Array:
return c.parseStruct(typ.Elem())
-
case *types.Slice:
return c.parseStruct(typ.Elem())
-
case *types.Map:
return c.parseStruct(typ.Elem())
@@ -205,7 +193,7 @@ func (c *checker) parseStruct(typ types.Type) (*types.Struct, bool) {
}
}
-func (c *checker) checkStruct(styp *types.Struct, tag string) (valid bool) {
+func (c *checker) isValidStruct(styp *types.Struct, tag string) bool {
for i := 0; i < styp.NumFields(); i++ {
field := styp.Field(i)
if !field.Exported() {
@@ -214,18 +202,18 @@ func (c *checker) checkStruct(styp *types.Struct, tag string) (valid bool) {
tagValue, ok := reflect.StructTag(styp.Tag(i)).Lookup(tag)
if !ok {
- // tag is not required for embedded types; see issue #12.
+ // tag is not required for embedded types.
if !field.Embedded() {
return false
}
}
- // Do not recurse into ignored fields.
+ // the field is explicitly ignored.
if tagValue == "-" {
continue
}
- if valid := c.checkType(field.Type(), tag); !valid {
+ if !c.isValidType(field.Type(), tag) {
return false
}
}
@@ -254,25 +242,29 @@ func implementsInterface(typ types.Type, ifaces []string, imports []*types.Packa
}
for _, ifacePath := range ifaces {
- // "encoding/json.Marshaler" -> "encoding/json" + "Marshaler"
+ // e.g. "encoding/json.Marshaler" -> "encoding/json" + "Marshaler".
idx := strings.LastIndex(ifacePath, ".")
if idx == -1 {
continue
}
+
pkgName, ifaceName := ifacePath[:idx], ifacePath[idx+1:]
scope, ok := findScope(pkgName)
if !ok {
continue
}
+
obj := scope.Lookup(ifaceName)
if obj == nil {
continue
}
+
iface, ok := obj.Type().Underlying().(*types.Interface)
if !ok {
continue
}
+
if types.Implements(typ, iface) || types.Implements(types.NewPointer(typ), iface) {
return true
}
diff --git a/vendor/go.uber.org/automaxprocs/internal/runtime/cpu_quota_linux.go b/vendor/go.uber.org/automaxprocs/internal/runtime/cpu_quota_linux.go
index 3b974754c..f9057fd27 100644
--- a/vendor/go.uber.org/automaxprocs/internal/runtime/cpu_quota_linux.go
+++ b/vendor/go.uber.org/automaxprocs/internal/runtime/cpu_quota_linux.go
@@ -25,15 +25,18 @@ package runtime
import (
"errors"
- "math"
cg "go.uber.org/automaxprocs/internal/cgroups"
)
// CPUQuotaToGOMAXPROCS converts the CPU quota applied to the calling process
-// to a valid GOMAXPROCS value.
-func CPUQuotaToGOMAXPROCS(minValue int) (int, CPUQuotaStatus, error) {
- cgroups, err := newQueryer()
+// to a valid GOMAXPROCS value. The quota is converted from float to int using round.
+// If round == nil, DefaultRoundFunc is used.
+func CPUQuotaToGOMAXPROCS(minValue int, round func(v float64) int) (int, CPUQuotaStatus, error) {
+ if round == nil {
+ round = DefaultRoundFunc
+ }
+ cgroups, err := _newQueryer()
if err != nil {
return -1, CPUQuotaUndefined, err
}
@@ -43,7 +46,7 @@ func CPUQuotaToGOMAXPROCS(minValue int) (int, CPUQuotaStatus, error) {
return -1, CPUQuotaUndefined, err
}
- maxProcs := int(math.Floor(quota))
+ maxProcs := round(quota)
if minValue > 0 && maxProcs < minValue {
return minValue, CPUQuotaMinUsed, nil
}
@@ -57,6 +60,7 @@ type queryer interface {
var (
_newCgroups2 = cg.NewCGroups2ForCurrentProcess
_newCgroups = cg.NewCGroupsForCurrentProcess
+ _newQueryer = newQueryer
)
func newQueryer() (queryer, error) {
diff --git a/vendor/go.uber.org/automaxprocs/internal/runtime/cpu_quota_unsupported.go b/vendor/go.uber.org/automaxprocs/internal/runtime/cpu_quota_unsupported.go
index 692255448..e74701508 100644
--- a/vendor/go.uber.org/automaxprocs/internal/runtime/cpu_quota_unsupported.go
+++ b/vendor/go.uber.org/automaxprocs/internal/runtime/cpu_quota_unsupported.go
@@ -26,6 +26,6 @@ package runtime
// CPUQuotaToGOMAXPROCS converts the CPU quota applied to the calling process
// to a valid GOMAXPROCS value. This is Linux-specific and not supported in the
// current OS.
-func CPUQuotaToGOMAXPROCS(_ int) (int, CPUQuotaStatus, error) {
+func CPUQuotaToGOMAXPROCS(_ int, _ func(v float64) int) (int, CPUQuotaStatus, error) {
return -1, CPUQuotaUndefined, nil
}
diff --git a/vendor/go.uber.org/automaxprocs/internal/runtime/runtime.go b/vendor/go.uber.org/automaxprocs/internal/runtime/runtime.go
index df6eacf05..f8a2834ac 100644
--- a/vendor/go.uber.org/automaxprocs/internal/runtime/runtime.go
+++ b/vendor/go.uber.org/automaxprocs/internal/runtime/runtime.go
@@ -20,6 +20,8 @@
package runtime
+import "math"
+
// CPUQuotaStatus presents the status of how CPU quota is used
type CPUQuotaStatus int
@@ -31,3 +33,8 @@ const (
// CPUQuotaMinUsed is returned when CPU quota is smaller than the min value
CPUQuotaMinUsed
)
+
+// DefaultRoundFunc is the default function to convert CPU quota from float to int. It rounds the value down (floor).
+func DefaultRoundFunc(v float64) int {
+ return int(math.Floor(v))
+}
diff --git a/vendor/go.uber.org/automaxprocs/maxprocs/maxprocs.go b/vendor/go.uber.org/automaxprocs/maxprocs/maxprocs.go
index 98176d645..e561fe60b 100644
--- a/vendor/go.uber.org/automaxprocs/maxprocs/maxprocs.go
+++ b/vendor/go.uber.org/automaxprocs/maxprocs/maxprocs.go
@@ -37,9 +37,10 @@ func currentMaxProcs() int {
}
type config struct {
- printf func(string, ...interface{})
- procs func(int) (int, iruntime.CPUQuotaStatus, error)
- minGOMAXPROCS int
+ printf func(string, ...interface{})
+ procs func(int, func(v float64) int) (int, iruntime.CPUQuotaStatus, error)
+ minGOMAXPROCS int
+ roundQuotaFunc func(v float64) int
}
func (c *config) log(fmt string, args ...interface{}) {
@@ -71,6 +72,13 @@ func Min(n int) Option {
})
}
+// RoundQuotaFunc sets the function that will be used to covert the CPU quota from float to int.
+func RoundQuotaFunc(rf func(v float64) int) Option {
+ return optionFunc(func(cfg *config) {
+ cfg.roundQuotaFunc = rf
+ })
+}
+
type optionFunc func(*config)
func (of optionFunc) apply(cfg *config) { of(cfg) }
@@ -82,8 +90,9 @@ func (of optionFunc) apply(cfg *config) { of(cfg) }
// configured CPU quota.
func Set(opts ...Option) (func(), error) {
cfg := &config{
- procs: iruntime.CPUQuotaToGOMAXPROCS,
- minGOMAXPROCS: 1,
+ procs: iruntime.CPUQuotaToGOMAXPROCS,
+ roundQuotaFunc: iruntime.DefaultRoundFunc,
+ minGOMAXPROCS: 1,
}
for _, o := range opts {
o.apply(cfg)
@@ -102,7 +111,7 @@ func Set(opts ...Option) (func(), error) {
return undoNoop, nil
}
- maxProcs, status, err := cfg.procs(cfg.minGOMAXPROCS)
+ maxProcs, status, err := cfg.procs(cfg.minGOMAXPROCS, cfg.roundQuotaFunc)
if err != nil {
return undoNoop, err
}
diff --git a/vendor/go.uber.org/automaxprocs/maxprocs/version.go b/vendor/go.uber.org/automaxprocs/maxprocs/version.go
index 108a95535..cc7fc5aee 100644
--- a/vendor/go.uber.org/automaxprocs/maxprocs/version.go
+++ b/vendor/go.uber.org/automaxprocs/maxprocs/version.go
@@ -21,4 +21,4 @@
package maxprocs
// Version is the current package version.
-const Version = "1.5.2"
+const Version = "1.6.0"
diff --git a/vendor/golang.org/x/crypto/chacha20/chacha_noasm.go b/vendor/golang.org/x/crypto/chacha20/chacha_noasm.go
index db42e6676..c709b7284 100644
--- a/vendor/golang.org/x/crypto/chacha20/chacha_noasm.go
+++ b/vendor/golang.org/x/crypto/chacha20/chacha_noasm.go
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-//go:build (!arm64 && !s390x && !ppc64le) || !gc || purego
+//go:build (!arm64 && !s390x && !ppc64 && !ppc64le) || !gc || purego
package chacha20
diff --git a/vendor/golang.org/x/crypto/chacha20/chacha_ppc64le.go b/vendor/golang.org/x/crypto/chacha20/chacha_ppc64x.go
index 3a4287f99..bd183d9ba 100644
--- a/vendor/golang.org/x/crypto/chacha20/chacha_ppc64le.go
+++ b/vendor/golang.org/x/crypto/chacha20/chacha_ppc64x.go
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-//go:build gc && !purego
+//go:build gc && !purego && (ppc64 || ppc64le)
package chacha20
diff --git a/vendor/golang.org/x/crypto/chacha20/chacha_ppc64le.s b/vendor/golang.org/x/crypto/chacha20/chacha_ppc64x.s
index c672ccf69..a660b4112 100644
--- a/vendor/golang.org/x/crypto/chacha20/chacha_ppc64le.s
+++ b/vendor/golang.org/x/crypto/chacha20/chacha_ppc64x.s
@@ -19,7 +19,7 @@
// The differences in this and the original implementation are
// due to the calling conventions and initialization of constants.
-//go:build gc && !purego
+//go:build gc && !purego && (ppc64 || ppc64le)
#include "textflag.h"
@@ -36,32 +36,68 @@
// for VPERMXOR
#define MASK R18
-DATA consts<>+0x00(SB)/8, $0x3320646e61707865
-DATA consts<>+0x08(SB)/8, $0x6b20657479622d32
-DATA consts<>+0x10(SB)/8, $0x0000000000000001
-DATA consts<>+0x18(SB)/8, $0x0000000000000000
-DATA consts<>+0x20(SB)/8, $0x0000000000000004
-DATA consts<>+0x28(SB)/8, $0x0000000000000000
-DATA consts<>+0x30(SB)/8, $0x0a0b08090e0f0c0d
-DATA consts<>+0x38(SB)/8, $0x0203000106070405
-DATA consts<>+0x40(SB)/8, $0x090a0b080d0e0f0c
-DATA consts<>+0x48(SB)/8, $0x0102030005060704
-DATA consts<>+0x50(SB)/8, $0x6170786561707865
-DATA consts<>+0x58(SB)/8, $0x6170786561707865
-DATA consts<>+0x60(SB)/8, $0x3320646e3320646e
-DATA consts<>+0x68(SB)/8, $0x3320646e3320646e
-DATA consts<>+0x70(SB)/8, $0x79622d3279622d32
-DATA consts<>+0x78(SB)/8, $0x79622d3279622d32
-DATA consts<>+0x80(SB)/8, $0x6b2065746b206574
-DATA consts<>+0x88(SB)/8, $0x6b2065746b206574
-DATA consts<>+0x90(SB)/8, $0x0000000100000000
-DATA consts<>+0x98(SB)/8, $0x0000000300000002
-DATA consts<>+0xa0(SB)/8, $0x5566774411223300
-DATA consts<>+0xa8(SB)/8, $0xddeeffcc99aabb88
-DATA consts<>+0xb0(SB)/8, $0x6677445522330011
-DATA consts<>+0xb8(SB)/8, $0xeeffccddaabb8899
+DATA consts<>+0x00(SB)/4, $0x61707865
+DATA consts<>+0x04(SB)/4, $0x3320646e
+DATA consts<>+0x08(SB)/4, $0x79622d32
+DATA consts<>+0x0c(SB)/4, $0x6b206574
+DATA consts<>+0x10(SB)/4, $0x00000001
+DATA consts<>+0x14(SB)/4, $0x00000000
+DATA consts<>+0x18(SB)/4, $0x00000000
+DATA consts<>+0x1c(SB)/4, $0x00000000
+DATA consts<>+0x20(SB)/4, $0x00000004
+DATA consts<>+0x24(SB)/4, $0x00000000
+DATA consts<>+0x28(SB)/4, $0x00000000
+DATA consts<>+0x2c(SB)/4, $0x00000000
+DATA consts<>+0x30(SB)/4, $0x0e0f0c0d
+DATA consts<>+0x34(SB)/4, $0x0a0b0809
+DATA consts<>+0x38(SB)/4, $0x06070405
+DATA consts<>+0x3c(SB)/4, $0x02030001
+DATA consts<>+0x40(SB)/4, $0x0d0e0f0c
+DATA consts<>+0x44(SB)/4, $0x090a0b08
+DATA consts<>+0x48(SB)/4, $0x05060704
+DATA consts<>+0x4c(SB)/4, $0x01020300
+DATA consts<>+0x50(SB)/4, $0x61707865
+DATA consts<>+0x54(SB)/4, $0x61707865
+DATA consts<>+0x58(SB)/4, $0x61707865
+DATA consts<>+0x5c(SB)/4, $0x61707865
+DATA consts<>+0x60(SB)/4, $0x3320646e
+DATA consts<>+0x64(SB)/4, $0x3320646e
+DATA consts<>+0x68(SB)/4, $0x3320646e
+DATA consts<>+0x6c(SB)/4, $0x3320646e
+DATA consts<>+0x70(SB)/4, $0x79622d32
+DATA consts<>+0x74(SB)/4, $0x79622d32
+DATA consts<>+0x78(SB)/4, $0x79622d32
+DATA consts<>+0x7c(SB)/4, $0x79622d32
+DATA consts<>+0x80(SB)/4, $0x6b206574
+DATA consts<>+0x84(SB)/4, $0x6b206574
+DATA consts<>+0x88(SB)/4, $0x6b206574
+DATA consts<>+0x8c(SB)/4, $0x6b206574
+DATA consts<>+0x90(SB)/4, $0x00000000
+DATA consts<>+0x94(SB)/4, $0x00000001
+DATA consts<>+0x98(SB)/4, $0x00000002
+DATA consts<>+0x9c(SB)/4, $0x00000003
+DATA consts<>+0xa0(SB)/4, $0x11223300
+DATA consts<>+0xa4(SB)/4, $0x55667744
+DATA consts<>+0xa8(SB)/4, $0x99aabb88
+DATA consts<>+0xac(SB)/4, $0xddeeffcc
+DATA consts<>+0xb0(SB)/4, $0x22330011
+DATA consts<>+0xb4(SB)/4, $0x66774455
+DATA consts<>+0xb8(SB)/4, $0xaabb8899
+DATA consts<>+0xbc(SB)/4, $0xeeffccdd
GLOBL consts<>(SB), RODATA, $0xc0
+#ifdef GOARCH_ppc64
+#define BE_XXBRW_INIT() \
+ LVSL (R0)(R0), V24 \
+ VSPLTISB $3, V25 \
+ VXOR V24, V25, V24 \
+
+#define BE_XXBRW(vr) VPERM vr, vr, V24, vr
+#else
+#define BE_XXBRW_INIT()
+#define BE_XXBRW(vr)
+#endif
+
//func chaCha20_ctr32_vsx(out, inp *byte, len int, key *[8]uint32, counter *uint32)
TEXT ·chaCha20_ctr32_vsx(SB),NOSPLIT,$64-40
MOVD out+0(FP), OUT
@@ -94,6 +130,8 @@ TEXT ·chaCha20_ctr32_vsx(SB),NOSPLIT,$64-40
// Clear V27
VXOR V27, V27, V27
+ BE_XXBRW_INIT()
+
// V28
LXVW4X (CONSTBASE)(R11), VS60
@@ -299,6 +337,11 @@ loop_vsx:
VADDUWM V8, V18, V8
VADDUWM V12, V19, V12
+ BE_XXBRW(V0)
+ BE_XXBRW(V4)
+ BE_XXBRW(V8)
+ BE_XXBRW(V12)
+
CMPU LEN, $64
BLT tail_vsx
@@ -327,6 +370,11 @@ loop_vsx:
VADDUWM V9, V18, V8
VADDUWM V13, V19, V12
+ BE_XXBRW(V0)
+ BE_XXBRW(V4)
+ BE_XXBRW(V8)
+ BE_XXBRW(V12)
+
CMPU LEN, $64
BLT tail_vsx
@@ -334,8 +382,8 @@ loop_vsx:
LXVW4X (INP)(R8), VS60
LXVW4X (INP)(R9), VS61
LXVW4X (INP)(R10), VS62
- VXOR V27, V0, V27
+ VXOR V27, V0, V27
VXOR V28, V4, V28
VXOR V29, V8, V29
VXOR V30, V12, V30
@@ -354,6 +402,11 @@ loop_vsx:
VADDUWM V10, V18, V8
VADDUWM V14, V19, V12
+ BE_XXBRW(V0)
+ BE_XXBRW(V4)
+ BE_XXBRW(V8)
+ BE_XXBRW(V12)
+
CMPU LEN, $64
BLT tail_vsx
@@ -381,6 +434,11 @@ loop_vsx:
VADDUWM V11, V18, V8
VADDUWM V15, V19, V12
+ BE_XXBRW(V0)
+ BE_XXBRW(V4)
+ BE_XXBRW(V8)
+ BE_XXBRW(V12)
+
CMPU LEN, $64
BLT tail_vsx
@@ -408,9 +466,9 @@ loop_vsx:
done_vsx:
// Increment counter by number of 64 byte blocks
- MOVD (CNT), R14
+ MOVWZ (CNT), R14
ADD BLOCKS, R14
- MOVD R14, (CNT)
+ MOVWZ R14, (CNT)
RET
tail_vsx:
diff --git a/vendor/golang.org/x/crypto/internal/poly1305/mac_noasm.go b/vendor/golang.org/x/crypto/internal/poly1305/mac_noasm.go
index 333da285b..bd896bdc7 100644
--- a/vendor/golang.org/x/crypto/internal/poly1305/mac_noasm.go
+++ b/vendor/golang.org/x/crypto/internal/poly1305/mac_noasm.go
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-//go:build (!amd64 && !ppc64le && !s390x) || !gc || purego
+//go:build (!amd64 && !ppc64le && !ppc64 && !s390x) || !gc || purego
package poly1305
diff --git a/vendor/golang.org/x/crypto/internal/poly1305/sum_ppc64le.go b/vendor/golang.org/x/crypto/internal/poly1305/sum_ppc64x.go
index 4aec4874b..1a1679aaa 100644
--- a/vendor/golang.org/x/crypto/internal/poly1305/sum_ppc64le.go
+++ b/vendor/golang.org/x/crypto/internal/poly1305/sum_ppc64x.go
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-//go:build gc && !purego
+//go:build gc && !purego && (ppc64 || ppc64le)
package poly1305
diff --git a/vendor/golang.org/x/crypto/internal/poly1305/sum_ppc64le.s b/vendor/golang.org/x/crypto/internal/poly1305/sum_ppc64x.s
index b3c1699bf..6899a1dab 100644
--- a/vendor/golang.org/x/crypto/internal/poly1305/sum_ppc64le.s
+++ b/vendor/golang.org/x/crypto/internal/poly1305/sum_ppc64x.s
@@ -2,15 +2,25 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-//go:build gc && !purego
+//go:build gc && !purego && (ppc64 || ppc64le)
#include "textflag.h"
// This was ported from the amd64 implementation.
+#ifdef GOARCH_ppc64le
+#define LE_MOVD MOVD
+#define LE_MOVWZ MOVWZ
+#define LE_MOVHZ MOVHZ
+#else
+#define LE_MOVD MOVDBR
+#define LE_MOVWZ MOVWBR
+#define LE_MOVHZ MOVHBR
+#endif
+
#define POLY1305_ADD(msg, h0, h1, h2, t0, t1, t2) \
- MOVD (msg), t0; \
- MOVD 8(msg), t1; \
+ LE_MOVD (msg)( R0), t0; \
+ LE_MOVD (msg)(R24), t1; \
MOVD $1, t2; \
ADDC t0, h0, h0; \
ADDE t1, h1, h1; \
@@ -50,10 +60,6 @@
ADDE t3, h1, h1; \
ADDZE h2
-DATA ·poly1305Mask<>+0x00(SB)/8, $0x0FFFFFFC0FFFFFFF
-DATA ·poly1305Mask<>+0x08(SB)/8, $0x0FFFFFFC0FFFFFFC
-GLOBL ·poly1305Mask<>(SB), RODATA, $16
-
// func update(state *[7]uint64, msg []byte)
TEXT ·update(SB), $0-32
MOVD state+0(FP), R3
@@ -66,6 +72,8 @@ TEXT ·update(SB), $0-32
MOVD 24(R3), R11 // r0
MOVD 32(R3), R12 // r1
+ MOVD $8, R24
+
CMP R5, $16
BLT bytes_between_0_and_15
@@ -94,7 +102,7 @@ flush_buffer:
// Greater than 8 -- load the rightmost remaining bytes in msg
// and put into R17 (h1)
- MOVD (R4)(R21), R17
+ LE_MOVD (R4)(R21), R17
MOVD $16, R22
// Find the offset to those bytes
@@ -118,7 +126,7 @@ just1:
BLT less8
// Exactly 8
- MOVD (R4), R16
+ LE_MOVD (R4), R16
CMP R17, $0
@@ -133,7 +141,7 @@ less8:
MOVD $0, R22 // shift count
CMP R5, $4
BLT less4
- MOVWZ (R4), R16
+ LE_MOVWZ (R4), R16
ADD $4, R4
ADD $-4, R5
MOVD $32, R22
@@ -141,7 +149,7 @@ less8:
less4:
CMP R5, $2
BLT less2
- MOVHZ (R4), R21
+ LE_MOVHZ (R4), R21
SLD R22, R21, R21
OR R16, R21, R16
ADD $16, R22
diff --git a/vendor/golang.org/x/exp/slices/cmp.go b/vendor/golang.org/x/exp/slices/cmp.go
deleted file mode 100644
index fbf1934a0..000000000
--- a/vendor/golang.org/x/exp/slices/cmp.go
+++ /dev/null
@@ -1,44 +0,0 @@
-// Copyright 2023 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package slices
-
-import "golang.org/x/exp/constraints"
-
-// min is a version of the predeclared function from the Go 1.21 release.
-func min[T constraints.Ordered](a, b T) T {
- if a < b || isNaN(a) {
- return a
- }
- return b
-}
-
-// max is a version of the predeclared function from the Go 1.21 release.
-func max[T constraints.Ordered](a, b T) T {
- if a > b || isNaN(a) {
- return a
- }
- return b
-}
-
-// cmpLess is a copy of cmp.Less from the Go 1.21 release.
-func cmpLess[T constraints.Ordered](x, y T) bool {
- return (isNaN(x) && !isNaN(y)) || x < y
-}
-
-// cmpCompare is a copy of cmp.Compare from the Go 1.21 release.
-func cmpCompare[T constraints.Ordered](x, y T) int {
- xNaN := isNaN(x)
- yNaN := isNaN(y)
- if xNaN && yNaN {
- return 0
- }
- if xNaN || x < y {
- return -1
- }
- if yNaN || x > y {
- return +1
- }
- return 0
-}
diff --git a/vendor/golang.org/x/exp/slices/slices.go b/vendor/golang.org/x/exp/slices/slices.go
deleted file mode 100644
index 46ceac343..000000000
--- a/vendor/golang.org/x/exp/slices/slices.go
+++ /dev/null
@@ -1,515 +0,0 @@
-// Copyright 2021 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package slices defines various functions useful with slices of any type.
-package slices
-
-import (
- "unsafe"
-
- "golang.org/x/exp/constraints"
-)
-
-// Equal reports whether two slices are equal: the same length and all
-// elements equal. If the lengths are different, Equal returns false.
-// Otherwise, the elements are compared in increasing index order, and the
-// comparison stops at the first unequal pair.
-// Floating point NaNs are not considered equal.
-func Equal[S ~[]E, E comparable](s1, s2 S) bool {
- if len(s1) != len(s2) {
- return false
- }
- for i := range s1 {
- if s1[i] != s2[i] {
- return false
- }
- }
- return true
-}
-
-// EqualFunc reports whether two slices are equal using an equality
-// function on each pair of elements. If the lengths are different,
-// EqualFunc returns false. Otherwise, the elements are compared in
-// increasing index order, and the comparison stops at the first index
-// for which eq returns false.
-func EqualFunc[S1 ~[]E1, S2 ~[]E2, E1, E2 any](s1 S1, s2 S2, eq func(E1, E2) bool) bool {
- if len(s1) != len(s2) {
- return false
- }
- for i, v1 := range s1 {
- v2 := s2[i]
- if !eq(v1, v2) {
- return false
- }
- }
- return true
-}
-
-// Compare compares the elements of s1 and s2, using [cmp.Compare] on each pair
-// of elements. The elements are compared sequentially, starting at index 0,
-// until one element is not equal to the other.
-// The result of comparing the first non-matching elements is returned.
-// If both slices are equal until one of them ends, the shorter slice is
-// considered less than the longer one.
-// The result is 0 if s1 == s2, -1 if s1 < s2, and +1 if s1 > s2.
-func Compare[S ~[]E, E constraints.Ordered](s1, s2 S) int {
- for i, v1 := range s1 {
- if i >= len(s2) {
- return +1
- }
- v2 := s2[i]
- if c := cmpCompare(v1, v2); c != 0 {
- return c
- }
- }
- if len(s1) < len(s2) {
- return -1
- }
- return 0
-}
-
-// CompareFunc is like [Compare] but uses a custom comparison function on each
-// pair of elements.
-// The result is the first non-zero result of cmp; if cmp always
-// returns 0 the result is 0 if len(s1) == len(s2), -1 if len(s1) < len(s2),
-// and +1 if len(s1) > len(s2).
-func CompareFunc[S1 ~[]E1, S2 ~[]E2, E1, E2 any](s1 S1, s2 S2, cmp func(E1, E2) int) int {
- for i, v1 := range s1 {
- if i >= len(s2) {
- return +1
- }
- v2 := s2[i]
- if c := cmp(v1, v2); c != 0 {
- return c
- }
- }
- if len(s1) < len(s2) {
- return -1
- }
- return 0
-}
-
-// Index returns the index of the first occurrence of v in s,
-// or -1 if not present.
-func Index[S ~[]E, E comparable](s S, v E) int {
- for i := range s {
- if v == s[i] {
- return i
- }
- }
- return -1
-}
-
-// IndexFunc returns the first index i satisfying f(s[i]),
-// or -1 if none do.
-func IndexFunc[S ~[]E, E any](s S, f func(E) bool) int {
- for i := range s {
- if f(s[i]) {
- return i
- }
- }
- return -1
-}
-
-// Contains reports whether v is present in s.
-func Contains[S ~[]E, E comparable](s S, v E) bool {
- return Index(s, v) >= 0
-}
-
-// ContainsFunc reports whether at least one
-// element e of s satisfies f(e).
-func ContainsFunc[S ~[]E, E any](s S, f func(E) bool) bool {
- return IndexFunc(s, f) >= 0
-}
-
-// Insert inserts the values v... into s at index i,
-// returning the modified slice.
-// The elements at s[i:] are shifted up to make room.
-// In the returned slice r, r[i] == v[0],
-// and r[i+len(v)] == value originally at r[i].
-// Insert panics if i is out of range.
-// This function is O(len(s) + len(v)).
-func Insert[S ~[]E, E any](s S, i int, v ...E) S {
- m := len(v)
- if m == 0 {
- return s
- }
- n := len(s)
- if i == n {
- return append(s, v...)
- }
- if n+m > cap(s) {
- // Use append rather than make so that we bump the size of
- // the slice up to the next storage class.
- // This is what Grow does but we don't call Grow because
- // that might copy the values twice.
- s2 := append(s[:i], make(S, n+m-i)...)
- copy(s2[i:], v)
- copy(s2[i+m:], s[i:])
- return s2
- }
- s = s[:n+m]
-
- // before:
- // s: aaaaaaaabbbbccccccccdddd
- // ^ ^ ^ ^
- // i i+m n n+m
- // after:
- // s: aaaaaaaavvvvbbbbcccccccc
- // ^ ^ ^ ^
- // i i+m n n+m
- //
- // a are the values that don't move in s.
- // v are the values copied in from v.
- // b and c are the values from s that are shifted up in index.
- // d are the values that get overwritten, never to be seen again.
-
- if !overlaps(v, s[i+m:]) {
- // Easy case - v does not overlap either the c or d regions.
- // (It might be in some of a or b, or elsewhere entirely.)
- // The data we copy up doesn't write to v at all, so just do it.
-
- copy(s[i+m:], s[i:])
-
- // Now we have
- // s: aaaaaaaabbbbbbbbcccccccc
- // ^ ^ ^ ^
- // i i+m n n+m
- // Note the b values are duplicated.
-
- copy(s[i:], v)
-
- // Now we have
- // s: aaaaaaaavvvvbbbbcccccccc
- // ^ ^ ^ ^
- // i i+m n n+m
- // That's the result we want.
- return s
- }
-
- // The hard case - v overlaps c or d. We can't just shift up
- // the data because we'd move or clobber the values we're trying
- // to insert.
- // So instead, write v on top of d, then rotate.
- copy(s[n:], v)
-
- // Now we have
- // s: aaaaaaaabbbbccccccccvvvv
- // ^ ^ ^ ^
- // i i+m n n+m
-
- rotateRight(s[i:], m)
-
- // Now we have
- // s: aaaaaaaavvvvbbbbcccccccc
- // ^ ^ ^ ^
- // i i+m n n+m
- // That's the result we want.
- return s
-}
-
-// clearSlice sets all elements up to the length of s to the zero value of E.
-// We may use the builtin clear func instead, and remove clearSlice, when upgrading
-// to Go 1.21+.
-func clearSlice[S ~[]E, E any](s S) {
- var zero E
- for i := range s {
- s[i] = zero
- }
-}
-
-// Delete removes the elements s[i:j] from s, returning the modified slice.
-// Delete panics if j > len(s) or s[i:j] is not a valid slice of s.
-// Delete is O(len(s)-i), so if many items must be deleted, it is better to
-// make a single call deleting them all together than to delete one at a time.
-// Delete zeroes the elements s[len(s)-(j-i):len(s)].
-func Delete[S ~[]E, E any](s S, i, j int) S {
- _ = s[i:j:len(s)] // bounds check
-
- if i == j {
- return s
- }
-
- oldlen := len(s)
- s = append(s[:i], s[j:]...)
- clearSlice(s[len(s):oldlen]) // zero/nil out the obsolete elements, for GC
- return s
-}
-
-// DeleteFunc removes any elements from s for which del returns true,
-// returning the modified slice.
-// DeleteFunc zeroes the elements between the new length and the original length.
-func DeleteFunc[S ~[]E, E any](s S, del func(E) bool) S {
- i := IndexFunc(s, del)
- if i == -1 {
- return s
- }
- // Don't start copying elements until we find one to delete.
- for j := i + 1; j < len(s); j++ {
- if v := s[j]; !del(v) {
- s[i] = v
- i++
- }
- }
- clearSlice(s[i:]) // zero/nil out the obsolete elements, for GC
- return s[:i]
-}
-
-// Replace replaces the elements s[i:j] by the given v, and returns the
-// modified slice. Replace panics if s[i:j] is not a valid slice of s.
-// When len(v) < (j-i), Replace zeroes the elements between the new length and the original length.
-func Replace[S ~[]E, E any](s S, i, j int, v ...E) S {
- _ = s[i:j] // verify that i:j is a valid subslice
-
- if i == j {
- return Insert(s, i, v...)
- }
- if j == len(s) {
- return append(s[:i], v...)
- }
-
- tot := len(s[:i]) + len(v) + len(s[j:])
- if tot > cap(s) {
- // Too big to fit, allocate and copy over.
- s2 := append(s[:i], make(S, tot-i)...) // See Insert
- copy(s2[i:], v)
- copy(s2[i+len(v):], s[j:])
- return s2
- }
-
- r := s[:tot]
-
- if i+len(v) <= j {
- // Easy, as v fits in the deleted portion.
- copy(r[i:], v)
- if i+len(v) != j {
- copy(r[i+len(v):], s[j:])
- }
- clearSlice(s[tot:]) // zero/nil out the obsolete elements, for GC
- return r
- }
-
- // We are expanding (v is bigger than j-i).
- // The situation is something like this:
- // (example has i=4,j=8,len(s)=16,len(v)=6)
- // s: aaaaxxxxbbbbbbbbyy
- // ^ ^ ^ ^
- // i j len(s) tot
- // a: prefix of s
- // x: deleted range
- // b: more of s
- // y: area to expand into
-
- if !overlaps(r[i+len(v):], v) {
- // Easy, as v is not clobbered by the first copy.
- copy(r[i+len(v):], s[j:])
- copy(r[i:], v)
- return r
- }
-
- // This is a situation where we don't have a single place to which
- // we can copy v. Parts of it need to go to two different places.
- // We want to copy the prefix of v into y and the suffix into x, then
- // rotate |y| spots to the right.
- //
- // v[2:] v[:2]
- // | |
- // s: aaaavvvvbbbbbbbbvv
- // ^ ^ ^ ^
- // i j len(s) tot
- //
- // If either of those two destinations don't alias v, then we're good.
- y := len(v) - (j - i) // length of y portion
-
- if !overlaps(r[i:j], v) {
- copy(r[i:j], v[y:])
- copy(r[len(s):], v[:y])
- rotateRight(r[i:], y)
- return r
- }
- if !overlaps(r[len(s):], v) {
- copy(r[len(s):], v[:y])
- copy(r[i:j], v[y:])
- rotateRight(r[i:], y)
- return r
- }
-
- // Now we know that v overlaps both x and y.
- // That means that the entirety of b is *inside* v.
- // So we don't need to preserve b at all; instead we
- // can copy v first, then copy the b part of v out of
- // v to the right destination.
- k := startIdx(v, s[j:])
- copy(r[i:], v)
- copy(r[i+len(v):], r[i+k:])
- return r
-}
-
-// Clone returns a copy of the slice.
-// The elements are copied using assignment, so this is a shallow clone.
-func Clone[S ~[]E, E any](s S) S {
- // Preserve nil in case it matters.
- if s == nil {
- return nil
- }
- return append(S([]E{}), s...)
-}
-
-// Compact replaces consecutive runs of equal elements with a single copy.
-// This is like the uniq command found on Unix.
-// Compact modifies the contents of the slice s and returns the modified slice,
-// which may have a smaller length.
-// Compact zeroes the elements between the new length and the original length.
-func Compact[S ~[]E, E comparable](s S) S {
- if len(s) < 2 {
- return s
- }
- i := 1
- for k := 1; k < len(s); k++ {
- if s[k] != s[k-1] {
- if i != k {
- s[i] = s[k]
- }
- i++
- }
- }
- clearSlice(s[i:]) // zero/nil out the obsolete elements, for GC
- return s[:i]
-}
-
-// CompactFunc is like [Compact] but uses an equality function to compare elements.
-// For runs of elements that compare equal, CompactFunc keeps the first one.
-// CompactFunc zeroes the elements between the new length and the original length.
-func CompactFunc[S ~[]E, E any](s S, eq func(E, E) bool) S {
- if len(s) < 2 {
- return s
- }
- i := 1
- for k := 1; k < len(s); k++ {
- if !eq(s[k], s[k-1]) {
- if i != k {
- s[i] = s[k]
- }
- i++
- }
- }
- clearSlice(s[i:]) // zero/nil out the obsolete elements, for GC
- return s[:i]
-}
-
-// Grow increases the slice's capacity, if necessary, to guarantee space for
-// another n elements. After Grow(n), at least n elements can be appended
-// to the slice without another allocation. If n is negative or too large to
-// allocate the memory, Grow panics.
-func Grow[S ~[]E, E any](s S, n int) S {
- if n < 0 {
- panic("cannot be negative")
- }
- if n -= cap(s) - len(s); n > 0 {
- // TODO(https://go.dev/issue/53888): Make using []E instead of S
- // to workaround a compiler bug where the runtime.growslice optimization
- // does not take effect. Revert when the compiler is fixed.
- s = append([]E(s)[:cap(s)], make([]E, n)...)[:len(s)]
- }
- return s
-}
-
-// Clip removes unused capacity from the slice, returning s[:len(s):len(s)].
-func Clip[S ~[]E, E any](s S) S {
- return s[:len(s):len(s)]
-}
-
-// Rotation algorithm explanation:
-//
-// rotate left by 2
-// start with
-// 0123456789
-// split up like this
-// 01 234567 89
-// swap first 2 and last 2
-// 89 234567 01
-// join first parts
-// 89234567 01
-// recursively rotate first left part by 2
-// 23456789 01
-// join at the end
-// 2345678901
-//
-// rotate left by 8
-// start with
-// 0123456789
-// split up like this
-// 01 234567 89
-// swap first 2 and last 2
-// 89 234567 01
-// join last parts
-// 89 23456701
-// recursively rotate second part left by 6
-// 89 01234567
-// join at the end
-// 8901234567
-
-// TODO: There are other rotate algorithms.
-// This algorithm has the desirable property that it moves each element exactly twice.
-// The triple-reverse algorithm is simpler and more cache friendly, but takes more writes.
-// The follow-cycles algorithm can be 1-write but it is not very cache friendly.
-
-// rotateLeft rotates b left by n spaces.
-// s_final[i] = s_orig[i+r], wrapping around.
-func rotateLeft[E any](s []E, r int) {
- for r != 0 && r != len(s) {
- if r*2 <= len(s) {
- swap(s[:r], s[len(s)-r:])
- s = s[:len(s)-r]
- } else {
- swap(s[:len(s)-r], s[r:])
- s, r = s[len(s)-r:], r*2-len(s)
- }
- }
-}
-func rotateRight[E any](s []E, r int) {
- rotateLeft(s, len(s)-r)
-}
-
-// swap swaps the contents of x and y. x and y must be equal length and disjoint.
-func swap[E any](x, y []E) {
- for i := 0; i < len(x); i++ {
- x[i], y[i] = y[i], x[i]
- }
-}
-
-// overlaps reports whether the memory ranges a[0:len(a)] and b[0:len(b)] overlap.
-func overlaps[E any](a, b []E) bool {
- if len(a) == 0 || len(b) == 0 {
- return false
- }
- elemSize := unsafe.Sizeof(a[0])
- if elemSize == 0 {
- return false
- }
- // TODO: use a runtime/unsafe facility once one becomes available. See issue 12445.
- // Also see crypto/internal/alias/alias.go:AnyOverlap
- return uintptr(unsafe.Pointer(&a[0])) <= uintptr(unsafe.Pointer(&b[len(b)-1]))+(elemSize-1) &&
- uintptr(unsafe.Pointer(&b[0])) <= uintptr(unsafe.Pointer(&a[len(a)-1]))+(elemSize-1)
-}
-
-// startIdx returns the index in haystack where the needle starts.
-// prerequisite: the needle must be aliased entirely inside the haystack.
-func startIdx[E any](haystack, needle []E) int {
- p := &needle[0]
- for i := range haystack {
- if p == &haystack[i] {
- return i
- }
- }
- // TODO: what if the overlap is by a non-integral number of Es?
- panic("needle not found")
-}
-
-// Reverse reverses the elements of the slice in place.
-func Reverse[S ~[]E, E any](s S) {
- for i, j := 0, len(s)-1; i < j; i, j = i+1, j-1 {
- s[i], s[j] = s[j], s[i]
- }
-}
diff --git a/vendor/golang.org/x/exp/slices/sort.go b/vendor/golang.org/x/exp/slices/sort.go
deleted file mode 100644
index f58bbc7ba..000000000
--- a/vendor/golang.org/x/exp/slices/sort.go
+++ /dev/null
@@ -1,197 +0,0 @@
-// Copyright 2022 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-//go:generate go run $GOROOT/src/sort/gen_sort_variants.go -exp
-
-package slices
-
-import (
- "math/bits"
-
- "golang.org/x/exp/constraints"
-)
-
-// Sort sorts a slice of any ordered type in ascending order.
-// When sorting floating-point numbers, NaNs are ordered before other values.
-func Sort[S ~[]E, E constraints.Ordered](x S) {
- n := len(x)
- pdqsortOrdered(x, 0, n, bits.Len(uint(n)))
-}
-
-// SortFunc sorts the slice x in ascending order as determined by the cmp
-// function. This sort is not guaranteed to be stable.
-// cmp(a, b) should return a negative number when a < b, a positive number when
-// a > b and zero when a == b or when a is not comparable to b in the sense
-// of the formal definition of Strict Weak Ordering.
-//
-// SortFunc requires that cmp is a strict weak ordering.
-// See https://en.wikipedia.org/wiki/Weak_ordering#Strict_weak_orderings.
-// To indicate 'uncomparable', return 0 from the function.
-func SortFunc[S ~[]E, E any](x S, cmp func(a, b E) int) {
- n := len(x)
- pdqsortCmpFunc(x, 0, n, bits.Len(uint(n)), cmp)
-}
-
-// SortStableFunc sorts the slice x while keeping the original order of equal
-// elements, using cmp to compare elements in the same way as [SortFunc].
-func SortStableFunc[S ~[]E, E any](x S, cmp func(a, b E) int) {
- stableCmpFunc(x, len(x), cmp)
-}
-
-// IsSorted reports whether x is sorted in ascending order.
-func IsSorted[S ~[]E, E constraints.Ordered](x S) bool {
- for i := len(x) - 1; i > 0; i-- {
- if cmpLess(x[i], x[i-1]) {
- return false
- }
- }
- return true
-}
-
-// IsSortedFunc reports whether x is sorted in ascending order, with cmp as the
-// comparison function as defined by [SortFunc].
-func IsSortedFunc[S ~[]E, E any](x S, cmp func(a, b E) int) bool {
- for i := len(x) - 1; i > 0; i-- {
- if cmp(x[i], x[i-1]) < 0 {
- return false
- }
- }
- return true
-}
-
-// Min returns the minimal value in x. It panics if x is empty.
-// For floating-point numbers, Min propagates NaNs (any NaN value in x
-// forces the output to be NaN).
-func Min[S ~[]E, E constraints.Ordered](x S) E {
- if len(x) < 1 {
- panic("slices.Min: empty list")
- }
- m := x[0]
- for i := 1; i < len(x); i++ {
- m = min(m, x[i])
- }
- return m
-}
-
-// MinFunc returns the minimal value in x, using cmp to compare elements.
-// It panics if x is empty. If there is more than one minimal element
-// according to the cmp function, MinFunc returns the first one.
-func MinFunc[S ~[]E, E any](x S, cmp func(a, b E) int) E {
- if len(x) < 1 {
- panic("slices.MinFunc: empty list")
- }
- m := x[0]
- for i := 1; i < len(x); i++ {
- if cmp(x[i], m) < 0 {
- m = x[i]
- }
- }
- return m
-}
-
-// Max returns the maximal value in x. It panics if x is empty.
-// For floating-point E, Max propagates NaNs (any NaN value in x
-// forces the output to be NaN).
-func Max[S ~[]E, E constraints.Ordered](x S) E {
- if len(x) < 1 {
- panic("slices.Max: empty list")
- }
- m := x[0]
- for i := 1; i < len(x); i++ {
- m = max(m, x[i])
- }
- return m
-}
-
-// MaxFunc returns the maximal value in x, using cmp to compare elements.
-// It panics if x is empty. If there is more than one maximal element
-// according to the cmp function, MaxFunc returns the first one.
-func MaxFunc[S ~[]E, E any](x S, cmp func(a, b E) int) E {
- if len(x) < 1 {
- panic("slices.MaxFunc: empty list")
- }
- m := x[0]
- for i := 1; i < len(x); i++ {
- if cmp(x[i], m) > 0 {
- m = x[i]
- }
- }
- return m
-}
-
-// BinarySearch searches for target in a sorted slice and returns the position
-// where target is found, or the position where target would appear in the
-// sort order; it also returns a bool saying whether the target is really found
-// in the slice. The slice must be sorted in increasing order.
-func BinarySearch[S ~[]E, E constraints.Ordered](x S, target E) (int, bool) {
- // Inlining is faster than calling BinarySearchFunc with a lambda.
- n := len(x)
- // Define x[-1] < target and x[n] >= target.
- // Invariant: x[i-1] < target, x[j] >= target.
- i, j := 0, n
- for i < j {
- h := int(uint(i+j) >> 1) // avoid overflow when computing h
- // i ≤ h < j
- if cmpLess(x[h], target) {
- i = h + 1 // preserves x[i-1] < target
- } else {
- j = h // preserves x[j] >= target
- }
- }
- // i == j, x[i-1] < target, and x[j] (= x[i]) >= target => answer is i.
- return i, i < n && (x[i] == target || (isNaN(x[i]) && isNaN(target)))
-}
-
-// BinarySearchFunc works like [BinarySearch], but uses a custom comparison
-// function. The slice must be sorted in increasing order, where "increasing"
-// is defined by cmp. cmp should return 0 if the slice element matches
-// the target, a negative number if the slice element precedes the target,
-// or a positive number if the slice element follows the target.
-// cmp must implement the same ordering as the slice, such that if
-// cmp(a, t) < 0 and cmp(b, t) >= 0, then a must precede b in the slice.
-func BinarySearchFunc[S ~[]E, E, T any](x S, target T, cmp func(E, T) int) (int, bool) {
- n := len(x)
- // Define cmp(x[-1], target) < 0 and cmp(x[n], target) >= 0 .
- // Invariant: cmp(x[i - 1], target) < 0, cmp(x[j], target) >= 0.
- i, j := 0, n
- for i < j {
- h := int(uint(i+j) >> 1) // avoid overflow when computing h
- // i ≤ h < j
- if cmp(x[h], target) < 0 {
- i = h + 1 // preserves cmp(x[i - 1], target) < 0
- } else {
- j = h // preserves cmp(x[j], target) >= 0
- }
- }
- // i == j, cmp(x[i-1], target) < 0, and cmp(x[j], target) (= cmp(x[i], target)) >= 0 => answer is i.
- return i, i < n && cmp(x[i], target) == 0
-}
-
-type sortedHint int // hint for pdqsort when choosing the pivot
-
-const (
- unknownHint sortedHint = iota
- increasingHint
- decreasingHint
-)
-
-// xorshift paper: https://www.jstatsoft.org/article/view/v008i14/xorshift.pdf
-type xorshift uint64
-
-func (r *xorshift) Next() uint64 {
- *r ^= *r << 13
- *r ^= *r >> 17
- *r ^= *r << 5
- return uint64(*r)
-}
-
-func nextPowerOfTwo(length int) uint {
- return 1 << bits.Len(uint(length))
-}
-
-// isNaN reports whether x is a NaN without requiring the math package.
-// This will always return false if T is not floating-point.
-func isNaN[T constraints.Ordered](x T) bool {
- return x != x
-}
diff --git a/vendor/golang.org/x/exp/slices/zsortanyfunc.go b/vendor/golang.org/x/exp/slices/zsortanyfunc.go
deleted file mode 100644
index 06f2c7a24..000000000
--- a/vendor/golang.org/x/exp/slices/zsortanyfunc.go
+++ /dev/null
@@ -1,479 +0,0 @@
-// Code generated by gen_sort_variants.go; DO NOT EDIT.
-
-// Copyright 2022 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package slices
-
-// insertionSortCmpFunc sorts data[a:b] using insertion sort.
-func insertionSortCmpFunc[E any](data []E, a, b int, cmp func(a, b E) int) {
- for i := a + 1; i < b; i++ {
- for j := i; j > a && (cmp(data[j], data[j-1]) < 0); j-- {
- data[j], data[j-1] = data[j-1], data[j]
- }
- }
-}
-
-// siftDownCmpFunc implements the heap property on data[lo:hi].
-// first is an offset into the array where the root of the heap lies.
-func siftDownCmpFunc[E any](data []E, lo, hi, first int, cmp func(a, b E) int) {
- root := lo
- for {
- child := 2*root + 1
- if child >= hi {
- break
- }
- if child+1 < hi && (cmp(data[first+child], data[first+child+1]) < 0) {
- child++
- }
- if !(cmp(data[first+root], data[first+child]) < 0) {
- return
- }
- data[first+root], data[first+child] = data[first+child], data[first+root]
- root = child
- }
-}
-
-func heapSortCmpFunc[E any](data []E, a, b int, cmp func(a, b E) int) {
- first := a
- lo := 0
- hi := b - a
-
- // Build heap with greatest element at top.
- for i := (hi - 1) / 2; i >= 0; i-- {
- siftDownCmpFunc(data, i, hi, first, cmp)
- }
-
- // Pop elements, largest first, into end of data.
- for i := hi - 1; i >= 0; i-- {
- data[first], data[first+i] = data[first+i], data[first]
- siftDownCmpFunc(data, lo, i, first, cmp)
- }
-}
-
-// pdqsortCmpFunc sorts data[a:b].
-// The algorithm based on pattern-defeating quicksort(pdqsort), but without the optimizations from BlockQuicksort.
-// pdqsort paper: https://arxiv.org/pdf/2106.05123.pdf
-// C++ implementation: https://github.com/orlp/pdqsort
-// Rust implementation: https://docs.rs/pdqsort/latest/pdqsort/
-// limit is the number of allowed bad (very unbalanced) pivots before falling back to heapsort.
-func pdqsortCmpFunc[E any](data []E, a, b, limit int, cmp func(a, b E) int) {
- const maxInsertion = 12
-
- var (
- wasBalanced = true // whether the last partitioning was reasonably balanced
- wasPartitioned = true // whether the slice was already partitioned
- )
-
- for {
- length := b - a
-
- if length <= maxInsertion {
- insertionSortCmpFunc(data, a, b, cmp)
- return
- }
-
- // Fall back to heapsort if too many bad choices were made.
- if limit == 0 {
- heapSortCmpFunc(data, a, b, cmp)
- return
- }
-
- // If the last partitioning was imbalanced, we need to breaking patterns.
- if !wasBalanced {
- breakPatternsCmpFunc(data, a, b, cmp)
- limit--
- }
-
- pivot, hint := choosePivotCmpFunc(data, a, b, cmp)
- if hint == decreasingHint {
- reverseRangeCmpFunc(data, a, b, cmp)
- // The chosen pivot was pivot-a elements after the start of the array.
- // After reversing it is pivot-a elements before the end of the array.
- // The idea came from Rust's implementation.
- pivot = (b - 1) - (pivot - a)
- hint = increasingHint
- }
-
- // The slice is likely already sorted.
- if wasBalanced && wasPartitioned && hint == increasingHint {
- if partialInsertionSortCmpFunc(data, a, b, cmp) {
- return
- }
- }
-
- // Probably the slice contains many duplicate elements, partition the slice into
- // elements equal to and elements greater than the pivot.
- if a > 0 && !(cmp(data[a-1], data[pivot]) < 0) {
- mid := partitionEqualCmpFunc(data, a, b, pivot, cmp)
- a = mid
- continue
- }
-
- mid, alreadyPartitioned := partitionCmpFunc(data, a, b, pivot, cmp)
- wasPartitioned = alreadyPartitioned
-
- leftLen, rightLen := mid-a, b-mid
- balanceThreshold := length / 8
- if leftLen < rightLen {
- wasBalanced = leftLen >= balanceThreshold
- pdqsortCmpFunc(data, a, mid, limit, cmp)
- a = mid + 1
- } else {
- wasBalanced = rightLen >= balanceThreshold
- pdqsortCmpFunc(data, mid+1, b, limit, cmp)
- b = mid
- }
- }
-}
-
-// partitionCmpFunc does one quicksort partition.
-// Let p = data[pivot]
-// Moves elements in data[a:b] around, so that data[i]<p and data[j]>=p for i<newpivot and j>newpivot.
-// On return, data[newpivot] = p
-func partitionCmpFunc[E any](data []E, a, b, pivot int, cmp func(a, b E) int) (newpivot int, alreadyPartitioned bool) {
- data[a], data[pivot] = data[pivot], data[a]
- i, j := a+1, b-1 // i and j are inclusive of the elements remaining to be partitioned
-
- for i <= j && (cmp(data[i], data[a]) < 0) {
- i++
- }
- for i <= j && !(cmp(data[j], data[a]) < 0) {
- j--
- }
- if i > j {
- data[j], data[a] = data[a], data[j]
- return j, true
- }
- data[i], data[j] = data[j], data[i]
- i++
- j--
-
- for {
- for i <= j && (cmp(data[i], data[a]) < 0) {
- i++
- }
- for i <= j && !(cmp(data[j], data[a]) < 0) {
- j--
- }
- if i > j {
- break
- }
- data[i], data[j] = data[j], data[i]
- i++
- j--
- }
- data[j], data[a] = data[a], data[j]
- return j, false
-}
-
-// partitionEqualCmpFunc partitions data[a:b] into elements equal to data[pivot] followed by elements greater than data[pivot].
-// It assumed that data[a:b] does not contain elements smaller than the data[pivot].
-func partitionEqualCmpFunc[E any](data []E, a, b, pivot int, cmp func(a, b E) int) (newpivot int) {
- data[a], data[pivot] = data[pivot], data[a]
- i, j := a+1, b-1 // i and j are inclusive of the elements remaining to be partitioned
-
- for {
- for i <= j && !(cmp(data[a], data[i]) < 0) {
- i++
- }
- for i <= j && (cmp(data[a], data[j]) < 0) {
- j--
- }
- if i > j {
- break
- }
- data[i], data[j] = data[j], data[i]
- i++
- j--
- }
- return i
-}
-
-// partialInsertionSortCmpFunc partially sorts a slice, returns true if the slice is sorted at the end.
-func partialInsertionSortCmpFunc[E any](data []E, a, b int, cmp func(a, b E) int) bool {
- const (
- maxSteps = 5 // maximum number of adjacent out-of-order pairs that will get shifted
- shortestShifting = 50 // don't shift any elements on short arrays
- )
- i := a + 1
- for j := 0; j < maxSteps; j++ {
- for i < b && !(cmp(data[i], data[i-1]) < 0) {
- i++
- }
-
- if i == b {
- return true
- }
-
- if b-a < shortestShifting {
- return false
- }
-
- data[i], data[i-1] = data[i-1], data[i]
-
- // Shift the smaller one to the left.
- if i-a >= 2 {
- for j := i - 1; j >= 1; j-- {
- if !(cmp(data[j], data[j-1]) < 0) {
- break
- }
- data[j], data[j-1] = data[j-1], data[j]
- }
- }
- // Shift the greater one to the right.
- if b-i >= 2 {
- for j := i + 1; j < b; j++ {
- if !(cmp(data[j], data[j-1]) < 0) {
- break
- }
- data[j], data[j-1] = data[j-1], data[j]
- }
- }
- }
- return false
-}
-
-// breakPatternsCmpFunc scatters some elements around in an attempt to break some patterns
-// that might cause imbalanced partitions in quicksort.
-func breakPatternsCmpFunc[E any](data []E, a, b int, cmp func(a, b E) int) {
- length := b - a
- if length >= 8 {
- random := xorshift(length)
- modulus := nextPowerOfTwo(length)
-
- for idx := a + (length/4)*2 - 1; idx <= a+(length/4)*2+1; idx++ {
- other := int(uint(random.Next()) & (modulus - 1))
- if other >= length {
- other -= length
- }
- data[idx], data[a+other] = data[a+other], data[idx]
- }
- }
-}
-
-// choosePivotCmpFunc chooses a pivot in data[a:b].
-//
-// [0,8): chooses a static pivot.
-// [8,shortestNinther): uses the simple median-of-three method.
-// [shortestNinther,∞): uses the Tukey ninther method.
-func choosePivotCmpFunc[E any](data []E, a, b int, cmp func(a, b E) int) (pivot int, hint sortedHint) {
- const (
- shortestNinther = 50
- maxSwaps = 4 * 3
- )
-
- l := b - a
-
- var (
- swaps int
- i = a + l/4*1
- j = a + l/4*2
- k = a + l/4*3
- )
-
- if l >= 8 {
- if l >= shortestNinther {
- // Tukey ninther method, the idea came from Rust's implementation.
- i = medianAdjacentCmpFunc(data, i, &swaps, cmp)
- j = medianAdjacentCmpFunc(data, j, &swaps, cmp)
- k = medianAdjacentCmpFunc(data, k, &swaps, cmp)
- }
- // Find the median among i, j, k and stores it into j.
- j = medianCmpFunc(data, i, j, k, &swaps, cmp)
- }
-
- switch swaps {
- case 0:
- return j, increasingHint
- case maxSwaps:
- return j, decreasingHint
- default:
- return j, unknownHint
- }
-}
-
-// order2CmpFunc returns x,y where data[x] <= data[y], where x,y=a,b or x,y=b,a.
-func order2CmpFunc[E any](data []E, a, b int, swaps *int, cmp func(a, b E) int) (int, int) {
- if cmp(data[b], data[a]) < 0 {
- *swaps++
- return b, a
- }
- return a, b
-}
-
-// medianCmpFunc returns x where data[x] is the median of data[a],data[b],data[c], where x is a, b, or c.
-func medianCmpFunc[E any](data []E, a, b, c int, swaps *int, cmp func(a, b E) int) int {
- a, b = order2CmpFunc(data, a, b, swaps, cmp)
- b, c = order2CmpFunc(data, b, c, swaps, cmp)
- a, b = order2CmpFunc(data, a, b, swaps, cmp)
- return b
-}
-
-// medianAdjacentCmpFunc finds the median of data[a - 1], data[a], data[a + 1] and stores the index into a.
-func medianAdjacentCmpFunc[E any](data []E, a int, swaps *int, cmp func(a, b E) int) int {
- return medianCmpFunc(data, a-1, a, a+1, swaps, cmp)
-}
-
-func reverseRangeCmpFunc[E any](data []E, a, b int, cmp func(a, b E) int) {
- i := a
- j := b - 1
- for i < j {
- data[i], data[j] = data[j], data[i]
- i++
- j--
- }
-}
-
-func swapRangeCmpFunc[E any](data []E, a, b, n int, cmp func(a, b E) int) {
- for i := 0; i < n; i++ {
- data[a+i], data[b+i] = data[b+i], data[a+i]
- }
-}
-
-func stableCmpFunc[E any](data []E, n int, cmp func(a, b E) int) {
- blockSize := 20 // must be > 0
- a, b := 0, blockSize
- for b <= n {
- insertionSortCmpFunc(data, a, b, cmp)
- a = b
- b += blockSize
- }
- insertionSortCmpFunc(data, a, n, cmp)
-
- for blockSize < n {
- a, b = 0, 2*blockSize
- for b <= n {
- symMergeCmpFunc(data, a, a+blockSize, b, cmp)
- a = b
- b += 2 * blockSize
- }
- if m := a + blockSize; m < n {
- symMergeCmpFunc(data, a, m, n, cmp)
- }
- blockSize *= 2
- }
-}
-
-// symMergeCmpFunc merges the two sorted subsequences data[a:m] and data[m:b] using
-// the SymMerge algorithm from Pok-Son Kim and Arne Kutzner, "Stable Minimum
-// Storage Merging by Symmetric Comparisons", in Susanne Albers and Tomasz
-// Radzik, editors, Algorithms - ESA 2004, volume 3221 of Lecture Notes in
-// Computer Science, pages 714-723. Springer, 2004.
-//
-// Let M = m-a and N = b-n. Wolog M < N.
-// The recursion depth is bound by ceil(log(N+M)).
-// The algorithm needs O(M*log(N/M + 1)) calls to data.Less.
-// The algorithm needs O((M+N)*log(M)) calls to data.Swap.
-//
-// The paper gives O((M+N)*log(M)) as the number of assignments assuming a
-// rotation algorithm which uses O(M+N+gcd(M+N)) assignments. The argumentation
-// in the paper carries through for Swap operations, especially as the block
-// swapping rotate uses only O(M+N) Swaps.
-//
-// symMerge assumes non-degenerate arguments: a < m && m < b.
-// Having the caller check this condition eliminates many leaf recursion calls,
-// which improves performance.
-func symMergeCmpFunc[E any](data []E, a, m, b int, cmp func(a, b E) int) {
- // Avoid unnecessary recursions of symMerge
- // by direct insertion of data[a] into data[m:b]
- // if data[a:m] only contains one element.
- if m-a == 1 {
- // Use binary search to find the lowest index i
- // such that data[i] >= data[a] for m <= i < b.
- // Exit the search loop with i == b in case no such index exists.
- i := m
- j := b
- for i < j {
- h := int(uint(i+j) >> 1)
- if cmp(data[h], data[a]) < 0 {
- i = h + 1
- } else {
- j = h
- }
- }
- // Swap values until data[a] reaches the position before i.
- for k := a; k < i-1; k++ {
- data[k], data[k+1] = data[k+1], data[k]
- }
- return
- }
-
- // Avoid unnecessary recursions of symMerge
- // by direct insertion of data[m] into data[a:m]
- // if data[m:b] only contains one element.
- if b-m == 1 {
- // Use binary search to find the lowest index i
- // such that data[i] > data[m] for a <= i < m.
- // Exit the search loop with i == m in case no such index exists.
- i := a
- j := m
- for i < j {
- h := int(uint(i+j) >> 1)
- if !(cmp(data[m], data[h]) < 0) {
- i = h + 1
- } else {
- j = h
- }
- }
- // Swap values until data[m] reaches the position i.
- for k := m; k > i; k-- {
- data[k], data[k-1] = data[k-1], data[k]
- }
- return
- }
-
- mid := int(uint(a+b) >> 1)
- n := mid + m
- var start, r int
- if m > mid {
- start = n - b
- r = mid
- } else {
- start = a
- r = m
- }
- p := n - 1
-
- for start < r {
- c := int(uint(start+r) >> 1)
- if !(cmp(data[p-c], data[c]) < 0) {
- start = c + 1
- } else {
- r = c
- }
- }
-
- end := n - start
- if start < m && m < end {
- rotateCmpFunc(data, start, m, end, cmp)
- }
- if a < start && start < mid {
- symMergeCmpFunc(data, a, start, mid, cmp)
- }
- if mid < end && end < b {
- symMergeCmpFunc(data, mid, end, b, cmp)
- }
-}
-
-// rotateCmpFunc rotates two consecutive blocks u = data[a:m] and v = data[m:b] in data:
-// Data of the form 'x u v y' is changed to 'x v u y'.
-// rotate performs at most b-a many calls to data.Swap,
-// and it assumes non-degenerate arguments: a < m && m < b.
-func rotateCmpFunc[E any](data []E, a, m, b int, cmp func(a, b E) int) {
- i := m - a
- j := b - m
-
- for i != j {
- if i > j {
- swapRangeCmpFunc(data, m-i, m, j, cmp)
- i -= j
- } else {
- swapRangeCmpFunc(data, m-i, m+j-i, i, cmp)
- j -= i
- }
- }
- // i == j
- swapRangeCmpFunc(data, m-i, m, i, cmp)
-}
diff --git a/vendor/golang.org/x/exp/slices/zsortordered.go b/vendor/golang.org/x/exp/slices/zsortordered.go
deleted file mode 100644
index 99b47c398..000000000
--- a/vendor/golang.org/x/exp/slices/zsortordered.go
+++ /dev/null
@@ -1,481 +0,0 @@
-// Code generated by gen_sort_variants.go; DO NOT EDIT.
-
-// Copyright 2022 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package slices
-
-import "golang.org/x/exp/constraints"
-
-// insertionSortOrdered sorts data[a:b] using insertion sort.
-func insertionSortOrdered[E constraints.Ordered](data []E, a, b int) {
- for i := a + 1; i < b; i++ {
- for j := i; j > a && cmpLess(data[j], data[j-1]); j-- {
- data[j], data[j-1] = data[j-1], data[j]
- }
- }
-}
-
-// siftDownOrdered implements the heap property on data[lo:hi].
-// first is an offset into the array where the root of the heap lies.
-func siftDownOrdered[E constraints.Ordered](data []E, lo, hi, first int) {
- root := lo
- for {
- child := 2*root + 1
- if child >= hi {
- break
- }
- if child+1 < hi && cmpLess(data[first+child], data[first+child+1]) {
- child++
- }
- if !cmpLess(data[first+root], data[first+child]) {
- return
- }
- data[first+root], data[first+child] = data[first+child], data[first+root]
- root = child
- }
-}
-
-func heapSortOrdered[E constraints.Ordered](data []E, a, b int) {
- first := a
- lo := 0
- hi := b - a
-
- // Build heap with greatest element at top.
- for i := (hi - 1) / 2; i >= 0; i-- {
- siftDownOrdered(data, i, hi, first)
- }
-
- // Pop elements, largest first, into end of data.
- for i := hi - 1; i >= 0; i-- {
- data[first], data[first+i] = data[first+i], data[first]
- siftDownOrdered(data, lo, i, first)
- }
-}
-
-// pdqsortOrdered sorts data[a:b].
-// The algorithm based on pattern-defeating quicksort(pdqsort), but without the optimizations from BlockQuicksort.
-// pdqsort paper: https://arxiv.org/pdf/2106.05123.pdf
-// C++ implementation: https://github.com/orlp/pdqsort
-// Rust implementation: https://docs.rs/pdqsort/latest/pdqsort/
-// limit is the number of allowed bad (very unbalanced) pivots before falling back to heapsort.
-func pdqsortOrdered[E constraints.Ordered](data []E, a, b, limit int) {
- const maxInsertion = 12
-
- var (
- wasBalanced = true // whether the last partitioning was reasonably balanced
- wasPartitioned = true // whether the slice was already partitioned
- )
-
- for {
- length := b - a
-
- if length <= maxInsertion {
- insertionSortOrdered(data, a, b)
- return
- }
-
- // Fall back to heapsort if too many bad choices were made.
- if limit == 0 {
- heapSortOrdered(data, a, b)
- return
- }
-
- // If the last partitioning was imbalanced, we need to breaking patterns.
- if !wasBalanced {
- breakPatternsOrdered(data, a, b)
- limit--
- }
-
- pivot, hint := choosePivotOrdered(data, a, b)
- if hint == decreasingHint {
- reverseRangeOrdered(data, a, b)
- // The chosen pivot was pivot-a elements after the start of the array.
- // After reversing it is pivot-a elements before the end of the array.
- // The idea came from Rust's implementation.
- pivot = (b - 1) - (pivot - a)
- hint = increasingHint
- }
-
- // The slice is likely already sorted.
- if wasBalanced && wasPartitioned && hint == increasingHint {
- if partialInsertionSortOrdered(data, a, b) {
- return
- }
- }
-
- // Probably the slice contains many duplicate elements, partition the slice into
- // elements equal to and elements greater than the pivot.
- if a > 0 && !cmpLess(data[a-1], data[pivot]) {
- mid := partitionEqualOrdered(data, a, b, pivot)
- a = mid
- continue
- }
-
- mid, alreadyPartitioned := partitionOrdered(data, a, b, pivot)
- wasPartitioned = alreadyPartitioned
-
- leftLen, rightLen := mid-a, b-mid
- balanceThreshold := length / 8
- if leftLen < rightLen {
- wasBalanced = leftLen >= balanceThreshold
- pdqsortOrdered(data, a, mid, limit)
- a = mid + 1
- } else {
- wasBalanced = rightLen >= balanceThreshold
- pdqsortOrdered(data, mid+1, b, limit)
- b = mid
- }
- }
-}
-
-// partitionOrdered does one quicksort partition.
-// Let p = data[pivot]
-// Moves elements in data[a:b] around, so that data[i]<p and data[j]>=p for i<newpivot and j>newpivot.
-// On return, data[newpivot] = p
-func partitionOrdered[E constraints.Ordered](data []E, a, b, pivot int) (newpivot int, alreadyPartitioned bool) {
- data[a], data[pivot] = data[pivot], data[a]
- i, j := a+1, b-1 // i and j are inclusive of the elements remaining to be partitioned
-
- for i <= j && cmpLess(data[i], data[a]) {
- i++
- }
- for i <= j && !cmpLess(data[j], data[a]) {
- j--
- }
- if i > j {
- data[j], data[a] = data[a], data[j]
- return j, true
- }
- data[i], data[j] = data[j], data[i]
- i++
- j--
-
- for {
- for i <= j && cmpLess(data[i], data[a]) {
- i++
- }
- for i <= j && !cmpLess(data[j], data[a]) {
- j--
- }
- if i > j {
- break
- }
- data[i], data[j] = data[j], data[i]
- i++
- j--
- }
- data[j], data[a] = data[a], data[j]
- return j, false
-}
-
-// partitionEqualOrdered partitions data[a:b] into elements equal to data[pivot] followed by elements greater than data[pivot].
-// It assumed that data[a:b] does not contain elements smaller than the data[pivot].
-func partitionEqualOrdered[E constraints.Ordered](data []E, a, b, pivot int) (newpivot int) {
- data[a], data[pivot] = data[pivot], data[a]
- i, j := a+1, b-1 // i and j are inclusive of the elements remaining to be partitioned
-
- for {
- for i <= j && !cmpLess(data[a], data[i]) {
- i++
- }
- for i <= j && cmpLess(data[a], data[j]) {
- j--
- }
- if i > j {
- break
- }
- data[i], data[j] = data[j], data[i]
- i++
- j--
- }
- return i
-}
-
-// partialInsertionSortOrdered partially sorts a slice, returns true if the slice is sorted at the end.
-func partialInsertionSortOrdered[E constraints.Ordered](data []E, a, b int) bool {
- const (
- maxSteps = 5 // maximum number of adjacent out-of-order pairs that will get shifted
- shortestShifting = 50 // don't shift any elements on short arrays
- )
- i := a + 1
- for j := 0; j < maxSteps; j++ {
- for i < b && !cmpLess(data[i], data[i-1]) {
- i++
- }
-
- if i == b {
- return true
- }
-
- if b-a < shortestShifting {
- return false
- }
-
- data[i], data[i-1] = data[i-1], data[i]
-
- // Shift the smaller one to the left.
- if i-a >= 2 {
- for j := i - 1; j >= 1; j-- {
- if !cmpLess(data[j], data[j-1]) {
- break
- }
- data[j], data[j-1] = data[j-1], data[j]
- }
- }
- // Shift the greater one to the right.
- if b-i >= 2 {
- for j := i + 1; j < b; j++ {
- if !cmpLess(data[j], data[j-1]) {
- break
- }
- data[j], data[j-1] = data[j-1], data[j]
- }
- }
- }
- return false
-}
-
-// breakPatternsOrdered scatters some elements around in an attempt to break some patterns
-// that might cause imbalanced partitions in quicksort.
-func breakPatternsOrdered[E constraints.Ordered](data []E, a, b int) {
- length := b - a
- if length >= 8 {
- random := xorshift(length)
- modulus := nextPowerOfTwo(length)
-
- for idx := a + (length/4)*2 - 1; idx <= a+(length/4)*2+1; idx++ {
- other := int(uint(random.Next()) & (modulus - 1))
- if other >= length {
- other -= length
- }
- data[idx], data[a+other] = data[a+other], data[idx]
- }
- }
-}
-
-// choosePivotOrdered chooses a pivot in data[a:b].
-//
-// [0,8): chooses a static pivot.
-// [8,shortestNinther): uses the simple median-of-three method.
-// [shortestNinther,∞): uses the Tukey ninther method.
-func choosePivotOrdered[E constraints.Ordered](data []E, a, b int) (pivot int, hint sortedHint) {
- const (
- shortestNinther = 50
- maxSwaps = 4 * 3
- )
-
- l := b - a
-
- var (
- swaps int
- i = a + l/4*1
- j = a + l/4*2
- k = a + l/4*3
- )
-
- if l >= 8 {
- if l >= shortestNinther {
- // Tukey ninther method, the idea came from Rust's implementation.
- i = medianAdjacentOrdered(data, i, &swaps)
- j = medianAdjacentOrdered(data, j, &swaps)
- k = medianAdjacentOrdered(data, k, &swaps)
- }
- // Find the median among i, j, k and stores it into j.
- j = medianOrdered(data, i, j, k, &swaps)
- }
-
- switch swaps {
- case 0:
- return j, increasingHint
- case maxSwaps:
- return j, decreasingHint
- default:
- return j, unknownHint
- }
-}
-
-// order2Ordered returns x,y where data[x] <= data[y], where x,y=a,b or x,y=b,a.
-func order2Ordered[E constraints.Ordered](data []E, a, b int, swaps *int) (int, int) {
- if cmpLess(data[b], data[a]) {
- *swaps++
- return b, a
- }
- return a, b
-}
-
-// medianOrdered returns x where data[x] is the median of data[a],data[b],data[c], where x is a, b, or c.
-func medianOrdered[E constraints.Ordered](data []E, a, b, c int, swaps *int) int {
- a, b = order2Ordered(data, a, b, swaps)
- b, c = order2Ordered(data, b, c, swaps)
- a, b = order2Ordered(data, a, b, swaps)
- return b
-}
-
-// medianAdjacentOrdered finds the median of data[a - 1], data[a], data[a + 1] and stores the index into a.
-func medianAdjacentOrdered[E constraints.Ordered](data []E, a int, swaps *int) int {
- return medianOrdered(data, a-1, a, a+1, swaps)
-}
-
-func reverseRangeOrdered[E constraints.Ordered](data []E, a, b int) {
- i := a
- j := b - 1
- for i < j {
- data[i], data[j] = data[j], data[i]
- i++
- j--
- }
-}
-
-func swapRangeOrdered[E constraints.Ordered](data []E, a, b, n int) {
- for i := 0; i < n; i++ {
- data[a+i], data[b+i] = data[b+i], data[a+i]
- }
-}
-
-func stableOrdered[E constraints.Ordered](data []E, n int) {
- blockSize := 20 // must be > 0
- a, b := 0, blockSize
- for b <= n {
- insertionSortOrdered(data, a, b)
- a = b
- b += blockSize
- }
- insertionSortOrdered(data, a, n)
-
- for blockSize < n {
- a, b = 0, 2*blockSize
- for b <= n {
- symMergeOrdered(data, a, a+blockSize, b)
- a = b
- b += 2 * blockSize
- }
- if m := a + blockSize; m < n {
- symMergeOrdered(data, a, m, n)
- }
- blockSize *= 2
- }
-}
-
-// symMergeOrdered merges the two sorted subsequences data[a:m] and data[m:b] using
-// the SymMerge algorithm from Pok-Son Kim and Arne Kutzner, "Stable Minimum
-// Storage Merging by Symmetric Comparisons", in Susanne Albers and Tomasz
-// Radzik, editors, Algorithms - ESA 2004, volume 3221 of Lecture Notes in
-// Computer Science, pages 714-723. Springer, 2004.
-//
-// Let M = m-a and N = b-n. Wolog M < N.
-// The recursion depth is bound by ceil(log(N+M)).
-// The algorithm needs O(M*log(N/M + 1)) calls to data.Less.
-// The algorithm needs O((M+N)*log(M)) calls to data.Swap.
-//
-// The paper gives O((M+N)*log(M)) as the number of assignments assuming a
-// rotation algorithm which uses O(M+N+gcd(M+N)) assignments. The argumentation
-// in the paper carries through for Swap operations, especially as the block
-// swapping rotate uses only O(M+N) Swaps.
-//
-// symMerge assumes non-degenerate arguments: a < m && m < b.
-// Having the caller check this condition eliminates many leaf recursion calls,
-// which improves performance.
-func symMergeOrdered[E constraints.Ordered](data []E, a, m, b int) {
- // Avoid unnecessary recursions of symMerge
- // by direct insertion of data[a] into data[m:b]
- // if data[a:m] only contains one element.
- if m-a == 1 {
- // Use binary search to find the lowest index i
- // such that data[i] >= data[a] for m <= i < b.
- // Exit the search loop with i == b in case no such index exists.
- i := m
- j := b
- for i < j {
- h := int(uint(i+j) >> 1)
- if cmpLess(data[h], data[a]) {
- i = h + 1
- } else {
- j = h
- }
- }
- // Swap values until data[a] reaches the position before i.
- for k := a; k < i-1; k++ {
- data[k], data[k+1] = data[k+1], data[k]
- }
- return
- }
-
- // Avoid unnecessary recursions of symMerge
- // by direct insertion of data[m] into data[a:m]
- // if data[m:b] only contains one element.
- if b-m == 1 {
- // Use binary search to find the lowest index i
- // such that data[i] > data[m] for a <= i < m.
- // Exit the search loop with i == m in case no such index exists.
- i := a
- j := m
- for i < j {
- h := int(uint(i+j) >> 1)
- if !cmpLess(data[m], data[h]) {
- i = h + 1
- } else {
- j = h
- }
- }
- // Swap values until data[m] reaches the position i.
- for k := m; k > i; k-- {
- data[k], data[k-1] = data[k-1], data[k]
- }
- return
- }
-
- mid := int(uint(a+b) >> 1)
- n := mid + m
- var start, r int
- if m > mid {
- start = n - b
- r = mid
- } else {
- start = a
- r = m
- }
- p := n - 1
-
- for start < r {
- c := int(uint(start+r) >> 1)
- if !cmpLess(data[p-c], data[c]) {
- start = c + 1
- } else {
- r = c
- }
- }
-
- end := n - start
- if start < m && m < end {
- rotateOrdered(data, start, m, end)
- }
- if a < start && start < mid {
- symMergeOrdered(data, a, start, mid)
- }
- if mid < end && end < b {
- symMergeOrdered(data, mid, end, b)
- }
-}
-
-// rotateOrdered rotates two consecutive blocks u = data[a:m] and v = data[m:b] in data:
-// Data of the form 'x u v y' is changed to 'x v u y'.
-// rotate performs at most b-a many calls to data.Swap,
-// and it assumes non-degenerate arguments: a < m && m < b.
-func rotateOrdered[E constraints.Ordered](data []E, a, m, b int) {
- i := m - a
- j := b - m
-
- for i != j {
- if i > j {
- swapRangeOrdered(data, m-i, m, j)
- i -= j
- } else {
- swapRangeOrdered(data, m-i, m+j-i, i)
- j -= i
- }
- }
- // i == j
- swapRangeOrdered(data, m-i, m, i)
-}
diff --git a/vendor/golang.org/x/exp/typeparams/LICENSE b/vendor/golang.org/x/exp/typeparams/LICENSE
index 6a66aea5e..2a7cf70da 100644
--- a/vendor/golang.org/x/exp/typeparams/LICENSE
+++ b/vendor/golang.org/x/exp/typeparams/LICENSE
@@ -1,4 +1,4 @@
-Copyright (c) 2009 The Go Authors. All rights reserved.
+Copyright 2009 The Go Authors.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
@@ -10,7 +10,7 @@ notice, this list of conditions and the following disclaimer.
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
- * Neither the name of Google Inc. nor the names of its
+ * Neither the name of Google LLC nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
diff --git a/vendor/golang.org/x/net/http2/client_conn_pool.go b/vendor/golang.org/x/net/http2/client_conn_pool.go
index 780968d6c..e81b73e6a 100644
--- a/vendor/golang.org/x/net/http2/client_conn_pool.go
+++ b/vendor/golang.org/x/net/http2/client_conn_pool.go
@@ -8,8 +8,8 @@ package http2
import (
"context"
- "crypto/tls"
"errors"
+ "net"
"net/http"
"sync"
)
@@ -158,7 +158,7 @@ func (c *dialCall) dial(ctx context.Context, addr string) {
// This code decides which ones live or die.
// The return value used is whether c was used.
// c is never closed.
-func (p *clientConnPool) addConnIfNeeded(key string, t *Transport, c *tls.Conn) (used bool, err error) {
+func (p *clientConnPool) addConnIfNeeded(key string, t *Transport, c net.Conn) (used bool, err error) {
p.mu.Lock()
for _, cc := range p.conns[key] {
if cc.CanTakeNewRequest() {
@@ -194,8 +194,8 @@ type addConnCall struct {
err error
}
-func (c *addConnCall) run(t *Transport, key string, tc *tls.Conn) {
- cc, err := t.NewClientConn(tc)
+func (c *addConnCall) run(t *Transport, key string, nc net.Conn) {
+ cc, err := t.NewClientConn(nc)
p := c.p
p.mu.Lock()
diff --git a/vendor/golang.org/x/net/http2/server.go b/vendor/golang.org/x/net/http2/server.go
index 617b4a476..832414b45 100644
--- a/vendor/golang.org/x/net/http2/server.go
+++ b/vendor/golang.org/x/net/http2/server.go
@@ -306,7 +306,7 @@ func ConfigureServer(s *http.Server, conf *Server) error {
if s.TLSNextProto == nil {
s.TLSNextProto = map[string]func(*http.Server, *tls.Conn, http.Handler){}
}
- protoHandler := func(hs *http.Server, c *tls.Conn, h http.Handler) {
+ protoHandler := func(hs *http.Server, c net.Conn, h http.Handler, sawClientPreface bool) {
if testHookOnConn != nil {
testHookOnConn()
}
@@ -323,12 +323,31 @@ func ConfigureServer(s *http.Server, conf *Server) error {
ctx = bc.BaseContext()
}
conf.ServeConn(c, &ServeConnOpts{
- Context: ctx,
- Handler: h,
- BaseConfig: hs,
+ Context: ctx,
+ Handler: h,
+ BaseConfig: hs,
+ SawClientPreface: sawClientPreface,
})
}
- s.TLSNextProto[NextProtoTLS] = protoHandler
+ s.TLSNextProto[NextProtoTLS] = func(hs *http.Server, c *tls.Conn, h http.Handler) {
+ protoHandler(hs, c, h, false)
+ }
+ // The "unencrypted_http2" TLSNextProto key is used to pass off non-TLS HTTP/2 conns.
+ //
+ // A connection passed in this method has already had the HTTP/2 preface read from it.
+ s.TLSNextProto[nextProtoUnencryptedHTTP2] = func(hs *http.Server, c *tls.Conn, h http.Handler) {
+ nc, err := unencryptedNetConnFromTLSConn(c)
+ if err != nil {
+ if lg := hs.ErrorLog; lg != nil {
+ lg.Print(err)
+ } else {
+ log.Print(err)
+ }
+ go c.Close()
+ return
+ }
+ protoHandler(hs, nc, h, true)
+ }
return nil
}
@@ -2880,6 +2899,11 @@ func (w *responseWriter) SetWriteDeadline(deadline time.Time) error {
return nil
}
+func (w *responseWriter) EnableFullDuplex() error {
+ // We always support full duplex responses, so this is a no-op.
+ return nil
+}
+
func (w *responseWriter) Flush() {
w.FlushError()
}
diff --git a/vendor/golang.org/x/net/http2/transport.go b/vendor/golang.org/x/net/http2/transport.go
index 0c5f64aa8..f5968f440 100644
--- a/vendor/golang.org/x/net/http2/transport.go
+++ b/vendor/golang.org/x/net/http2/transport.go
@@ -202,6 +202,20 @@ func (t *Transport) markNewGoroutine() {
}
}
+func (t *Transport) now() time.Time {
+ if t != nil && t.transportTestHooks != nil {
+ return t.transportTestHooks.group.Now()
+ }
+ return time.Now()
+}
+
+func (t *Transport) timeSince(when time.Time) time.Duration {
+ if t != nil && t.transportTestHooks != nil {
+ return t.now().Sub(when)
+ }
+ return time.Since(when)
+}
+
// newTimer creates a new time.Timer, or a synthetic timer in tests.
func (t *Transport) newTimer(d time.Duration) timer {
if t.transportTestHooks != nil {
@@ -281,8 +295,8 @@ func configureTransports(t1 *http.Transport) (*Transport, error) {
if !strSliceContains(t1.TLSClientConfig.NextProtos, "http/1.1") {
t1.TLSClientConfig.NextProtos = append(t1.TLSClientConfig.NextProtos, "http/1.1")
}
- upgradeFn := func(authority string, c *tls.Conn) http.RoundTripper {
- addr := authorityAddr("https", authority)
+ upgradeFn := func(scheme, authority string, c net.Conn) http.RoundTripper {
+ addr := authorityAddr(scheme, authority)
if used, err := connPool.addConnIfNeeded(addr, t2, c); err != nil {
go c.Close()
return erringRoundTripper{err}
@@ -293,18 +307,37 @@ func configureTransports(t1 *http.Transport) (*Transport, error) {
// was unknown)
go c.Close()
}
+ if scheme == "http" {
+ return (*unencryptedTransport)(t2)
+ }
return t2
}
- if m := t1.TLSNextProto; len(m) == 0 {
- t1.TLSNextProto = map[string]func(string, *tls.Conn) http.RoundTripper{
- "h2": upgradeFn,
+ if t1.TLSNextProto == nil {
+ t1.TLSNextProto = make(map[string]func(string, *tls.Conn) http.RoundTripper)
+ }
+ t1.TLSNextProto[NextProtoTLS] = func(authority string, c *tls.Conn) http.RoundTripper {
+ return upgradeFn("https", authority, c)
+ }
+ // The "unencrypted_http2" TLSNextProto key is used to pass off non-TLS HTTP/2 conns.
+ t1.TLSNextProto[nextProtoUnencryptedHTTP2] = func(authority string, c *tls.Conn) http.RoundTripper {
+ nc, err := unencryptedNetConnFromTLSConn(c)
+ if err != nil {
+ go c.Close()
+ return erringRoundTripper{err}
}
- } else {
- m["h2"] = upgradeFn
+ return upgradeFn("http", authority, nc)
}
return t2, nil
}
+// unencryptedTransport is a Transport with a RoundTrip method that
+// always permits http:// URLs.
+type unencryptedTransport Transport
+
+func (t *unencryptedTransport) RoundTrip(req *http.Request) (*http.Response, error) {
+ return (*Transport)(t).RoundTripOpt(req, RoundTripOpt{allowHTTP: true})
+}
+
func (t *Transport) connPool() ClientConnPool {
t.connPoolOnce.Do(t.initConnPool)
return t.connPoolOrDef
@@ -324,7 +357,7 @@ type ClientConn struct {
t *Transport
tconn net.Conn // usually *tls.Conn, except specialized impls
tlsState *tls.ConnectionState // nil only for specialized impls
- reused uint32 // whether conn is being reused; atomic
+ atomicReused uint32 // whether conn is being reused; atomic
singleUse bool // whether being used for a single http.Request
getConnCalled bool // used by clientConnPool
@@ -364,6 +397,14 @@ type ClientConn struct {
readIdleTimeout time.Duration
pingTimeout time.Duration
+ // pendingResets is the number of RST_STREAM frames we have sent to the peer,
+ // without confirming that the peer has received them. When we send a RST_STREAM,
+ // we bundle it with a PING frame, unless a PING is already in flight. We count
+ // the reset stream against the connection's concurrency limit until we get
+ // a PING response. This limits the number of requests we'll try to send to a
+ // completely unresponsive connection.
+ pendingResets int
+
// reqHeaderMu is a 1-element semaphore channel controlling access to sending new requests.
// Write to reqHeaderMu to lock it, read from it to unlock.
// Lock reqmu BEFORE mu or wmu.
@@ -420,12 +461,12 @@ type clientStream struct {
sentHeaders bool
// owned by clientConnReadLoop:
- firstByte bool // got the first response byte
- pastHeaders bool // got first MetaHeadersFrame (actual headers)
- pastTrailers bool // got optional second MetaHeadersFrame (trailers)
- num1xx uint8 // number of 1xx responses seen
- readClosed bool // peer sent an END_STREAM flag
- readAborted bool // read loop reset the stream
+ firstByte bool // got the first response byte
+ pastHeaders bool // got first MetaHeadersFrame (actual headers)
+ pastTrailers bool // got optional second MetaHeadersFrame (trailers)
+ readClosed bool // peer sent an END_STREAM flag
+ readAborted bool // read loop reset the stream
+ totalHeaderSize int64 // total size of 1xx headers seen
trailer http.Header // accumulated trailers
resTrailer *http.Header // client's Response.Trailer
@@ -530,6 +571,8 @@ type RoundTripOpt struct {
// no cached connection is available, RoundTripOpt
// will return ErrNoCachedConn.
OnlyCachedConn bool
+
+ allowHTTP bool // allow http:// URLs
}
func (t *Transport) RoundTrip(req *http.Request) (*http.Response, error) {
@@ -562,7 +605,14 @@ func authorityAddr(scheme string, authority string) (addr string) {
// RoundTripOpt is like RoundTrip, but takes options.
func (t *Transport) RoundTripOpt(req *http.Request, opt RoundTripOpt) (*http.Response, error) {
- if !(req.URL.Scheme == "https" || (req.URL.Scheme == "http" && t.AllowHTTP)) {
+ switch req.URL.Scheme {
+ case "https":
+ // Always okay.
+ case "http":
+ if !t.AllowHTTP && !opt.allowHTTP {
+ return nil, errors.New("http2: unencrypted HTTP/2 not enabled")
+ }
+ default:
return nil, errors.New("http2: unsupported scheme")
}
@@ -573,7 +623,7 @@ func (t *Transport) RoundTripOpt(req *http.Request, opt RoundTripOpt) (*http.Res
t.vlogf("http2: Transport failed to get client conn for %s: %v", addr, err)
return nil, err
}
- reused := !atomic.CompareAndSwapUint32(&cc.reused, 0, 1)
+ reused := !atomic.CompareAndSwapUint32(&cc.atomicReused, 0, 1)
traceGotConn(req, cc, reused)
res, err := cc.RoundTrip(req)
if err != nil && retry <= 6 {
@@ -598,6 +648,22 @@ func (t *Transport) RoundTripOpt(req *http.Request, opt RoundTripOpt) (*http.Res
}
}
}
+ if err == errClientConnNotEstablished {
+ // This ClientConn was created recently,
+ // this is the first request to use it,
+ // and the connection is closed and not usable.
+ //
+ // In this state, cc.idleTimer will remove the conn from the pool
+ // when it fires. Stop the timer and remove it here so future requests
+ // won't try to use this connection.
+ //
+ // If the timer has already fired and we're racing it, the redundant
+ // call to MarkDead is harmless.
+ if cc.idleTimer != nil {
+ cc.idleTimer.Stop()
+ }
+ t.connPool().MarkDead(cc)
+ }
if err != nil {
t.vlogf("RoundTrip failure: %v", err)
return nil, err
@@ -616,9 +682,10 @@ func (t *Transport) CloseIdleConnections() {
}
var (
- errClientConnClosed = errors.New("http2: client conn is closed")
- errClientConnUnusable = errors.New("http2: client conn not usable")
- errClientConnGotGoAway = errors.New("http2: Transport received Server's graceful shutdown GOAWAY")
+ errClientConnClosed = errors.New("http2: client conn is closed")
+ errClientConnUnusable = errors.New("http2: client conn not usable")
+ errClientConnNotEstablished = errors.New("http2: client conn could not be established")
+ errClientConnGotGoAway = errors.New("http2: Transport received Server's graceful shutdown GOAWAY")
)
// shouldRetryRequest is called by RoundTrip when a request fails to get
@@ -757,6 +824,7 @@ func (t *Transport) newClientConn(c net.Conn, singleUse bool) (*ClientConn, erro
pingTimeout: conf.PingTimeout,
pings: make(map[[8]byte]chan struct{}),
reqHeaderMu: make(chan struct{}, 1),
+ lastActive: t.now(),
}
var group synctestGroupInterface
if t.transportTestHooks != nil {
@@ -960,7 +1028,7 @@ func (cc *ClientConn) State() ClientConnState {
return ClientConnState{
Closed: cc.closed,
Closing: cc.closing || cc.singleUse || cc.doNotReuse || cc.goAway != nil,
- StreamsActive: len(cc.streams),
+ StreamsActive: len(cc.streams) + cc.pendingResets,
StreamsReserved: cc.streamsReserved,
StreamsPending: cc.pendingRequests,
LastIdle: cc.lastIdle,
@@ -992,16 +1060,38 @@ func (cc *ClientConn) idleStateLocked() (st clientConnIdleState) {
// writing it.
maxConcurrentOkay = true
} else {
- maxConcurrentOkay = int64(len(cc.streams)+cc.streamsReserved+1) <= int64(cc.maxConcurrentStreams)
+ // We can take a new request if the total of
+ // - active streams;
+ // - reservation slots for new streams; and
+ // - streams for which we have sent a RST_STREAM and a PING,
+ // but received no subsequent frame
+ // is less than the concurrency limit.
+ maxConcurrentOkay = cc.currentRequestCountLocked() < int(cc.maxConcurrentStreams)
}
st.canTakeNewRequest = cc.goAway == nil && !cc.closed && !cc.closing && maxConcurrentOkay &&
!cc.doNotReuse &&
int64(cc.nextStreamID)+2*int64(cc.pendingRequests) < math.MaxInt32 &&
!cc.tooIdleLocked()
+
+ // If this connection has never been used for a request and is closed,
+ // then let it take a request (which will fail).
+ //
+ // This avoids a situation where an error early in a connection's lifetime
+ // goes unreported.
+ if cc.nextStreamID == 1 && cc.streamsReserved == 0 && cc.closed {
+ st.canTakeNewRequest = true
+ }
+
return
}
+// currentRequestCountLocked reports the number of concurrency slots currently in use,
+// including active streams, reserved slots, and reset streams waiting for acknowledgement.
+func (cc *ClientConn) currentRequestCountLocked() int {
+ return len(cc.streams) + cc.streamsReserved + cc.pendingResets
+}
+
func (cc *ClientConn) canTakeNewRequestLocked() bool {
st := cc.idleStateLocked()
return st.canTakeNewRequest
@@ -1014,7 +1104,7 @@ func (cc *ClientConn) tooIdleLocked() bool {
// times are compared based on their wall time. We don't want
// to reuse a connection that's been sitting idle during
// VM/laptop suspend if monotonic time was also frozen.
- return cc.idleTimeout != 0 && !cc.lastIdle.IsZero() && time.Since(cc.lastIdle.Round(0)) > cc.idleTimeout
+ return cc.idleTimeout != 0 && !cc.lastIdle.IsZero() && cc.t.timeSince(cc.lastIdle.Round(0)) > cc.idleTimeout
}
// onIdleTimeout is called from a time.AfterFunc goroutine. It will
@@ -1578,6 +1668,7 @@ func (cs *clientStream) cleanupWriteRequest(err error) {
cs.reqBodyClosed = make(chan struct{})
}
bodyClosed := cs.reqBodyClosed
+ closeOnIdle := cc.singleUse || cc.doNotReuse || cc.t.disableKeepAlives() || cc.goAway != nil
cc.mu.Unlock()
if mustCloseBody {
cs.reqBody.Close()
@@ -1602,16 +1693,40 @@ func (cs *clientStream) cleanupWriteRequest(err error) {
if cs.sentHeaders {
if se, ok := err.(StreamError); ok {
if se.Cause != errFromPeer {
- cc.writeStreamReset(cs.ID, se.Code, err)
+ cc.writeStreamReset(cs.ID, se.Code, false, err)
}
} else {
- cc.writeStreamReset(cs.ID, ErrCodeCancel, err)
+ // We're cancelling an in-flight request.
+ //
+ // This could be due to the server becoming unresponsive.
+ // To avoid sending too many requests on a dead connection,
+ // we let the request continue to consume a concurrency slot
+ // until we can confirm the server is still responding.
+ // We do this by sending a PING frame along with the RST_STREAM
+ // (unless a ping is already in flight).
+ //
+ // For simplicity, we don't bother tracking the PING payload:
+ // We reset cc.pendingResets any time we receive a PING ACK.
+ //
+ // We skip this if the conn is going to be closed on idle,
+ // because it's short lived and will probably be closed before
+ // we get the ping response.
+ ping := false
+ if !closeOnIdle {
+ cc.mu.Lock()
+ if cc.pendingResets == 0 {
+ ping = true
+ }
+ cc.pendingResets++
+ cc.mu.Unlock()
+ }
+ cc.writeStreamReset(cs.ID, ErrCodeCancel, ping, err)
}
}
cs.bufPipe.CloseWithError(err) // no-op if already closed
} else {
if cs.sentHeaders && !cs.sentEndStream {
- cc.writeStreamReset(cs.ID, ErrCodeNo, nil)
+ cc.writeStreamReset(cs.ID, ErrCodeNo, false, nil)
}
cs.bufPipe.CloseWithError(errRequestCanceled)
}
@@ -1633,12 +1748,17 @@ func (cs *clientStream) cleanupWriteRequest(err error) {
// Must hold cc.mu.
func (cc *ClientConn) awaitOpenSlotForStreamLocked(cs *clientStream) error {
for {
- cc.lastActive = time.Now()
+ if cc.closed && cc.nextStreamID == 1 && cc.streamsReserved == 0 {
+ // This is the very first request sent to this connection.
+ // Return a fatal error which aborts the retry loop.
+ return errClientConnNotEstablished
+ }
+ cc.lastActive = cc.t.now()
if cc.closed || !cc.canTakeNewRequestLocked() {
return errClientConnUnusable
}
cc.lastIdle = time.Time{}
- if int64(len(cc.streams)) < int64(cc.maxConcurrentStreams) {
+ if cc.currentRequestCountLocked() < int(cc.maxConcurrentStreams) {
return nil
}
cc.pendingRequests++
@@ -2180,10 +2300,10 @@ func (cc *ClientConn) forgetStreamID(id uint32) {
if len(cc.streams) != slen-1 {
panic("forgetting unknown stream id")
}
- cc.lastActive = time.Now()
+ cc.lastActive = cc.t.now()
if len(cc.streams) == 0 && cc.idleTimer != nil {
cc.idleTimer.Reset(cc.idleTimeout)
- cc.lastIdle = time.Now()
+ cc.lastIdle = cc.t.now()
}
// Wake up writeRequestBody via clientStream.awaitFlowControl and
// wake up RoundTrip if there is a pending request.
@@ -2243,7 +2363,6 @@ func isEOFOrNetReadError(err error) bool {
func (rl *clientConnReadLoop) cleanup() {
cc := rl.cc
- cc.t.connPool().MarkDead(cc)
defer cc.closeConn()
defer close(cc.readerDone)
@@ -2267,6 +2386,24 @@ func (rl *clientConnReadLoop) cleanup() {
}
cc.closed = true
+ // If the connection has never been used, and has been open for only a short time,
+ // leave it in the connection pool for a little while.
+ //
+ // This avoids a situation where new connections are constantly created,
+ // added to the pool, fail, and are removed from the pool, without any error
+ // being surfaced to the user.
+ const unusedWaitTime = 5 * time.Second
+ idleTime := cc.t.now().Sub(cc.lastActive)
+ if atomic.LoadUint32(&cc.atomicReused) == 0 && idleTime < unusedWaitTime {
+ cc.idleTimer = cc.t.afterFunc(unusedWaitTime-idleTime, func() {
+ cc.t.connPool().MarkDead(cc)
+ })
+ } else {
+ cc.mu.Unlock() // avoid any deadlocks in MarkDead
+ cc.t.connPool().MarkDead(cc)
+ cc.mu.Lock()
+ }
+
for _, cs := range cc.streams {
select {
case <-cs.peerClosed:
@@ -2494,15 +2631,34 @@ func (rl *clientConnReadLoop) handleResponse(cs *clientStream, f *MetaHeadersFra
if f.StreamEnded() {
return nil, errors.New("1xx informational response with END_STREAM flag")
}
- cs.num1xx++
- const max1xxResponses = 5 // arbitrary bound on number of informational responses, same as net/http
- if cs.num1xx > max1xxResponses {
- return nil, errors.New("http2: too many 1xx informational responses")
- }
if fn := cs.get1xxTraceFunc(); fn != nil {
+ // If the 1xx response is being delivered to the user,
+ // then they're responsible for limiting the number
+ // of responses.
if err := fn(statusCode, textproto.MIMEHeader(header)); err != nil {
return nil, err
}
+ } else {
+ // If the user didn't examine the 1xx response, then we
+ // limit the size of all 1xx headers.
+ //
+ // This differs a bit from the HTTP/1 implementation, which
+ // limits the size of all 1xx headers plus the final response.
+ // Use the larger limit of MaxHeaderListSize and
+ // net/http.Transport.MaxResponseHeaderBytes.
+ limit := int64(cs.cc.t.maxHeaderListSize())
+ if t1 := cs.cc.t.t1; t1 != nil && t1.MaxResponseHeaderBytes > limit {
+ limit = t1.MaxResponseHeaderBytes
+ }
+ for _, h := range f.Fields {
+ cs.totalHeaderSize += int64(h.Size())
+ }
+ if cs.totalHeaderSize > limit {
+ if VerboseLogs {
+ log.Printf("http2: 1xx informational responses too large")
+ }
+ return nil, errors.New("header list too large")
+ }
}
if statusCode == 100 {
traceGot100Continue(cs.trace)
@@ -3046,6 +3202,11 @@ func (rl *clientConnReadLoop) processPing(f *PingFrame) error {
close(c)
delete(cc.pings, f.Data)
}
+ if cc.pendingResets > 0 {
+ // See clientStream.cleanupWriteRequest.
+ cc.pendingResets = 0
+ cc.cond.Broadcast()
+ }
return nil
}
cc := rl.cc
@@ -3068,13 +3229,20 @@ func (rl *clientConnReadLoop) processPushPromise(f *PushPromiseFrame) error {
return ConnectionError(ErrCodeProtocol)
}
-func (cc *ClientConn) writeStreamReset(streamID uint32, code ErrCode, err error) {
+// writeStreamReset sends a RST_STREAM frame.
+// When ping is true, it also sends a PING frame with a random payload.
+func (cc *ClientConn) writeStreamReset(streamID uint32, code ErrCode, ping bool, err error) {
// TODO: map err to more interesting error codes, once the
// HTTP community comes up with some. But currently for
// RST_STREAM there's no equivalent to GOAWAY frame's debug
// data, and the error codes are all pretty vague ("cancel").
cc.wmu.Lock()
cc.fr.WriteRSTStream(streamID, code)
+ if ping {
+ var payload [8]byte
+ rand.Read(payload[:])
+ cc.fr.WritePing(false, payload)
+ }
cc.bw.Flush()
cc.wmu.Unlock()
}
@@ -3228,7 +3396,7 @@ func traceGotConn(req *http.Request, cc *ClientConn, reused bool) {
cc.mu.Lock()
ci.WasIdle = len(cc.streams) == 0 && reused
if ci.WasIdle && !cc.lastActive.IsZero() {
- ci.IdleTime = time.Since(cc.lastActive)
+ ci.IdleTime = cc.t.timeSince(cc.lastActive)
}
cc.mu.Unlock()
diff --git a/vendor/golang.org/x/net/http2/unencrypted.go b/vendor/golang.org/x/net/http2/unencrypted.go
new file mode 100644
index 000000000..b2de21161
--- /dev/null
+++ b/vendor/golang.org/x/net/http2/unencrypted.go
@@ -0,0 +1,32 @@
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package http2
+
+import (
+ "crypto/tls"
+ "errors"
+ "net"
+)
+
+const nextProtoUnencryptedHTTP2 = "unencrypted_http2"
+
+// unencryptedNetConnFromTLSConn retrieves a net.Conn wrapped in a *tls.Conn.
+//
+// TLSNextProto functions accept a *tls.Conn.
+//
+// When passing an unencrypted HTTP/2 connection to a TLSNextProto function,
+// we pass a *tls.Conn with an underlying net.Conn containing the unencrypted connection.
+// To be extra careful about mistakes (accidentally dropping TLS encryption in a place
+// where we want it), the tls.Conn contains a net.Conn with an UnencryptedNetConn method
+// that returns the actual connection we want to use.
+func unencryptedNetConnFromTLSConn(tc *tls.Conn) (net.Conn, error) {
+ conner, ok := tc.NetConn().(interface {
+ UnencryptedNetConn() net.Conn
+ })
+ if !ok {
+ return nil, errors.New("http2: TLS conn unexpectedly found in unencrypted handoff")
+ }
+ return conner.UnencryptedNetConn(), nil
+}
diff --git a/vendor/golang.org/x/term/README.md b/vendor/golang.org/x/term/README.md
index d03d0aefe..05ff623f9 100644
--- a/vendor/golang.org/x/term/README.md
+++ b/vendor/golang.org/x/term/README.md
@@ -4,16 +4,13 @@
This repository provides Go terminal and console support packages.
-## Download/Install
-
-The easiest way to install is to run `go get -u golang.org/x/term`. You can
-also manually git clone the repository to `$GOPATH/src/golang.org/x/term`.
-
## Report Issues / Send Patches
This repository uses Gerrit for code changes. To learn how to submit changes to
-this repository, see https://golang.org/doc/contribute.html.
+this repository, see https://go.dev/doc/contribute.
+
+The git repository is https://go.googlesource.com/term.
The main issue tracker for the term repository is located at
-https://github.com/golang/go/issues. Prefix your issue with "x/term:" in the
+https://go.dev/issues. Prefix your issue with "x/term:" in the
subject line, so it is easy to find.
diff --git a/vendor/golang.org/x/tools/cmd/goimports/goimports.go b/vendor/golang.org/x/tools/cmd/goimports/goimports.go
index 7463e641e..dcb5023a2 100644
--- a/vendor/golang.org/x/tools/cmd/goimports/goimports.go
+++ b/vendor/golang.org/x/tools/cmd/goimports/goimports.go
@@ -2,8 +2,6 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-//go:debug gotypesalias=0
-
package main
import (
diff --git a/vendor/golang.org/x/tools/cmd/goimports/gotypesalias.go b/vendor/golang.org/x/tools/cmd/goimports/gotypesalias.go
new file mode 100644
index 000000000..288c10c2d
--- /dev/null
+++ b/vendor/golang.org/x/tools/cmd/goimports/gotypesalias.go
@@ -0,0 +1,12 @@
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.23
+
+//go:debug gotypesalias=1
+
+package main
+
+// Materialize aliases whenever the go toolchain version is after 1.23 (#69772).
+// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1).
diff --git a/vendor/golang.org/x/tools/cmd/stringer/gotypesalias.go b/vendor/golang.org/x/tools/cmd/stringer/gotypesalias.go
new file mode 100644
index 000000000..288c10c2d
--- /dev/null
+++ b/vendor/golang.org/x/tools/cmd/stringer/gotypesalias.go
@@ -0,0 +1,12 @@
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.23
+
+//go:debug gotypesalias=1
+
+package main
+
+// Materialize aliases whenever the go toolchain version is after 1.23 (#69772).
+// Remove this file after go.mod >= 1.23 (which implies gotypesalias=1).
diff --git a/vendor/golang.org/x/tools/cmd/stringer/stringer.go b/vendor/golang.org/x/tools/cmd/stringer/stringer.go
index 94eaee844..09be11ca5 100644
--- a/vendor/golang.org/x/tools/cmd/stringer/stringer.go
+++ b/vendor/golang.org/x/tools/cmd/stringer/stringer.go
@@ -70,9 +70,6 @@
// PillAspirin // Aspirin
//
// to suppress it in the output.
-
-//go:debug gotypesalias=0
-
package main // import "golang.org/x/tools/cmd/stringer"
import (
diff --git a/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go b/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go
index c9ba1a375..b622dfdf3 100644
--- a/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go
+++ b/vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go
@@ -57,6 +57,8 @@ type asmArch struct {
// include the first integer register and first floating-point register. Accessing
// any of them counts as writing to result.
retRegs []string
+ // writeResult is a list of instructions that will change result register implicity.
+ writeResult []string
// calculated during initialization
sizes types.Sizes
intSize int
@@ -85,18 +87,18 @@ type asmVar struct {
var (
asmArch386 = asmArch{name: "386", bigEndian: false, stack: "SP", lr: false}
asmArchArm = asmArch{name: "arm", bigEndian: false, stack: "R13", lr: true}
- asmArchArm64 = asmArch{name: "arm64", bigEndian: false, stack: "RSP", lr: true, retRegs: []string{"R0", "F0"}}
- asmArchAmd64 = asmArch{name: "amd64", bigEndian: false, stack: "SP", lr: false, retRegs: []string{"AX", "X0"}}
+ asmArchArm64 = asmArch{name: "arm64", bigEndian: false, stack: "RSP", lr: true, retRegs: []string{"R0", "F0"}, writeResult: []string{"SVC"}}
+ asmArchAmd64 = asmArch{name: "amd64", bigEndian: false, stack: "SP", lr: false, retRegs: []string{"AX", "X0"}, writeResult: []string{"SYSCALL"}}
asmArchMips = asmArch{name: "mips", bigEndian: true, stack: "R29", lr: true}
asmArchMipsLE = asmArch{name: "mipsle", bigEndian: false, stack: "R29", lr: true}
asmArchMips64 = asmArch{name: "mips64", bigEndian: true, stack: "R29", lr: true}
asmArchMips64LE = asmArch{name: "mips64le", bigEndian: false, stack: "R29", lr: true}
- asmArchPpc64 = asmArch{name: "ppc64", bigEndian: true, stack: "R1", lr: true, retRegs: []string{"R3", "F1"}}
- asmArchPpc64LE = asmArch{name: "ppc64le", bigEndian: false, stack: "R1", lr: true, retRegs: []string{"R3", "F1"}}
- asmArchRISCV64 = asmArch{name: "riscv64", bigEndian: false, stack: "SP", lr: true, retRegs: []string{"X10", "F10"}}
+ asmArchPpc64 = asmArch{name: "ppc64", bigEndian: true, stack: "R1", lr: true, retRegs: []string{"R3", "F1"}, writeResult: []string{"SYSCALL"}}
+ asmArchPpc64LE = asmArch{name: "ppc64le", bigEndian: false, stack: "R1", lr: true, retRegs: []string{"R3", "F1"}, writeResult: []string{"SYSCALL"}}
+ asmArchRISCV64 = asmArch{name: "riscv64", bigEndian: false, stack: "SP", lr: true, retRegs: []string{"X10", "F10"}, writeResult: []string{"ECALL"}}
asmArchS390X = asmArch{name: "s390x", bigEndian: true, stack: "R15", lr: true}
asmArchWasm = asmArch{name: "wasm", bigEndian: false, stack: "SP", lr: false}
- asmArchLoong64 = asmArch{name: "loong64", bigEndian: false, stack: "R3", lr: true, retRegs: []string{"R4", "F0"}}
+ asmArchLoong64 = asmArch{name: "loong64", bigEndian: false, stack: "R3", lr: true, retRegs: []string{"R4", "F0"}, writeResult: []string{"SYSCALL"}}
arches = []*asmArch{
&asmArch386,
@@ -351,6 +353,12 @@ Files:
}
if abi == "ABIInternal" && !haveRetArg {
+ for _, ins := range archDef.writeResult {
+ if strings.Contains(line, ins) {
+ haveRetArg = true
+ break
+ }
+ }
for _, reg := range archDef.retRegs {
if strings.Contains(line, reg) {
haveRetArg = true
diff --git a/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall.go b/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall.go
index 26ec06831..613583a1a 100644
--- a/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall.go
+++ b/vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall.go
@@ -179,7 +179,7 @@ func typeCheckCgoSourceFiles(fset *token.FileSet, pkg *types.Package, files []*a
for _, raw := range files {
// If f is a cgo-generated file, Position reports
// the original file, honoring //line directives.
- filename := fset.Position(raw.Pos()).Filename
+ filename := fset.Position(raw.Pos()).Filename // sic: Pos, not FileStart
f, err := parser.ParseFile(fset, filename, nil, parser.SkipObjectResolution)
if err != nil {
return nil, nil, fmt.Errorf("can't parse raw cgo file: %v", err)
diff --git a/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/doc.go b/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/doc.go
index 28bf6c7e2..f789bdc81 100644
--- a/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/doc.go
+++ b/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/doc.go
@@ -10,7 +10,7 @@
// lostcancel: check cancel func returned by context.WithCancel is called
//
// The cancellation function returned by context.WithCancel, WithTimeout,
-// and WithDeadline must be called or the new context will remain live
-// until its parent context is cancelled.
+// WithDeadline and variants such as WithCancelCause must be called,
+// or the new context will remain live until its parent context is cancelled.
// (The background context is never cancelled.)
package lostcancel
diff --git a/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/lostcancel.go b/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/lostcancel.go
index bf56a5c06..26fdc1206 100644
--- a/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/lostcancel.go
+++ b/vendor/golang.org/x/tools/go/analysis/passes/lostcancel/lostcancel.go
@@ -198,7 +198,9 @@ func isContextWithCancel(info *types.Info, n ast.Node) bool {
return false
}
switch sel.Sel.Name {
- case "WithCancel", "WithTimeout", "WithDeadline":
+ case "WithCancel", "WithCancelCause",
+ "WithTimeout", "WithTimeoutCause",
+ "WithDeadline", "WithDeadlineCause":
default:
return false
}
diff --git a/vendor/golang.org/x/tools/go/analysis/passes/slog/slog.go b/vendor/golang.org/x/tools/go/analysis/passes/slog/slog.go
index 0cade7bad..0129102a3 100644
--- a/vendor/golang.org/x/tools/go/analysis/passes/slog/slog.go
+++ b/vendor/golang.org/x/tools/go/analysis/passes/slog/slog.go
@@ -203,7 +203,7 @@ func kvFuncSkipArgs(fn *types.Func) (int, bool) {
// order to get to the ones that match the ...any parameter.
// The first key is the dereferenced receiver type name, or "" for a function.
var kvFuncs = map[string]map[string]int{
- "": map[string]int{
+ "": {
"Debug": 1,
"Info": 1,
"Warn": 1,
@@ -215,7 +215,7 @@ var kvFuncs = map[string]map[string]int{
"Log": 3,
"Group": 1,
},
- "Logger": map[string]int{
+ "Logger": {
"Debug": 1,
"Info": 1,
"Warn": 1,
@@ -227,7 +227,7 @@ var kvFuncs = map[string]map[string]int{
"Log": 3,
"With": 0,
},
- "Record": map[string]int{
+ "Record": {
"Add": 0,
},
}
diff --git a/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go b/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go
index 5b4598235..36f2c43eb 100644
--- a/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go
+++ b/vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go
@@ -48,7 +48,7 @@ var acceptedFuzzTypes = []types.Type{
func run(pass *analysis.Pass) (interface{}, error) {
for _, f := range pass.Files {
- if !strings.HasSuffix(pass.Fset.File(f.Pos()).Name(), "_test.go") {
+ if !strings.HasSuffix(pass.Fset.File(f.FileStart).Name(), "_test.go") {
continue
}
for _, decl := range f.Decls {
diff --git a/vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/unusedwrite.go b/vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/unusedwrite.go
index 3f651fc26..2e209c8a6 100644
--- a/vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/unusedwrite.go
+++ b/vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/unusedwrite.go
@@ -28,7 +28,15 @@ var Analyzer = &analysis.Analyzer{
Run: run,
}
-func run(pass *analysis.Pass) (interface{}, error) {
+func run(pass *analysis.Pass) (any, error) {
+ for _, pkg := range pass.Pkg.Imports() {
+ if pkg.Path() == "unsafe" {
+ // See golang/go#67684, or testdata/src/importsunsafe: the unusedwrite
+ // analyzer may have false positives when used with unsafe.
+ return nil, nil
+ }
+ }
+
ssainput := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA)
for _, fn := range ssainput.SrcFuncs {
reports := checkStores(fn)
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/imports.go b/vendor/golang.org/x/tools/go/ast/astutil/imports.go
index 18d1adb05..a6b5ed0a8 100644
--- a/vendor/golang.org/x/tools/go/ast/astutil/imports.go
+++ b/vendor/golang.org/x/tools/go/ast/astutil/imports.go
@@ -344,7 +344,12 @@ func RewriteImport(fset *token.FileSet, f *ast.File, oldPath, newPath string) (r
}
// UsesImport reports whether a given import is used.
+// The provided File must have been parsed with syntactic object resolution
+// (not using go/parser.SkipObjectResolution).
func UsesImport(f *ast.File, path string) (used bool) {
+ if f.Scope == nil {
+ panic("file f was not parsed with syntactic object resolution")
+ }
spec := importSpec(f, path)
if spec == nil {
return
diff --git a/vendor/golang.org/x/tools/go/ast/inspector/inspector.go b/vendor/golang.org/x/tools/go/ast/inspector/inspector.go
index 0e0ba4c03..958cf38de 100644
--- a/vendor/golang.org/x/tools/go/ast/inspector/inspector.go
+++ b/vendor/golang.org/x/tools/go/ast/inspector/inspector.go
@@ -180,7 +180,9 @@ func (in *Inspector) WithStack(types []ast.Node, f func(n ast.Node, push bool, s
// traverse builds the table of events representing a traversal.
func traverse(files []*ast.File) []event {
// Preallocate approximate number of events
- // based on source file extent.
+ // based on source file extent of the declarations.
+ // (We use End-Pos not FileStart-FileEnd to neglect
+ // the effect of long doc comments.)
// This makes traverse faster by 4x (!).
var extent int
for _, f := range files {
diff --git a/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go b/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go
index 137cc8df1..f3ab0a2e1 100644
--- a/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go
+++ b/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go
@@ -2,22 +2,64 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-// Package gcexportdata provides functions for locating, reading, and
-// writing export data files containing type information produced by the
-// gc compiler. This package supports go1.7 export data format and all
-// later versions.
-//
-// Although it might seem convenient for this package to live alongside
-// go/types in the standard library, this would cause version skew
-// problems for developer tools that use it, since they must be able to
-// consume the outputs of the gc compiler both before and after a Go
-// update such as from Go 1.7 to Go 1.8. Because this package lives in
-// golang.org/x/tools, sites can update their version of this repo some
-// time before the Go 1.8 release and rebuild and redeploy their
-// developer tools, which will then be able to consume both Go 1.7 and
-// Go 1.8 export data files, so they will work before and after the
-// Go update. (See discussion at https://golang.org/issue/15651.)
-package gcexportdata // import "golang.org/x/tools/go/gcexportdata"
+// Package gcexportdata provides functions for reading and writing
+// export data, which is a serialized description of the API of a Go
+// package including the names, kinds, types, and locations of all
+// exported declarations.
+//
+// The standard Go compiler (cmd/compile) writes an export data file
+// for each package it compiles, which it later reads when compiling
+// packages that import the earlier one. The compiler must thus
+// contain logic to both write and read export data.
+// (See the "Export" section in the cmd/compile/README file.)
+//
+// The [Read] function in this package can read files produced by the
+// compiler, producing [go/types] data structures. As a matter of
+// policy, Read supports export data files produced by only the last
+// two Go releases plus tip; see https://go.dev/issue/68898. The
+// export data files produced by the compiler contain additional
+// details related to generics, inlining, and other optimizations that
+// cannot be decoded by the [Read] function.
+//
+// In files written by the compiler, the export data is not at the
+// start of the file. Before calling Read, use [NewReader] to locate
+// the desired portion of the file.
+//
+// The [Write] function in this package encodes the exported API of a
+// Go package ([types.Package]) as a file. Such files can be later
+// decoded by Read, but cannot be consumed by the compiler.
+//
+// # Future changes
+//
+// Although Read supports the formats written by both Write and the
+// compiler, the two are quite different, and there is an open
+// proposal (https://go.dev/issue/69491) to separate these APIs.
+//
+// Under that proposal, this package would ultimately provide only the
+// Read operation for compiler export data, which must be defined in
+// this module (golang.org/x/tools), not in the standard library, to
+// avoid version skew for developer tools that need to read compiler
+// export data both before and after a Go release, such as from Go
+// 1.23 to Go 1.24. Because this package lives in the tools module,
+// clients can update their version of the module some time before the
+// Go 1.24 release and rebuild and redeploy their tools, which will
+// then be able to consume both Go 1.23 and Go 1.24 export data files,
+// so they will work before and after the Go update. (See discussion
+// at https://go.dev/issue/15651.)
+//
+// The operations to import and export [go/types] data structures
+// would be defined in the go/types package as Import and Export.
+// [Write] would (eventually) delegate to Export,
+// and [Read], when it detects a file produced by Export,
+// would delegate to Import.
+//
+// # Deprecations
+//
+// The [NewImporter] and [Find] functions are deprecated and should
+// not be used in new code. The [WriteBundle] and [ReadBundle]
+// functions are experimental, and there is an open proposal to
+// deprecate them (https://go.dev/issue/69573).
+package gcexportdata
import (
"bufio"
@@ -100,6 +142,11 @@ func readAll(r io.Reader) ([]byte, error) {
// Read reads export data from in, decodes it, and returns type
// information for the package.
//
+// Read is capable of reading export data produced by [Write] at the
+// same source code version, or by the last two Go releases (plus tip)
+// of the standard Go compiler. Reading files from older compilers may
+// produce an error.
+//
// The package path (effectively its linker symbol prefix) is
// specified by path, since unlike the package name, this information
// may not be recorded in the export data.
@@ -128,14 +175,26 @@ func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package,
// (from "version"). Select appropriate importer.
if len(data) > 0 {
switch data[0] {
- case 'v', 'c', 'd': // binary, till go1.10
+ case 'v', 'c', 'd':
+ // binary, produced by cmd/compile till go1.10
return nil, fmt.Errorf("binary (%c) import format is no longer supported", data[0])
- case 'i': // indexed, till go1.19
+ case 'i':
+ // indexed, produced by cmd/compile till go1.19,
+ // and also by [Write].
+ //
+ // If proposal #69491 is accepted, go/types
+ // serialization will be implemented by
+ // types.Export, to which Write would eventually
+ // delegate (explicitly dropping any pretence at
+ // inter-version Write-Read compatibility).
+ // This [Read] function would delegate to types.Import
+ // when it detects that the file was produced by Export.
_, pkg, err := gcimporter.IImportData(fset, imports, data[1:], path)
return pkg, err
- case 'u': // unified, from go1.20
+ case 'u':
+ // unified, produced by cmd/compile since go1.20
_, pkg, err := gcimporter.UImportData(fset, imports, data[1:], path)
return pkg, err
diff --git a/vendor/golang.org/x/tools/go/loader/loader.go b/vendor/golang.org/x/tools/go/loader/loader.go
index 013c0f505..2d4865f66 100644
--- a/vendor/golang.org/x/tools/go/loader/loader.go
+++ b/vendor/golang.org/x/tools/go/loader/loader.go
@@ -23,7 +23,6 @@ import (
"golang.org/x/tools/go/ast/astutil"
"golang.org/x/tools/go/internal/cgo"
- "golang.org/x/tools/internal/versions"
)
var ignoreVendor build.ImportMode
@@ -341,13 +340,12 @@ func (conf *Config) addImport(path string, tests bool) {
func (prog *Program) PathEnclosingInterval(start, end token.Pos) (pkg *PackageInfo, path []ast.Node, exact bool) {
for _, info := range prog.AllPackages {
for _, f := range info.Files {
- if f.Pos() == token.NoPos {
- // This can happen if the parser saw
- // too many errors and bailed out.
- // (Use parser.AllErrors to prevent that.)
+ if f.FileStart == token.NoPos {
+ // Workaround for #70162 (undefined FileStart).
+ // TODO(adonovan): delete once go1.24 is assured.
continue
}
- if !tokenFileContainsPos(prog.Fset.File(f.Pos()), start) {
+ if !tokenFileContainsPos(prog.Fset.File(f.FileStart), start) {
continue
}
if path, exact := astutil.PathEnclosingInterval(f, start, end); path != nil {
@@ -1029,18 +1027,18 @@ func (imp *importer) newPackageInfo(path, dir string) *PackageInfo {
info := &PackageInfo{
Pkg: pkg,
Info: types.Info{
- Types: make(map[ast.Expr]types.TypeAndValue),
- Defs: make(map[*ast.Ident]types.Object),
- Uses: make(map[*ast.Ident]types.Object),
- Implicits: make(map[ast.Node]types.Object),
- Instances: make(map[*ast.Ident]types.Instance),
- Scopes: make(map[ast.Node]*types.Scope),
- Selections: make(map[*ast.SelectorExpr]*types.Selection),
+ Types: make(map[ast.Expr]types.TypeAndValue),
+ Defs: make(map[*ast.Ident]types.Object),
+ Uses: make(map[*ast.Ident]types.Object),
+ Implicits: make(map[ast.Node]types.Object),
+ Instances: make(map[*ast.Ident]types.Instance),
+ Scopes: make(map[ast.Node]*types.Scope),
+ Selections: make(map[*ast.SelectorExpr]*types.Selection),
+ FileVersions: make(map[*ast.File]string),
},
errorFunc: imp.conf.TypeChecker.Error,
dir: dir,
}
- versions.InitFileVersions(&info.Info)
// Copy the types.Config so we can vary it across PackageInfos.
tc := imp.conf.TypeChecker
diff --git a/vendor/golang.org/x/tools/go/packages/external.go b/vendor/golang.org/x/tools/go/packages/external.go
index 8f7afcb5d..96db9daf3 100644
--- a/vendor/golang.org/x/tools/go/packages/external.go
+++ b/vendor/golang.org/x/tools/go/packages/external.go
@@ -79,7 +79,7 @@ type DriverResponse struct {
// driver is the type for functions that query the build system for the
// packages named by the patterns.
-type driver func(cfg *Config, patterns ...string) (*DriverResponse, error)
+type driver func(cfg *Config, patterns []string) (*DriverResponse, error)
// findExternalDriver returns the file path of a tool that supplies
// the build system package structure, or "" if not found.
@@ -103,7 +103,7 @@ func findExternalDriver(cfg *Config) driver {
return nil
}
}
- return func(cfg *Config, words ...string) (*DriverResponse, error) {
+ return func(cfg *Config, patterns []string) (*DriverResponse, error) {
req, err := json.Marshal(DriverRequest{
Mode: cfg.Mode,
Env: cfg.Env,
@@ -117,7 +117,7 @@ func findExternalDriver(cfg *Config) driver {
buf := new(bytes.Buffer)
stderr := new(bytes.Buffer)
- cmd := exec.CommandContext(cfg.Context, tool, words...)
+ cmd := exec.CommandContext(cfg.Context, tool, patterns...)
cmd.Dir = cfg.Dir
// The cwd gets resolved to the real path. On Darwin, where
// /tmp is a symlink, this breaks anything that expects the
diff --git a/vendor/golang.org/x/tools/go/packages/golist.go b/vendor/golang.org/x/tools/go/packages/golist.go
index 1a3a5b44f..76f910ece 100644
--- a/vendor/golang.org/x/tools/go/packages/golist.go
+++ b/vendor/golang.org/x/tools/go/packages/golist.go
@@ -80,6 +80,12 @@ type golistState struct {
cfg *Config
ctx context.Context
+ runner *gocommand.Runner
+
+ // overlay is the JSON file that encodes the Config.Overlay
+ // mapping, used by 'go list -overlay=...'.
+ overlay string
+
envOnce sync.Once
goEnvError error
goEnv map[string]string
@@ -127,7 +133,10 @@ func (state *golistState) mustGetEnv() map[string]string {
// goListDriver uses the go list command to interpret the patterns and produce
// the build system package structure.
// See driver for more details.
-func goListDriver(cfg *Config, patterns ...string) (_ *DriverResponse, err error) {
+//
+// overlay is the JSON file that encodes the cfg.Overlay
+// mapping, used by 'go list -overlay=...'
+func goListDriver(cfg *Config, runner *gocommand.Runner, overlay string, patterns []string) (_ *DriverResponse, err error) {
// Make sure that any asynchronous go commands are killed when we return.
parentCtx := cfg.Context
if parentCtx == nil {
@@ -142,13 +151,15 @@ func goListDriver(cfg *Config, patterns ...string) (_ *DriverResponse, err error
cfg: cfg,
ctx: ctx,
vendorDirs: map[string]bool{},
+ overlay: overlay,
+ runner: runner,
}
// Fill in response.Sizes asynchronously if necessary.
- if cfg.Mode&NeedTypesSizes != 0 || cfg.Mode&NeedTypes != 0 {
+ if cfg.Mode&NeedTypesSizes != 0 || cfg.Mode&(NeedTypes|NeedTypesInfo) != 0 {
errCh := make(chan error)
go func() {
- compiler, arch, err := getSizesForArgs(ctx, state.cfgInvocation(), cfg.gocmdRunner)
+ compiler, arch, err := getSizesForArgs(ctx, state.cfgInvocation(), runner)
response.dr.Compiler = compiler
response.dr.Arch = arch
errCh <- err
@@ -681,7 +692,7 @@ func (state *golistState) shouldAddFilenameFromError(p *jsonPackage) bool {
// getGoVersion returns the effective minor version of the go command.
func (state *golistState) getGoVersion() (int, error) {
state.goVersionOnce.Do(func() {
- state.goVersion, state.goVersionError = gocommand.GoVersion(state.ctx, state.cfgInvocation(), state.cfg.gocmdRunner)
+ state.goVersion, state.goVersionError = gocommand.GoVersion(state.ctx, state.cfgInvocation(), state.runner)
})
return state.goVersion, state.goVersionError
}
@@ -751,7 +762,7 @@ func jsonFlag(cfg *Config, goVersion int) string {
}
}
addFields("Name", "ImportPath", "Error") // These fields are always needed
- if cfg.Mode&NeedFiles != 0 || cfg.Mode&NeedTypes != 0 {
+ if cfg.Mode&NeedFiles != 0 || cfg.Mode&(NeedTypes|NeedTypesInfo) != 0 {
addFields("Dir", "GoFiles", "IgnoredGoFiles", "IgnoredOtherFiles", "CFiles",
"CgoFiles", "CXXFiles", "MFiles", "HFiles", "FFiles", "SFiles",
"SwigFiles", "SwigCXXFiles", "SysoFiles")
@@ -759,7 +770,7 @@ func jsonFlag(cfg *Config, goVersion int) string {
addFields("TestGoFiles", "XTestGoFiles")
}
}
- if cfg.Mode&NeedTypes != 0 {
+ if cfg.Mode&(NeedTypes|NeedTypesInfo) != 0 {
// CompiledGoFiles seems to be required for the test case TestCgoNoSyntax,
// even when -compiled isn't passed in.
// TODO(#52435): Should we make the test ask for -compiled, or automatically
@@ -840,7 +851,7 @@ func (state *golistState) cfgInvocation() gocommand.Invocation {
Env: cfg.Env,
Logf: cfg.Logf,
WorkingDir: cfg.Dir,
- Overlay: cfg.goListOverlayFile,
+ Overlay: state.overlay,
}
}
@@ -851,11 +862,8 @@ func (state *golistState) invokeGo(verb string, args ...string) (*bytes.Buffer,
inv := state.cfgInvocation()
inv.Verb = verb
inv.Args = args
- gocmdRunner := cfg.gocmdRunner
- if gocmdRunner == nil {
- gocmdRunner = &gocommand.Runner{}
- }
- stdout, stderr, friendlyErr, err := gocmdRunner.RunRaw(cfg.Context, inv)
+
+ stdout, stderr, friendlyErr, err := state.runner.RunRaw(cfg.Context, inv)
if err != nil {
// Check for 'go' executable not being found.
if ee, ok := err.(*exec.Error); ok && ee.Err == exec.ErrNotFound {
@@ -879,6 +887,12 @@ func (state *golistState) invokeGo(verb string, args ...string) (*bytes.Buffer,
return nil, friendlyErr
}
+ // Return an error if 'go list' failed due to missing tools in
+ // $GOROOT/pkg/tool/$GOOS_$GOARCH (#69606).
+ if len(stderr.String()) > 0 && strings.Contains(stderr.String(), `go: no such tool`) {
+ return nil, friendlyErr
+ }
+
// Is there an error running the C compiler in cgo? This will be reported in the "Error" field
// and should be suppressed by go list -e.
//
diff --git a/vendor/golang.org/x/tools/go/packages/packages.go b/vendor/golang.org/x/tools/go/packages/packages.go
index f227f1bab..2ecc64238 100644
--- a/vendor/golang.org/x/tools/go/packages/packages.go
+++ b/vendor/golang.org/x/tools/go/packages/packages.go
@@ -16,13 +16,13 @@ import (
"go/scanner"
"go/token"
"go/types"
- "io"
"log"
"os"
"path/filepath"
"runtime"
"strings"
"sync"
+ "sync/atomic"
"time"
"golang.org/x/sync/errgroup"
@@ -31,7 +31,6 @@ import (
"golang.org/x/tools/internal/gocommand"
"golang.org/x/tools/internal/packagesinternal"
"golang.org/x/tools/internal/typesinternal"
- "golang.org/x/tools/internal/versions"
)
// A LoadMode controls the amount of detail to return when loading.
@@ -56,7 +55,7 @@ const (
// NeedName adds Name and PkgPath.
NeedName LoadMode = 1 << iota
- // NeedFiles adds GoFiles and OtherFiles.
+ // NeedFiles adds GoFiles, OtherFiles, and IgnoredFiles
NeedFiles
// NeedCompiledGoFiles adds CompiledGoFiles.
@@ -78,7 +77,7 @@ const (
// NeedSyntax adds Syntax and Fset.
NeedSyntax
- // NeedTypesInfo adds TypesInfo.
+ // NeedTypesInfo adds TypesInfo and Fset.
NeedTypesInfo
// NeedTypesSizes adds TypesSizes.
@@ -145,13 +144,7 @@ const (
// A Config specifies details about how packages should be loaded.
// The zero value is a valid configuration.
//
-// Calls to Load do not modify this struct.
-//
-// TODO(adonovan): #67702: this is currently false: in fact,
-// calls to [Load] do not modify the public fields of this struct, but
-// may modify hidden fields, so concurrent calls to [Load] must not
-// use the same Config. But perhaps we should reestablish the
-// documented invariant.
+// Calls to [Load] do not modify this struct.
type Config struct {
// Mode controls the level of information returned for each package.
Mode LoadMode
@@ -182,19 +175,10 @@ type Config struct {
//
Env []string
- // gocmdRunner guards go command calls from concurrency errors.
- gocmdRunner *gocommand.Runner
-
// BuildFlags is a list of command-line flags to be passed through to
// the build system's query tool.
BuildFlags []string
- // modFile will be used for -modfile in go command invocations.
- modFile string
-
- // modFlag will be used for -modfile in go command invocations.
- modFlag string
-
// Fset provides source position information for syntax trees and types.
// If Fset is nil, Load will use a new fileset, but preserve Fset's value.
Fset *token.FileSet
@@ -241,9 +225,13 @@ type Config struct {
// drivers may vary in their level of support for overlays.
Overlay map[string][]byte
- // goListOverlayFile is the JSON file that encodes the Overlay
- // mapping, used by 'go list -overlay=...'
- goListOverlayFile string
+ // -- Hidden configuration fields only for use in x/tools --
+
+ // modFile will be used for -modfile in go command invocations.
+ modFile string
+
+ // modFlag will be used for -modfile in go command invocations.
+ modFlag string
}
// Load loads and returns the Go packages named by the given patterns.
@@ -334,21 +322,24 @@ func defaultDriver(cfg *Config, patterns ...string) (*DriverResponse, bool, erro
} else if !response.NotHandled {
return response, true, nil
}
- // (fall through)
+ // not handled: fall through
}
// go list fallback
- //
+
// Write overlays once, as there are many calls
// to 'go list' (one per chunk plus others too).
- overlay, cleanupOverlay, err := gocommand.WriteOverlays(cfg.Overlay)
+ overlayFile, cleanupOverlay, err := gocommand.WriteOverlays(cfg.Overlay)
if err != nil {
return nil, false, err
}
defer cleanupOverlay()
- cfg.goListOverlayFile = overlay
- response, err := callDriverOnChunks(goListDriver, cfg, chunks)
+ var runner gocommand.Runner // (shared across many 'go list' calls)
+ driver := func(cfg *Config, patterns []string) (*DriverResponse, error) {
+ return goListDriver(cfg, &runner, overlayFile, patterns)
+ }
+ response, err := callDriverOnChunks(driver, cfg, chunks)
if err != nil {
return nil, false, err
}
@@ -386,16 +377,14 @@ func splitIntoChunks(patterns []string, argMax int) ([][]string, error) {
func callDriverOnChunks(driver driver, cfg *Config, chunks [][]string) (*DriverResponse, error) {
if len(chunks) == 0 {
- return driver(cfg)
+ return driver(cfg, nil)
}
responses := make([]*DriverResponse, len(chunks))
errNotHandled := errors.New("driver returned NotHandled")
var g errgroup.Group
for i, chunk := range chunks {
- i := i
- chunk := chunk
g.Go(func() (err error) {
- responses[i], err = driver(cfg, chunk...)
+ responses[i], err = driver(cfg, chunk)
if responses[i] != nil && responses[i].NotHandled {
err = errNotHandled
}
@@ -692,18 +681,19 @@ func (p *Package) String() string { return p.ID }
// loaderPackage augments Package with state used during the loading phase
type loaderPackage struct {
*Package
- importErrors map[string]error // maps each bad import to its error
- loadOnce sync.Once
- color uint8 // for cycle detection
- needsrc bool // load from source (Mode >= LoadTypes)
- needtypes bool // type information is either requested or depended on
- initial bool // package was matched by a pattern
- goVersion int // minor version number of go command on PATH
+ importErrors map[string]error // maps each bad import to its error
+ preds []*loaderPackage // packages that import this one
+ unfinishedSuccs atomic.Int32 // number of direct imports not yet loaded
+ color uint8 // for cycle detection
+ needsrc bool // load from source (Mode >= LoadTypes)
+ needtypes bool // type information is either requested or depended on
+ initial bool // package was matched by a pattern
+ goVersion int // minor version number of go command on PATH
}
// loader holds the working state of a single call to load.
type loader struct {
- pkgs map[string]*loaderPackage
+ pkgs map[string]*loaderPackage // keyed by Package.ID
Config
sizes types.Sizes // non-nil if needed by mode
parseCache map[string]*parseValue
@@ -749,9 +739,6 @@ func newLoader(cfg *Config) *loader {
if ld.Config.Env == nil {
ld.Config.Env = os.Environ()
}
- if ld.Config.gocmdRunner == nil {
- ld.Config.gocmdRunner = &gocommand.Runner{}
- }
if ld.Context == nil {
ld.Context = context.Background()
}
@@ -765,7 +752,7 @@ func newLoader(cfg *Config) *loader {
ld.requestedMode = ld.Mode
ld.Mode = impliedLoadMode(ld.Mode)
- if ld.Mode&NeedTypes != 0 || ld.Mode&NeedSyntax != 0 {
+ if ld.Mode&(NeedSyntax|NeedTypes|NeedTypesInfo) != 0 {
if ld.Fset == nil {
ld.Fset = token.NewFileSet()
}
@@ -806,7 +793,7 @@ func (ld *loader) refine(response *DriverResponse) ([]*Package, error) {
exportDataInvalid := len(ld.Overlay) > 0 || pkg.ExportFile == "" && pkg.PkgPath != "unsafe"
// This package needs type information if the caller requested types and the package is
// either a root, or it's a non-root and the user requested dependencies ...
- needtypes := (ld.Mode&NeedTypes|NeedTypesInfo != 0 && (rootIndex >= 0 || ld.Mode&NeedDeps != 0))
+ needtypes := (ld.Mode&(NeedTypes|NeedTypesInfo) != 0 && (rootIndex >= 0 || ld.Mode&NeedDeps != 0))
// This package needs source if the call requested source (or types info, which implies source)
// and the package is either a root, or itas a non- root and the user requested dependencies...
needsrc := ((ld.Mode&(NeedSyntax|NeedTypesInfo) != 0 && (rootIndex >= 0 || ld.Mode&NeedDeps != 0)) ||
@@ -831,9 +818,10 @@ func (ld *loader) refine(response *DriverResponse) ([]*Package, error) {
}
}
- if ld.Mode&NeedImports != 0 {
- // Materialize the import graph.
-
+ // Materialize the import graph if it is needed (NeedImports),
+ // or if we'll be using loadPackages (Need{Syntax|Types|TypesInfo}).
+ var leaves []*loaderPackage // packages with no unfinished successors
+ if ld.Mode&(NeedImports|NeedSyntax|NeedTypes|NeedTypesInfo) != 0 {
const (
white = 0 // new
grey = 1 // in progress
@@ -852,63 +840,76 @@ func (ld *loader) refine(response *DriverResponse) ([]*Package, error) {
// dependency on a package that does. These are the only packages
// for which we load source code.
var stack []*loaderPackage
- var visit func(lpkg *loaderPackage) bool
- visit = func(lpkg *loaderPackage) bool {
- switch lpkg.color {
- case black:
- return lpkg.needsrc
- case grey:
+ var visit func(from, lpkg *loaderPackage) bool
+ visit = func(from, lpkg *loaderPackage) bool {
+ if lpkg.color == grey {
panic("internal error: grey node")
}
- lpkg.color = grey
- stack = append(stack, lpkg) // push
- stubs := lpkg.Imports // the structure form has only stubs with the ID in the Imports
- lpkg.Imports = make(map[string]*Package, len(stubs))
- for importPath, ipkg := range stubs {
- var importErr error
- imp := ld.pkgs[ipkg.ID]
- if imp == nil {
- // (includes package "C" when DisableCgo)
- importErr = fmt.Errorf("missing package: %q", ipkg.ID)
- } else if imp.color == grey {
- importErr = fmt.Errorf("import cycle: %s", stack)
+ if lpkg.color == white {
+ lpkg.color = grey
+ stack = append(stack, lpkg) // push
+ stubs := lpkg.Imports // the structure form has only stubs with the ID in the Imports
+ lpkg.Imports = make(map[string]*Package, len(stubs))
+ for importPath, ipkg := range stubs {
+ var importErr error
+ imp := ld.pkgs[ipkg.ID]
+ if imp == nil {
+ // (includes package "C" when DisableCgo)
+ importErr = fmt.Errorf("missing package: %q", ipkg.ID)
+ } else if imp.color == grey {
+ importErr = fmt.Errorf("import cycle: %s", stack)
+ }
+ if importErr != nil {
+ if lpkg.importErrors == nil {
+ lpkg.importErrors = make(map[string]error)
+ }
+ lpkg.importErrors[importPath] = importErr
+ continue
+ }
+
+ if visit(lpkg, imp) {
+ lpkg.needsrc = true
+ }
+ lpkg.Imports[importPath] = imp.Package
}
- if importErr != nil {
- if lpkg.importErrors == nil {
- lpkg.importErrors = make(map[string]error)
+
+ // -- postorder --
+
+ // Complete type information is required for the
+ // immediate dependencies of each source package.
+ if lpkg.needsrc && ld.Mode&NeedTypes != 0 {
+ for _, ipkg := range lpkg.Imports {
+ ld.pkgs[ipkg.ID].needtypes = true
}
- lpkg.importErrors[importPath] = importErr
- continue
}
- if visit(imp) {
- lpkg.needsrc = true
+ // NeedTypeSizes causes TypeSizes to be set even
+ // on packages for which types aren't needed.
+ if ld.Mode&NeedTypesSizes != 0 {
+ lpkg.TypesSizes = ld.sizes
}
- lpkg.Imports[importPath] = imp.Package
- }
- // Complete type information is required for the
- // immediate dependencies of each source package.
- if lpkg.needsrc && ld.Mode&NeedTypes != 0 {
- for _, ipkg := range lpkg.Imports {
- ld.pkgs[ipkg.ID].needtypes = true
+ // Add packages with no imports directly to the queue of leaves.
+ if len(lpkg.Imports) == 0 {
+ leaves = append(leaves, lpkg)
}
+
+ stack = stack[:len(stack)-1] // pop
+ lpkg.color = black
}
- // NeedTypeSizes causes TypeSizes to be set even
- // on packages for which types aren't needed.
- if ld.Mode&NeedTypesSizes != 0 {
- lpkg.TypesSizes = ld.sizes
+ // Add edge from predecessor.
+ if from != nil {
+ from.unfinishedSuccs.Add(+1) // incref
+ lpkg.preds = append(lpkg.preds, from)
}
- stack = stack[:len(stack)-1] // pop
- lpkg.color = black
return lpkg.needsrc
}
// For each initial package, create its import DAG.
for _, lpkg := range initial {
- visit(lpkg)
+ visit(nil, lpkg)
}
} else {
@@ -921,16 +922,45 @@ func (ld *loader) refine(response *DriverResponse) ([]*Package, error) {
// Load type data and syntax if needed, starting at
// the initial packages (roots of the import DAG).
- if ld.Mode&NeedTypes != 0 || ld.Mode&NeedSyntax != 0 {
- var wg sync.WaitGroup
- for _, lpkg := range initial {
- wg.Add(1)
- go func(lpkg *loaderPackage) {
- ld.loadRecursive(lpkg)
- wg.Done()
- }(lpkg)
+ if ld.Mode&(NeedSyntax|NeedTypes|NeedTypesInfo) != 0 {
+
+ // We avoid using g.SetLimit to limit concurrency as
+ // it makes g.Go stop accepting work, which prevents
+ // workers from enqeuing, and thus finishing, and thus
+ // allowing the group to make progress: deadlock.
+ //
+ // Instead we use the ioLimit and cpuLimit semaphores.
+ g, _ := errgroup.WithContext(ld.Context)
+
+ // enqueues adds a package to the type-checking queue.
+ // It must have no unfinished successors.
+ var enqueue func(*loaderPackage)
+ enqueue = func(lpkg *loaderPackage) {
+ g.Go(func() error {
+ // Parse and type-check.
+ ld.loadPackage(lpkg)
+
+ // Notify each waiting predecessor,
+ // and enqueue it when it becomes a leaf.
+ for _, pred := range lpkg.preds {
+ if pred.unfinishedSuccs.Add(-1) == 0 { // decref
+ enqueue(pred)
+ }
+ }
+
+ return nil
+ })
+ }
+
+ // Load leaves first, adding new packages
+ // to the queue as they become leaves.
+ for _, leaf := range leaves {
+ enqueue(leaf)
+ }
+
+ if err := g.Wait(); err != nil {
+ return nil, err // cancelled
}
- wg.Wait()
}
// If the context is done, return its error and
@@ -977,7 +1007,7 @@ func (ld *loader) refine(response *DriverResponse) ([]*Package, error) {
if ld.requestedMode&NeedSyntax == 0 {
ld.pkgs[i].Syntax = nil
}
- if ld.requestedMode&NeedTypes == 0 && ld.requestedMode&NeedSyntax == 0 {
+ if ld.requestedMode&(NeedSyntax|NeedTypes|NeedTypesInfo) == 0 {
ld.pkgs[i].Fset = nil
}
if ld.requestedMode&NeedTypesInfo == 0 {
@@ -994,31 +1024,10 @@ func (ld *loader) refine(response *DriverResponse) ([]*Package, error) {
return result, nil
}
-// loadRecursive loads the specified package and its dependencies,
-// recursively, in parallel, in topological order.
-// It is atomic and idempotent.
-// Precondition: ld.Mode&NeedTypes.
-func (ld *loader) loadRecursive(lpkg *loaderPackage) {
- lpkg.loadOnce.Do(func() {
- // Load the direct dependencies, in parallel.
- var wg sync.WaitGroup
- for _, ipkg := range lpkg.Imports {
- imp := ld.pkgs[ipkg.ID]
- wg.Add(1)
- go func(imp *loaderPackage) {
- ld.loadRecursive(imp)
- wg.Done()
- }(imp)
- }
- wg.Wait()
- ld.loadPackage(lpkg)
- })
-}
-
-// loadPackage loads the specified package.
+// loadPackage loads/parses/typechecks the specified package.
// It must be called only once per Package,
// after immediate dependencies are loaded.
-// Precondition: ld.Mode & NeedTypes.
+// Precondition: ld.Mode&(NeedSyntax|NeedTypes|NeedTypesInfo) != 0.
func (ld *loader) loadPackage(lpkg *loaderPackage) {
if lpkg.PkgPath == "unsafe" {
// Fill in the blanks to avoid surprises.
@@ -1054,6 +1063,10 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
if !lpkg.needtypes && !lpkg.needsrc {
return
}
+
+ // TODO(adonovan): this condition looks wrong:
+ // I think it should be lpkg.needtypes && !lpg.needsrc,
+ // so that NeedSyntax without NeedTypes can be satisfied by export data.
if !lpkg.needsrc {
if err := ld.loadFromExportData(lpkg); err != nil {
lpkg.Errors = append(lpkg.Errors, Error{
@@ -1159,7 +1172,7 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
}
lpkg.Syntax = files
- if ld.Config.Mode&NeedTypes == 0 {
+ if ld.Config.Mode&(NeedTypes|NeedTypesInfo) == 0 {
return
}
@@ -1170,16 +1183,20 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
return
}
- lpkg.TypesInfo = &types.Info{
- Types: make(map[ast.Expr]types.TypeAndValue),
- Defs: make(map[*ast.Ident]types.Object),
- Uses: make(map[*ast.Ident]types.Object),
- Implicits: make(map[ast.Node]types.Object),
- Instances: make(map[*ast.Ident]types.Instance),
- Scopes: make(map[ast.Node]*types.Scope),
- Selections: make(map[*ast.SelectorExpr]*types.Selection),
+ // Populate TypesInfo only if needed, as it
+ // causes the type checker to work much harder.
+ if ld.Config.Mode&NeedTypesInfo != 0 {
+ lpkg.TypesInfo = &types.Info{
+ Types: make(map[ast.Expr]types.TypeAndValue),
+ Defs: make(map[*ast.Ident]types.Object),
+ Uses: make(map[*ast.Ident]types.Object),
+ Implicits: make(map[ast.Node]types.Object),
+ Instances: make(map[*ast.Ident]types.Instance),
+ Scopes: make(map[ast.Node]*types.Scope),
+ Selections: make(map[*ast.SelectorExpr]*types.Selection),
+ FileVersions: make(map[*ast.File]string),
+ }
}
- versions.InitFileVersions(lpkg.TypesInfo)
lpkg.TypesSizes = ld.sizes
importer := importerFunc(func(path string) (*types.Package, error) {
@@ -1232,6 +1249,10 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
}
}
+ // Type-checking is CPU intensive.
+ cpuLimit <- unit{} // acquire a token
+ defer func() { <-cpuLimit }() // release a token
+
typErr := types.NewChecker(tc, ld.Fset, lpkg.Types, lpkg.TypesInfo).Files(lpkg.Syntax)
lpkg.importErrors = nil // no longer needed
@@ -1296,8 +1317,11 @@ type importerFunc func(path string) (*types.Package, error)
func (f importerFunc) Import(path string) (*types.Package, error) { return f(path) }
// We use a counting semaphore to limit
-// the number of parallel I/O calls per process.
-var ioLimit = make(chan bool, 20)
+// the number of parallel I/O calls or CPU threads per process.
+var (
+ ioLimit = make(chan unit, 20)
+ cpuLimit = make(chan unit, runtime.GOMAXPROCS(0))
+)
func (ld *loader) parseFile(filename string) (*ast.File, error) {
ld.parseCacheMu.Lock()
@@ -1314,20 +1338,28 @@ func (ld *loader) parseFile(filename string) (*ast.File, error) {
var src []byte
for f, contents := range ld.Config.Overlay {
+ // TODO(adonovan): Inefficient for large overlays.
+ // Do an exact name-based map lookup
+ // (for nonexistent files) followed by a
+ // FileID-based map lookup (for existing ones).
if sameFile(f, filename) {
src = contents
+ break
}
}
var err error
if src == nil {
- ioLimit <- true // wait
+ ioLimit <- unit{} // acquire a token
src, err = os.ReadFile(filename)
- <-ioLimit // signal
+ <-ioLimit // release a token
}
if err != nil {
v.err = err
} else {
+ // Parsing is CPU intensive.
+ cpuLimit <- unit{} // acquire a token
v.f, v.err = ld.ParseFile(ld.Fset, filename, src)
+ <-cpuLimit // release a token
}
close(v.ready)
@@ -1342,18 +1374,21 @@ func (ld *loader) parseFile(filename string) (*ast.File, error) {
// Because files are scanned in parallel, the token.Pos
// positions of the resulting ast.Files are not ordered.
func (ld *loader) parseFiles(filenames []string) ([]*ast.File, []error) {
- var wg sync.WaitGroup
- n := len(filenames)
- parsed := make([]*ast.File, n)
- errors := make([]error, n)
- for i, file := range filenames {
- wg.Add(1)
- go func(i int, filename string) {
+ var (
+ n = len(filenames)
+ parsed = make([]*ast.File, n)
+ errors = make([]error, n)
+ )
+ var g errgroup.Group
+ for i, filename := range filenames {
+ // This creates goroutines unnecessarily in the
+ // cache-hit case, but that case is uncommon.
+ g.Go(func() error {
parsed[i], errors[i] = ld.parseFile(filename)
- wg.Done()
- }(i, file)
+ return nil
+ })
}
- wg.Wait()
+ g.Wait()
// Eliminate nils, preserving order.
var o int
@@ -1524,4 +1559,4 @@ func usesExportData(cfg *Config) bool {
return cfg.Mode&NeedExportFile != 0 || cfg.Mode&NeedTypes != 0 && cfg.Mode&NeedDeps == 0
}
-var _ interface{} = io.Discard // assert build toolchain is go1.16 or later
+type unit struct{}
diff --git a/vendor/golang.org/x/tools/go/ssa/ssautil/load.go b/vendor/golang.org/x/tools/go/ssa/ssautil/load.go
index 51fba0545..c64b03f17 100644
--- a/vendor/golang.org/x/tools/go/ssa/ssautil/load.go
+++ b/vendor/golang.org/x/tools/go/ssa/ssautil/load.go
@@ -13,7 +13,6 @@ import (
"golang.org/x/tools/go/packages"
"golang.org/x/tools/go/ssa"
- "golang.org/x/tools/internal/versions"
)
// Packages creates an SSA program for a set of packages.
@@ -134,15 +133,15 @@ func BuildPackage(tc *types.Config, fset *token.FileSet, pkg *types.Package, fil
}
info := &types.Info{
- Types: make(map[ast.Expr]types.TypeAndValue),
- Defs: make(map[*ast.Ident]types.Object),
- Uses: make(map[*ast.Ident]types.Object),
- Implicits: make(map[ast.Node]types.Object),
- Instances: make(map[*ast.Ident]types.Instance),
- Scopes: make(map[ast.Node]*types.Scope),
- Selections: make(map[*ast.SelectorExpr]*types.Selection),
+ Types: make(map[ast.Expr]types.TypeAndValue),
+ Defs: make(map[*ast.Ident]types.Object),
+ Uses: make(map[*ast.Ident]types.Object),
+ Implicits: make(map[ast.Node]types.Object),
+ Instances: make(map[*ast.Ident]types.Instance),
+ Scopes: make(map[ast.Node]*types.Scope),
+ Selections: make(map[*ast.SelectorExpr]*types.Selection),
+ FileVersions: make(map[*ast.File]string),
}
- versions.InitFileVersions(info)
if err := types.NewChecker(tc, fset, pkg, info).Files(files); err != nil {
return nil, nil, err
}
diff --git a/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go b/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go
index a70b727f2..16ed3c178 100644
--- a/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go
+++ b/vendor/golang.org/x/tools/go/types/objectpath/objectpath.go
@@ -281,25 +281,25 @@ func (enc *Encoder) For(obj types.Object) (Path, error) {
T := o.Type()
if alias, ok := T.(*types.Alias); ok {
- if r := findTypeParam(obj, aliases.TypeParams(alias), path, opTypeParam, nil); r != nil {
+ if r := findTypeParam(obj, aliases.TypeParams(alias), path, opTypeParam); r != nil {
return Path(r), nil
}
- if r := find(obj, aliases.Rhs(alias), append(path, opRhs), nil); r != nil {
+ if r := find(obj, aliases.Rhs(alias), append(path, opRhs)); r != nil {
return Path(r), nil
}
} else if tname.IsAlias() {
// legacy alias
- if r := find(obj, T, path, nil); r != nil {
+ if r := find(obj, T, path); r != nil {
return Path(r), nil
}
} else if named, ok := T.(*types.Named); ok {
// defined (named) type
- if r := findTypeParam(obj, named.TypeParams(), path, opTypeParam, nil); r != nil {
+ if r := findTypeParam(obj, named.TypeParams(), path, opTypeParam); r != nil {
return Path(r), nil
}
- if r := find(obj, named.Underlying(), append(path, opUnderlying), nil); r != nil {
+ if r := find(obj, named.Underlying(), append(path, opUnderlying)); r != nil {
return Path(r), nil
}
}
@@ -312,7 +312,7 @@ func (enc *Encoder) For(obj types.Object) (Path, error) {
if _, ok := o.(*types.TypeName); !ok {
if o.Exported() {
// exported non-type (const, var, func)
- if r := find(obj, o.Type(), append(path, opType), nil); r != nil {
+ if r := find(obj, o.Type(), append(path, opType)); r != nil {
return Path(r), nil
}
}
@@ -332,7 +332,7 @@ func (enc *Encoder) For(obj types.Object) (Path, error) {
if m == obj {
return Path(path2), nil // found declared method
}
- if r := find(obj, m.Type(), append(path2, opType), nil); r != nil {
+ if r := find(obj, m.Type(), append(path2, opType)); r != nil {
return Path(r), nil
}
}
@@ -447,46 +447,64 @@ func (enc *Encoder) concreteMethod(meth *types.Func) (Path, bool) {
//
// The seen map is used to short circuit cycles through type parameters. If
// nil, it will be allocated as necessary.
-func find(obj types.Object, T types.Type, path []byte, seen map[*types.TypeName]bool) []byte {
+//
+// The seenMethods map is used internally to short circuit cycles through
+// interface methods, such as occur in the following example:
+//
+// type I interface { f() interface{I} }
+//
+// See golang/go#68046 for details.
+func find(obj types.Object, T types.Type, path []byte) []byte {
+ return (&finder{obj: obj}).find(T, path)
+}
+
+// finder closes over search state for a call to find.
+type finder struct {
+ obj types.Object // the sought object
+ seenTParamNames map[*types.TypeName]bool // for cycle breaking through type parameters
+ seenMethods map[*types.Func]bool // for cycle breaking through recursive interfaces
+}
+
+func (f *finder) find(T types.Type, path []byte) []byte {
switch T := T.(type) {
case *types.Alias:
- return find(obj, types.Unalias(T), path, seen)
+ return f.find(types.Unalias(T), path)
case *types.Basic, *types.Named:
// Named types belonging to pkg were handled already,
// so T must belong to another package. No path.
return nil
case *types.Pointer:
- return find(obj, T.Elem(), append(path, opElem), seen)
+ return f.find(T.Elem(), append(path, opElem))
case *types.Slice:
- return find(obj, T.Elem(), append(path, opElem), seen)
+ return f.find(T.Elem(), append(path, opElem))
case *types.Array:
- return find(obj, T.Elem(), append(path, opElem), seen)
+ return f.find(T.Elem(), append(path, opElem))
case *types.Chan:
- return find(obj, T.Elem(), append(path, opElem), seen)
+ return f.find(T.Elem(), append(path, opElem))
case *types.Map:
- if r := find(obj, T.Key(), append(path, opKey), seen); r != nil {
+ if r := f.find(T.Key(), append(path, opKey)); r != nil {
return r
}
- return find(obj, T.Elem(), append(path, opElem), seen)
+ return f.find(T.Elem(), append(path, opElem))
case *types.Signature:
- if r := findTypeParam(obj, T.RecvTypeParams(), path, opRecvTypeParam, nil); r != nil {
+ if r := f.findTypeParam(T.RecvTypeParams(), path, opRecvTypeParam); r != nil {
return r
}
- if r := findTypeParam(obj, T.TypeParams(), path, opTypeParam, seen); r != nil {
+ if r := f.findTypeParam(T.TypeParams(), path, opTypeParam); r != nil {
return r
}
- if r := find(obj, T.Params(), append(path, opParams), seen); r != nil {
+ if r := f.find(T.Params(), append(path, opParams)); r != nil {
return r
}
- return find(obj, T.Results(), append(path, opResults), seen)
+ return f.find(T.Results(), append(path, opResults))
case *types.Struct:
for i := 0; i < T.NumFields(); i++ {
fld := T.Field(i)
path2 := appendOpArg(path, opField, i)
- if fld == obj {
+ if fld == f.obj {
return path2 // found field var
}
- if r := find(obj, fld.Type(), append(path2, opType), seen); r != nil {
+ if r := f.find(fld.Type(), append(path2, opType)); r != nil {
return r
}
}
@@ -495,10 +513,10 @@ func find(obj types.Object, T types.Type, path []byte, seen map[*types.TypeName]
for i := 0; i < T.Len(); i++ {
v := T.At(i)
path2 := appendOpArg(path, opAt, i)
- if v == obj {
+ if v == f.obj {
return path2 // found param/result var
}
- if r := find(obj, v.Type(), append(path2, opType), seen); r != nil {
+ if r := f.find(v.Type(), append(path2, opType)); r != nil {
return r
}
}
@@ -506,28 +524,35 @@ func find(obj types.Object, T types.Type, path []byte, seen map[*types.TypeName]
case *types.Interface:
for i := 0; i < T.NumMethods(); i++ {
m := T.Method(i)
+ if f.seenMethods[m] {
+ return nil
+ }
path2 := appendOpArg(path, opMethod, i)
- if m == obj {
+ if m == f.obj {
return path2 // found interface method
}
- if r := find(obj, m.Type(), append(path2, opType), seen); r != nil {
+ if f.seenMethods == nil {
+ f.seenMethods = make(map[*types.Func]bool)
+ }
+ f.seenMethods[m] = true
+ if r := f.find(m.Type(), append(path2, opType)); r != nil {
return r
}
}
return nil
case *types.TypeParam:
name := T.Obj()
- if name == obj {
- return append(path, opObj)
- }
- if seen[name] {
+ if f.seenTParamNames[name] {
return nil
}
- if seen == nil {
- seen = make(map[*types.TypeName]bool)
+ if name == f.obj {
+ return append(path, opObj)
}
- seen[name] = true
- if r := find(obj, T.Constraint(), append(path, opConstraint), seen); r != nil {
+ if f.seenTParamNames == nil {
+ f.seenTParamNames = make(map[*types.TypeName]bool)
+ }
+ f.seenTParamNames[name] = true
+ if r := f.find(T.Constraint(), append(path, opConstraint)); r != nil {
return r
}
return nil
@@ -535,11 +560,15 @@ func find(obj types.Object, T types.Type, path []byte, seen map[*types.TypeName]
panic(T)
}
-func findTypeParam(obj types.Object, list *types.TypeParamList, path []byte, op byte, seen map[*types.TypeName]bool) []byte {
+func findTypeParam(obj types.Object, list *types.TypeParamList, path []byte, op byte) []byte {
+ return (&finder{obj: obj}).findTypeParam(list, path, op)
+}
+
+func (f *finder) findTypeParam(list *types.TypeParamList, path []byte, op byte) []byte {
for i := 0; i < list.Len(); i++ {
tparam := list.At(i)
path2 := appendOpArg(path, op, i)
- if r := find(obj, tparam, path2, seen); r != nil {
+ if r := f.find(tparam, path2); r != nil {
return r
}
}
diff --git a/vendor/golang.org/x/tools/internal/analysisinternal/analysis.go b/vendor/golang.org/x/tools/internal/analysisinternal/analysis.go
index 24755b412..4ccaa210a 100644
--- a/vendor/golang.org/x/tools/internal/analysisinternal/analysis.go
+++ b/vendor/golang.org/x/tools/internal/analysisinternal/analysis.go
@@ -10,6 +10,7 @@ import (
"bytes"
"fmt"
"go/ast"
+ "go/scanner"
"go/token"
"go/types"
"os"
@@ -21,12 +22,46 @@ import (
func TypeErrorEndPos(fset *token.FileSet, src []byte, start token.Pos) token.Pos {
// Get the end position for the type error.
- offset, end := fset.PositionFor(start, false).Offset, start
- if offset >= len(src) {
- return end
+ file := fset.File(start)
+ if file == nil {
+ return start
}
- if width := bytes.IndexAny(src[offset:], " \n,():;[]+-*"); width > 0 {
- end = start + token.Pos(width)
+ if offset := file.PositionFor(start, false).Offset; offset > len(src) {
+ return start
+ } else {
+ src = src[offset:]
+ }
+
+ // Attempt to find a reasonable end position for the type error.
+ //
+ // TODO(rfindley): the heuristic implemented here is unclear. It looks like
+ // it seeks the end of the primary operand starting at start, but that is not
+ // quite implemented (for example, given a func literal this heuristic will
+ // return the range of the func keyword).
+ //
+ // We should formalize this heuristic, or deprecate it by finally proposing
+ // to add end position to all type checker errors.
+ //
+ // Nevertheless, ensure that the end position at least spans the current
+ // token at the cursor (this was golang/go#69505).
+ end := start
+ {
+ var s scanner.Scanner
+ fset := token.NewFileSet()
+ f := fset.AddFile("", fset.Base(), len(src))
+ s.Init(f, src, nil /* no error handler */, scanner.ScanComments)
+ pos, tok, lit := s.Scan()
+ if tok != token.SEMICOLON && token.Pos(f.Base()) <= pos && pos <= token.Pos(f.Base()+f.Size()) {
+ off := file.Offset(pos) + len(lit)
+ src = src[off:]
+ end += token.Pos(off)
+ }
+ }
+
+ // Look for bytes that might terminate the current operand. See note above:
+ // this is imprecise.
+ if width := bytes.IndexAny(src, " \n,():;[]+-*/"); width > 0 {
+ end += token.Pos(width)
}
return end
}
diff --git a/vendor/golang.org/x/tools/internal/gcimporter/iexport.go b/vendor/golang.org/x/tools/internal/gcimporter/iexport.go
index 1e19fbed8..7dfc31a37 100644
--- a/vendor/golang.org/x/tools/internal/gcimporter/iexport.go
+++ b/vendor/golang.org/x/tools/internal/gcimporter/iexport.go
@@ -246,6 +246,26 @@ import (
// IExportShallow encodes "shallow" export data for the specified package.
//
+// For types, we use "shallow" export data. Historically, the Go
+// compiler always produced a summary of the types for a given package
+// that included types from other packages that it indirectly
+// referenced: "deep" export data. This had the advantage that the
+// compiler (and analogous tools such as gopls) need only load one
+// file per direct import. However, it meant that the files tended to
+// get larger based on the level of the package in the import
+// graph. For example, higher-level packages in the kubernetes module
+// have over 1MB of "deep" export data, even when they have almost no
+// content of their own, merely because they mention a major type that
+// references many others. In pathological cases the export data was
+// 300x larger than the source for a package due to this quadratic
+// growth.
+//
+// "Shallow" export data means that the serialized types describe only
+// a single package. If those types mention types from other packages,
+// the type checker may need to request additional packages beyond
+// just the direct imports. Type information for the entire transitive
+// closure of imports is provided (lazily) by the DAG.
+//
// No promises are made about the encoding other than that it can be decoded by
// the same version of IIExportShallow. If you plan to save export data in the
// file system, be sure to include a cryptographic digest of the executable in
@@ -268,8 +288,8 @@ func IExportShallow(fset *token.FileSet, pkg *types.Package, reportf ReportFunc)
}
// IImportShallow decodes "shallow" types.Package data encoded by
-// IExportShallow in the same executable. This function cannot import data from
-// cmd/compile or gcexportdata.Write.
+// [IExportShallow] in the same executable. This function cannot import data
+// from cmd/compile or gcexportdata.Write.
//
// The importer calls getPackages to obtain package symbols for all
// packages mentioned in the export data, including the one being
diff --git a/vendor/golang.org/x/tools/internal/gcimporter/iimport.go b/vendor/golang.org/x/tools/internal/gcimporter/iimport.go
index 21908a158..e260c0e8d 100644
--- a/vendor/golang.org/x/tools/internal/gcimporter/iimport.go
+++ b/vendor/golang.org/x/tools/internal/gcimporter/iimport.go
@@ -558,6 +558,14 @@ type importReader struct {
prevColumn int64
}
+// markBlack is redefined in iimport_go123.go, to work around golang/go#69912.
+//
+// If TypeNames are not marked black (in the sense of go/types cycle
+// detection), they may be mutated when dot-imported. Fix this by punching a
+// hole through the type, when compiling with Go 1.23. (The bug has been fixed
+// for 1.24, but the fix was not worth back-porting).
+var markBlack = func(name *types.TypeName) {}
+
func (r *importReader) obj(name string) {
tag := r.byte()
pos := r.pos()
@@ -570,6 +578,7 @@ func (r *importReader) obj(name string) {
}
typ := r.typ()
obj := aliases.NewAlias(r.p.aliases, pos, r.currPkg, name, typ, tparams)
+ markBlack(obj) // workaround for golang/go#69912
r.declare(obj)
case constTag:
@@ -590,6 +599,9 @@ func (r *importReader) obj(name string) {
// declaration before recursing.
obj := types.NewTypeName(pos, r.currPkg, name, nil)
named := types.NewNamed(obj, nil, nil)
+
+ markBlack(obj) // workaround for golang/go#69912
+
// Declare obj before calling r.tparamList, so the new type name is recognized
// if used in the constraint of one of its own typeparams (see #48280).
r.declare(obj)
diff --git a/vendor/golang.org/x/tools/internal/gcimporter/iimport_go122.go b/vendor/golang.org/x/tools/internal/gcimporter/iimport_go122.go
new file mode 100644
index 000000000..7586bfaca
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/gcimporter/iimport_go122.go
@@ -0,0 +1,53 @@
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build go1.22 && !go1.24
+
+package gcimporter
+
+import (
+ "go/token"
+ "go/types"
+ "unsafe"
+)
+
+// TODO(rfindley): delete this workaround once go1.24 is assured.
+
+func init() {
+ // Update markBlack so that it correctly sets the color
+ // of imported TypeNames.
+ //
+ // See the doc comment for markBlack for details.
+
+ type color uint32
+ const (
+ white color = iota
+ black
+ grey
+ )
+ type object struct {
+ _ *types.Scope
+ _ token.Pos
+ _ *types.Package
+ _ string
+ _ types.Type
+ _ uint32
+ color_ color
+ _ token.Pos
+ }
+ type typeName struct {
+ object
+ }
+
+ // If the size of types.TypeName changes, this will fail to compile.
+ const delta = int64(unsafe.Sizeof(typeName{})) - int64(unsafe.Sizeof(types.TypeName{}))
+ var _ [-delta * delta]int
+
+ markBlack = func(obj *types.TypeName) {
+ type uP = unsafe.Pointer
+ var ptr *typeName
+ *(*uP)(uP(&ptr)) = uP(obj)
+ ptr.color_ = black
+ }
+}
diff --git a/vendor/golang.org/x/tools/internal/imports/fix.go b/vendor/golang.org/x/tools/internal/imports/fix.go
index c15108178..5ae576977 100644
--- a/vendor/golang.org/x/tools/internal/imports/fix.go
+++ b/vendor/golang.org/x/tools/internal/imports/fix.go
@@ -27,7 +27,6 @@ import (
"unicode"
"unicode/utf8"
- "golang.org/x/sync/errgroup"
"golang.org/x/tools/go/ast/astutil"
"golang.org/x/tools/internal/event"
"golang.org/x/tools/internal/gocommand"
@@ -91,18 +90,6 @@ type ImportFix struct {
Relevance float64 // see pkg
}
-// An ImportInfo represents a single import statement.
-type ImportInfo struct {
- ImportPath string // import path, e.g. "crypto/rand".
- Name string // import name, e.g. "crand", or "" if none.
-}
-
-// A packageInfo represents what's known about a package.
-type packageInfo struct {
- name string // real package name, if known.
- exports map[string]bool // known exports.
-}
-
// parseOtherFiles parses all the Go files in srcDir except filename, including
// test files if filename looks like a test.
//
@@ -162,8 +149,8 @@ func addGlobals(f *ast.File, globals map[string]bool) {
// collectReferences builds a map of selector expressions, from
// left hand side (X) to a set of right hand sides (Sel).
-func collectReferences(f *ast.File) references {
- refs := references{}
+func collectReferences(f *ast.File) References {
+ refs := References{}
var visitor visitFn
visitor = func(node ast.Node) ast.Visitor {
@@ -233,7 +220,7 @@ func (p *pass) findMissingImport(pkg string, syms map[string]bool) *ImportInfo {
allFound := true
for right := range syms {
- if !pkgInfo.exports[right] {
+ if !pkgInfo.Exports[right] {
allFound = false
break
}
@@ -246,11 +233,6 @@ func (p *pass) findMissingImport(pkg string, syms map[string]bool) *ImportInfo {
return nil
}
-// references is set of references found in a Go file. The first map key is the
-// left hand side of a selector expression, the second key is the right hand
-// side, and the value should always be true.
-type references map[string]map[string]bool
-
// A pass contains all the inputs and state necessary to fix a file's imports.
// It can be modified in some ways during use; see comments below.
type pass struct {
@@ -258,27 +240,29 @@ type pass struct {
fset *token.FileSet // fset used to parse f and its siblings.
f *ast.File // the file being fixed.
srcDir string // the directory containing f.
- env *ProcessEnv // the environment to use for go commands, etc.
- loadRealPackageNames bool // if true, load package names from disk rather than guessing them.
- otherFiles []*ast.File // sibling files.
+ logf func(string, ...any)
+ source Source // the environment to use for go commands, etc.
+ loadRealPackageNames bool // if true, load package names from disk rather than guessing them.
+ otherFiles []*ast.File // sibling files.
+ goroot string
// Intermediate state, generated by load.
existingImports map[string][]*ImportInfo
- allRefs references
- missingRefs references
+ allRefs References
+ missingRefs References
// Inputs to fix. These can be augmented between successive fix calls.
lastTry bool // indicates that this is the last call and fix should clean up as best it can.
candidates []*ImportInfo // candidate imports in priority order.
- knownPackages map[string]*packageInfo // information about all known packages.
+ knownPackages map[string]*PackageInfo // information about all known packages.
}
// loadPackageNames saves the package names for everything referenced by imports.
-func (p *pass) loadPackageNames(imports []*ImportInfo) error {
- if p.env.Logf != nil {
- p.env.Logf("loading package names for %v packages", len(imports))
+func (p *pass) loadPackageNames(ctx context.Context, imports []*ImportInfo) error {
+ if p.logf != nil {
+ p.logf("loading package names for %v packages", len(imports))
defer func() {
- p.env.Logf("done loading package names for %v packages", len(imports))
+ p.logf("done loading package names for %v packages", len(imports))
}()
}
var unknown []string
@@ -289,20 +273,17 @@ func (p *pass) loadPackageNames(imports []*ImportInfo) error {
unknown = append(unknown, imp.ImportPath)
}
- resolver, err := p.env.GetResolver()
- if err != nil {
- return err
- }
-
- names, err := resolver.loadPackageNames(unknown, p.srcDir)
+ names, err := p.source.LoadPackageNames(ctx, p.srcDir, unknown)
if err != nil {
return err
}
+ // TODO(rfindley): revisit this. Why do we need to store known packages with
+ // no exports? The inconsistent data is confusing.
for path, name := range names {
- p.knownPackages[path] = &packageInfo{
- name: name,
- exports: map[string]bool{},
+ p.knownPackages[path] = &PackageInfo{
+ Name: name,
+ Exports: map[string]bool{},
}
}
return nil
@@ -330,8 +311,8 @@ func (p *pass) importIdentifier(imp *ImportInfo) string {
return imp.Name
}
known := p.knownPackages[imp.ImportPath]
- if known != nil && known.name != "" {
- return withoutVersion(known.name)
+ if known != nil && known.Name != "" {
+ return withoutVersion(known.Name)
}
return ImportPathToAssumedName(imp.ImportPath)
}
@@ -339,9 +320,9 @@ func (p *pass) importIdentifier(imp *ImportInfo) string {
// load reads in everything necessary to run a pass, and reports whether the
// file already has all the imports it needs. It fills in p.missingRefs with the
// file's missing symbols, if any, or removes unused imports if not.
-func (p *pass) load() ([]*ImportFix, bool) {
- p.knownPackages = map[string]*packageInfo{}
- p.missingRefs = references{}
+func (p *pass) load(ctx context.Context) ([]*ImportFix, bool) {
+ p.knownPackages = map[string]*PackageInfo{}
+ p.missingRefs = References{}
p.existingImports = map[string][]*ImportInfo{}
// Load basic information about the file in question.
@@ -364,9 +345,11 @@ func (p *pass) load() ([]*ImportFix, bool) {
// f's imports by the identifier they introduce.
imports := collectImports(p.f)
if p.loadRealPackageNames {
- err := p.loadPackageNames(append(imports, p.candidates...))
+ err := p.loadPackageNames(ctx, append(imports, p.candidates...))
if err != nil {
- p.env.logf("loading package names: %v", err)
+ if p.logf != nil {
+ p.logf("loading package names: %v", err)
+ }
return nil, false
}
}
@@ -535,9 +518,10 @@ func (p *pass) assumeSiblingImportsValid() {
// We have the stdlib in memory; no need to guess.
rights = symbolNameSet(m)
}
- p.addCandidate(imp, &packageInfo{
+ // TODO(rfindley): we should set package name here, for consistency.
+ p.addCandidate(imp, &PackageInfo{
// no name; we already know it.
- exports: rights,
+ Exports: rights,
})
}
}
@@ -546,14 +530,14 @@ func (p *pass) assumeSiblingImportsValid() {
// addCandidate adds a candidate import to p, and merges in the information
// in pkg.
-func (p *pass) addCandidate(imp *ImportInfo, pkg *packageInfo) {
+func (p *pass) addCandidate(imp *ImportInfo, pkg *PackageInfo) {
p.candidates = append(p.candidates, imp)
if existing, ok := p.knownPackages[imp.ImportPath]; ok {
- if existing.name == "" {
- existing.name = pkg.name
+ if existing.Name == "" {
+ existing.Name = pkg.Name
}
- for export := range pkg.exports {
- existing.exports[export] = true
+ for export := range pkg.Exports {
+ existing.Exports[export] = true
}
} else {
p.knownPackages[imp.ImportPath] = pkg
@@ -581,19 +565,42 @@ func fixImportsDefault(fset *token.FileSet, f *ast.File, filename string, env *P
// getFixes gets the import fixes that need to be made to f in order to fix the imports.
// It does not modify the ast.
func getFixes(ctx context.Context, fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv) ([]*ImportFix, error) {
+ source, err := NewProcessEnvSource(env, filename, f.Name.Name)
+ if err != nil {
+ return nil, err
+ }
+ goEnv, err := env.goEnv()
+ if err != nil {
+ return nil, err
+ }
+ return getFixesWithSource(ctx, fset, f, filename, goEnv["GOROOT"], env.logf, source)
+}
+
+func getFixesWithSource(ctx context.Context, fset *token.FileSet, f *ast.File, filename string, goroot string, logf func(string, ...any), source Source) ([]*ImportFix, error) {
+ // This logic is defensively duplicated from getFixes.
abs, err := filepath.Abs(filename)
if err != nil {
return nil, err
}
srcDir := filepath.Dir(abs)
- env.logf("fixImports(filename=%q), abs=%q, srcDir=%q ...", filename, abs, srcDir)
+
+ if logf != nil {
+ logf("fixImports(filename=%q), srcDir=%q ...", filename, abs, srcDir)
+ }
// First pass: looking only at f, and using the naive algorithm to
// derive package names from import paths, see if the file is already
// complete. We can't add any imports yet, because we don't know
// if missing references are actually package vars.
- p := &pass{fset: fset, f: f, srcDir: srcDir, env: env}
- if fixes, done := p.load(); done {
+ p := &pass{
+ fset: fset,
+ f: f,
+ srcDir: srcDir,
+ logf: logf,
+ goroot: goroot,
+ source: source,
+ }
+ if fixes, done := p.load(ctx); done {
return fixes, nil
}
@@ -605,7 +612,7 @@ func getFixes(ctx context.Context, fset *token.FileSet, f *ast.File, filename st
// Second pass: add information from other files in the same package,
// like their package vars and imports.
p.otherFiles = otherFiles
- if fixes, done := p.load(); done {
+ if fixes, done := p.load(ctx); done {
return fixes, nil
}
@@ -618,10 +625,17 @@ func getFixes(ctx context.Context, fset *token.FileSet, f *ast.File, filename st
// Third pass: get real package names where we had previously used
// the naive algorithm.
- p = &pass{fset: fset, f: f, srcDir: srcDir, env: env}
+ p = &pass{
+ fset: fset,
+ f: f,
+ srcDir: srcDir,
+ logf: logf,
+ goroot: goroot,
+ source: p.source, // safe to reuse, as it's just a wrapper around env
+ }
p.loadRealPackageNames = true
p.otherFiles = otherFiles
- if fixes, done := p.load(); done {
+ if fixes, done := p.load(ctx); done {
return fixes, nil
}
@@ -835,7 +849,7 @@ func GetPackageExports(ctx context.Context, wrapped func(PackageExport), searchP
return true
},
dirFound: func(pkg *pkg) bool {
- return pkgIsCandidate(filename, references{searchPkg: nil}, pkg)
+ return pkgIsCandidate(filename, References{searchPkg: nil}, pkg)
},
packageNameLoaded: func(pkg *pkg) bool {
return pkg.packageName == searchPkg
@@ -1086,11 +1100,7 @@ func (e *ProcessEnv) invokeGo(ctx context.Context, verb string, args ...string)
return e.GocmdRunner.Run(ctx, inv)
}
-func addStdlibCandidates(pass *pass, refs references) error {
- goenv, err := pass.env.goEnv()
- if err != nil {
- return err
- }
+func addStdlibCandidates(pass *pass, refs References) error {
localbase := func(nm string) string {
ans := path.Base(nm)
if ans[0] == 'v' {
@@ -1105,13 +1115,13 @@ func addStdlibCandidates(pass *pass, refs references) error {
}
add := func(pkg string) {
// Prevent self-imports.
- if path.Base(pkg) == pass.f.Name.Name && filepath.Join(goenv["GOROOT"], "src", pkg) == pass.srcDir {
+ if path.Base(pkg) == pass.f.Name.Name && filepath.Join(pass.goroot, "src", pkg) == pass.srcDir {
return
}
exports := symbolNameSet(stdlib.PackageSymbols[pkg])
pass.addCandidate(
&ImportInfo{ImportPath: pkg},
- &packageInfo{name: localbase(pkg), exports: exports})
+ &PackageInfo{Name: localbase(pkg), Exports: exports})
}
for left := range refs {
if left == "rand" {
@@ -1175,91 +1185,14 @@ type scanCallback struct {
exportsLoaded func(pkg *pkg, exports []stdlib.Symbol)
}
-func addExternalCandidates(ctx context.Context, pass *pass, refs references, filename string) error {
+func addExternalCandidates(ctx context.Context, pass *pass, refs References, filename string) error {
ctx, done := event.Start(ctx, "imports.addExternalCandidates")
defer done()
- var mu sync.Mutex
- found := make(map[string][]pkgDistance)
- callback := &scanCallback{
- rootFound: func(gopathwalk.Root) bool {
- return true // We want everything.
- },
- dirFound: func(pkg *pkg) bool {
- return pkgIsCandidate(filename, refs, pkg)
- },
- packageNameLoaded: func(pkg *pkg) bool {
- if _, want := refs[pkg.packageName]; !want {
- return false
- }
- if pkg.dir == pass.srcDir && pass.f.Name.Name == pkg.packageName {
- // The candidate is in the same directory and has the
- // same package name. Don't try to import ourselves.
- return false
- }
- if !canUse(filename, pkg.dir) {
- return false
- }
- mu.Lock()
- defer mu.Unlock()
- found[pkg.packageName] = append(found[pkg.packageName], pkgDistance{pkg, distance(pass.srcDir, pkg.dir)})
- return false // We'll do our own loading after we sort.
- },
- }
- resolver, err := pass.env.GetResolver()
+ results, err := pass.source.ResolveReferences(ctx, filename, refs)
if err != nil {
return err
}
- if err = resolver.scan(ctx, callback); err != nil {
- return err
- }
-
- // Search for imports matching potential package references.
- type result struct {
- imp *ImportInfo
- pkg *packageInfo
- }
- results := make([]*result, len(refs))
-
- g, ctx := errgroup.WithContext(ctx)
-
- searcher := symbolSearcher{
- logf: pass.env.logf,
- srcDir: pass.srcDir,
- xtest: strings.HasSuffix(pass.f.Name.Name, "_test"),
- loadExports: resolver.loadExports,
- }
-
- i := 0
- for pkgName, symbols := range refs {
- index := i // claim an index in results
- i++
- pkgName := pkgName
- symbols := symbols
-
- g.Go(func() error {
- found, err := searcher.search(ctx, found[pkgName], pkgName, symbols)
- if err != nil {
- return err
- }
- if found == nil {
- return nil // No matching package.
- }
-
- imp := &ImportInfo{
- ImportPath: found.importPathShort,
- }
- pkg := &packageInfo{
- name: pkgName,
- exports: symbols,
- }
- results[index] = &result{imp, pkg}
- return nil
- })
- }
- if err := g.Wait(); err != nil {
- return err
- }
for _, result := range results {
if result == nil {
@@ -1267,7 +1200,7 @@ func addExternalCandidates(ctx context.Context, pass *pass, refs references, fil
}
// Don't offer completions that would shadow predeclared
// names, such as github.com/coreos/etcd/error.
- if types.Universe.Lookup(result.pkg.name) != nil { // predeclared
+ if types.Universe.Lookup(result.Package.Name) != nil { // predeclared
// Ideally we would skip this candidate only
// if the predeclared name is actually
// referenced by the file, but that's a lot
@@ -1276,7 +1209,7 @@ func addExternalCandidates(ctx context.Context, pass *pass, refs references, fil
// user before long.
continue
}
- pass.addCandidate(result.imp, result.pkg)
+ pass.addCandidate(result.Import, result.Package)
}
return nil
}
@@ -1801,7 +1734,7 @@ func (s *symbolSearcher) searchOne(ctx context.Context, c pkgDistance, symbols m
// filename is the file being formatted.
// pkgIdent is the package being searched for, like "client" (if
// searching for "client.New")
-func pkgIsCandidate(filename string, refs references, pkg *pkg) bool {
+func pkgIsCandidate(filename string, refs References, pkg *pkg) bool {
// Check "internal" and "vendor" visibility:
if !canUse(filename, pkg.dir) {
return false
diff --git a/vendor/golang.org/x/tools/internal/imports/imports.go b/vendor/golang.org/x/tools/internal/imports/imports.go
index ff6b59a58..2215a1288 100644
--- a/vendor/golang.org/x/tools/internal/imports/imports.go
+++ b/vendor/golang.org/x/tools/internal/imports/imports.go
@@ -47,7 +47,14 @@ type Options struct {
// Process implements golang.org/x/tools/imports.Process with explicit context in opt.Env.
func Process(filename string, src []byte, opt *Options) (formatted []byte, err error) {
fileSet := token.NewFileSet()
- file, adjust, err := parse(fileSet, filename, src, opt)
+ var parserMode parser.Mode
+ if opt.Comments {
+ parserMode |= parser.ParseComments
+ }
+ if opt.AllErrors {
+ parserMode |= parser.AllErrors
+ }
+ file, adjust, err := parse(fileSet, filename, src, parserMode, opt.Fragment)
if err != nil {
return nil, err
}
@@ -66,17 +73,19 @@ func Process(filename string, src []byte, opt *Options) (formatted []byte, err e
//
// Note that filename's directory influences which imports can be chosen,
// so it is important that filename be accurate.
-func FixImports(ctx context.Context, filename string, src []byte, opt *Options) (fixes []*ImportFix, err error) {
+func FixImports(ctx context.Context, filename string, src []byte, goroot string, logf func(string, ...any), source Source) (fixes []*ImportFix, err error) {
ctx, done := event.Start(ctx, "imports.FixImports")
defer done()
fileSet := token.NewFileSet()
- file, _, err := parse(fileSet, filename, src, opt)
+ // TODO(rfindley): these default values for ParseComments and AllErrors were
+ // extracted from gopls, but are they even needed?
+ file, _, err := parse(fileSet, filename, src, parser.ParseComments|parser.AllErrors, true)
if err != nil {
return nil, err
}
- return getFixes(ctx, fileSet, file, filename, opt.Env)
+ return getFixesWithSource(ctx, fileSet, file, filename, goroot, logf, source)
}
// ApplyFixes applies all of the fixes to the file and formats it. extraMode
@@ -114,7 +123,7 @@ func ApplyFixes(fixes []*ImportFix, filename string, src []byte, opt *Options, e
// formatted file, and returns the postpocessed result.
func formatFile(fset *token.FileSet, file *ast.File, src []byte, adjust func(orig []byte, src []byte) []byte, opt *Options) ([]byte, error) {
mergeImports(file)
- sortImports(opt.LocalPrefix, fset.File(file.Pos()), file)
+ sortImports(opt.LocalPrefix, fset.File(file.FileStart), file)
var spacesBefore []string // import paths we need spaces before
for _, impSection := range astutil.Imports(fset, file) {
// Within each block of contiguous imports, see if any
@@ -164,13 +173,9 @@ func formatFile(fset *token.FileSet, file *ast.File, src []byte, adjust func(ori
// parse parses src, which was read from filename,
// as a Go source file or statement list.
-func parse(fset *token.FileSet, filename string, src []byte, opt *Options) (*ast.File, func(orig, src []byte) []byte, error) {
- var parserMode parser.Mode // legacy ast.Object resolution is required here
- if opt.Comments {
- parserMode |= parser.ParseComments
- }
- if opt.AllErrors {
- parserMode |= parser.AllErrors
+func parse(fset *token.FileSet, filename string, src []byte, parserMode parser.Mode, fragment bool) (*ast.File, func(orig, src []byte) []byte, error) {
+ if parserMode&parser.SkipObjectResolution != 0 {
+ panic("legacy ast.Object resolution is required")
}
// Try as whole source file.
@@ -181,7 +186,7 @@ func parse(fset *token.FileSet, filename string, src []byte, opt *Options) (*ast
// If the error is that the source file didn't begin with a
// package line and we accept fragmented input, fall through to
// try as a source fragment. Stop and return on any other error.
- if !opt.Fragment || !strings.Contains(err.Error(), "expected 'package'") {
+ if !fragment || !strings.Contains(err.Error(), "expected 'package'") {
return nil, nil, err
}
diff --git a/vendor/golang.org/x/tools/internal/imports/source.go b/vendor/golang.org/x/tools/internal/imports/source.go
new file mode 100644
index 000000000..5d2aeeebc
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/imports/source.go
@@ -0,0 +1,63 @@
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package imports
+
+import "context"
+
+// These types document the APIs below.
+//
+// TODO(rfindley): consider making these defined types rather than aliases.
+type (
+ ImportPath = string
+ PackageName = string
+ Symbol = string
+
+ // References is a set of references found in a Go file. The first map key is the
+ // left hand side of a selector expression, the second key is the right hand
+ // side, and the value should always be true.
+ References = map[PackageName]map[Symbol]bool
+)
+
+// A Result satisfies a missing import.
+//
+// The Import field describes the missing import spec, and the Package field
+// summarizes the package exports.
+type Result struct {
+ Import *ImportInfo
+ Package *PackageInfo
+}
+
+// An ImportInfo represents a single import statement.
+type ImportInfo struct {
+ ImportPath string // import path, e.g. "crypto/rand".
+ Name string // import name, e.g. "crand", or "" if none.
+}
+
+// A PackageInfo represents what's known about a package.
+type PackageInfo struct {
+ Name string // package name in the package declaration, if known
+ Exports map[string]bool // set of names of known package-level symbols
+}
+
+// A Source provides imports to satisfy unresolved references in the file being
+// fixed.
+type Source interface {
+ // LoadPackageNames queries PackageName information for the requested import
+ // paths, when operating from the provided srcDir.
+ //
+ // TODO(rfindley): try to refactor to remove this operation.
+ LoadPackageNames(ctx context.Context, srcDir string, paths []ImportPath) (map[ImportPath]PackageName, error)
+
+ // ResolveReferences asks the Source for the best package name to satisfy
+ // each of the missing references, in the context of fixing the given
+ // filename.
+ //
+ // Returns a map from package name to a [Result] for that package name that
+ // provides the required symbols. Keys may be omitted in the map if no
+ // candidates satisfy all missing references for that package name. It is up
+ // to each data source to select the best result for each entry in the
+ // missing map.
+ ResolveReferences(ctx context.Context, filename string, missing References) (map[PackageName]*Result, error)
+}
diff --git a/vendor/golang.org/x/tools/internal/imports/source_env.go b/vendor/golang.org/x/tools/internal/imports/source_env.go
new file mode 100644
index 000000000..ff9555d28
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/imports/source_env.go
@@ -0,0 +1,125 @@
+// Copyright 2024 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package imports
+
+import (
+ "context"
+ "path/filepath"
+ "strings"
+ "sync"
+
+ "golang.org/x/sync/errgroup"
+ "golang.org/x/tools/internal/gopathwalk"
+)
+
+// ProcessEnvSource implements the [Source] interface using the legacy
+// [ProcessEnv] abstraction.
+type ProcessEnvSource struct {
+ env *ProcessEnv
+ srcDir string
+ filename string
+ pkgName string
+}
+
+// NewProcessEnvSource returns a [ProcessEnvSource] wrapping the given
+// env, to be used for fixing imports in the file with name filename in package
+// named pkgName.
+func NewProcessEnvSource(env *ProcessEnv, filename, pkgName string) (*ProcessEnvSource, error) {
+ abs, err := filepath.Abs(filename)
+ if err != nil {
+ return nil, err
+ }
+ srcDir := filepath.Dir(abs)
+ return &ProcessEnvSource{
+ env: env,
+ srcDir: srcDir,
+ filename: filename,
+ pkgName: pkgName,
+ }, nil
+}
+
+func (s *ProcessEnvSource) LoadPackageNames(ctx context.Context, srcDir string, unknown []string) (map[string]string, error) {
+ r, err := s.env.GetResolver()
+ if err != nil {
+ return nil, err
+ }
+ return r.loadPackageNames(unknown, srcDir)
+}
+
+func (s *ProcessEnvSource) ResolveReferences(ctx context.Context, filename string, refs map[string]map[string]bool) (map[string]*Result, error) {
+ var mu sync.Mutex
+ found := make(map[string][]pkgDistance)
+ callback := &scanCallback{
+ rootFound: func(gopathwalk.Root) bool {
+ return true // We want everything.
+ },
+ dirFound: func(pkg *pkg) bool {
+ return pkgIsCandidate(filename, refs, pkg)
+ },
+ packageNameLoaded: func(pkg *pkg) bool {
+ if _, want := refs[pkg.packageName]; !want {
+ return false
+ }
+ if pkg.dir == s.srcDir && s.pkgName == pkg.packageName {
+ // The candidate is in the same directory and has the
+ // same package name. Don't try to import ourselves.
+ return false
+ }
+ if !canUse(filename, pkg.dir) {
+ return false
+ }
+ mu.Lock()
+ defer mu.Unlock()
+ found[pkg.packageName] = append(found[pkg.packageName], pkgDistance{pkg, distance(s.srcDir, pkg.dir)})
+ return false // We'll do our own loading after we sort.
+ },
+ }
+ resolver, err := s.env.GetResolver()
+ if err != nil {
+ return nil, err
+ }
+ if err := resolver.scan(ctx, callback); err != nil {
+ return nil, err
+ }
+
+ g, ctx := errgroup.WithContext(ctx)
+
+ searcher := symbolSearcher{
+ logf: s.env.logf,
+ srcDir: s.srcDir,
+ xtest: strings.HasSuffix(s.pkgName, "_test"),
+ loadExports: resolver.loadExports,
+ }
+
+ var resultMu sync.Mutex
+ results := make(map[string]*Result, len(refs))
+ for pkgName, symbols := range refs {
+ g.Go(func() error {
+ found, err := searcher.search(ctx, found[pkgName], pkgName, symbols)
+ if err != nil {
+ return err
+ }
+ if found == nil {
+ return nil // No matching package.
+ }
+
+ imp := &ImportInfo{
+ ImportPath: found.importPathShort,
+ }
+ pkg := &PackageInfo{
+ Name: pkgName,
+ Exports: symbols,
+ }
+ resultMu.Lock()
+ results[pkgName] = &Result{Import: imp, Package: pkg}
+ resultMu.Unlock()
+ return nil
+ })
+ }
+ if err := g.Wait(); err != nil {
+ return nil, err
+ }
+ return results, nil
+}
diff --git a/vendor/golang.org/x/tools/internal/testenv/testenv.go b/vendor/golang.org/x/tools/internal/testenv/testenv.go
index d4a17ce03..70c186b13 100644
--- a/vendor/golang.org/x/tools/internal/testenv/testenv.go
+++ b/vendor/golang.org/x/tools/internal/testenv/testenv.go
@@ -8,6 +8,7 @@ package testenv
import (
"bytes"
+ "context"
"fmt"
"go/build"
"os"
@@ -21,6 +22,7 @@ import (
"time"
"golang.org/x/mod/modfile"
+ "golang.org/x/tools/internal/gocommand"
"golang.org/x/tools/internal/goroot"
)
@@ -323,6 +325,36 @@ func Go1Point() int {
panic("bad release tags")
}
+// NeedsGoCommand1Point skips t if the ambient go command version in the PATH
+// of the current process is older than 1.x.
+//
+// NeedsGoCommand1Point memoizes the result of running the go command, so
+// should be called after all mutations of PATH.
+func NeedsGoCommand1Point(t testing.TB, x int) {
+ NeedsTool(t, "go")
+ go1point, err := goCommand1Point()
+ if err != nil {
+ panic(fmt.Sprintf("unable to determine go version: %v", err))
+ }
+ if go1point < x {
+ t.Helper()
+ t.Skipf("go command is version 1.%d, older than required 1.%d", go1point, x)
+ }
+}
+
+var (
+ goCommand1PointOnce sync.Once
+ goCommand1Point_ int
+ goCommand1PointErr error
+)
+
+func goCommand1Point() (int, error) {
+ goCommand1PointOnce.Do(func() {
+ goCommand1Point_, goCommand1PointErr = gocommand.GoVersion(context.Background(), gocommand.Invocation{}, new(gocommand.Runner))
+ })
+ return goCommand1Point_, goCommand1PointErr
+}
+
// NeedsGo1Point skips t if the Go version used to run the test is older than
// 1.x.
func NeedsGo1Point(t testing.TB, x int) {
@@ -332,6 +364,23 @@ func NeedsGo1Point(t testing.TB, x int) {
}
}
+// SkipAfterGoCommand1Point skips t if the ambient go command version in the PATH of
+// the current process is newer than 1.x.
+//
+// SkipAfterGoCommand1Point memoizes the result of running the go command, so
+// should be called after any mutation of PATH.
+func SkipAfterGoCommand1Point(t testing.TB, x int) {
+ NeedsTool(t, "go")
+ go1point, err := goCommand1Point()
+ if err != nil {
+ panic(fmt.Sprintf("unable to determine go version: %v", err))
+ }
+ if go1point > x {
+ t.Helper()
+ t.Skipf("go command is version 1.%d, newer than maximum 1.%d", go1point, x)
+ }
+}
+
// SkipAfterGo1Point skips t if the Go version used to run the test is newer than
// 1.x.
func SkipAfterGo1Point(t testing.TB, x int) {
@@ -490,3 +539,17 @@ func NeedsGoExperiment(t testing.TB, flag string) {
t.Skipf("skipping test: flag %q is not set in GOEXPERIMENT=%q", flag, goexp)
}
}
+
+// NeedsGOROOTDir skips the test if GOROOT/dir does not exist, and GOROOT is a
+// released version of Go (=has a VERSION file). Some GOROOT directories are
+// removed by cmd/distpack.
+//
+// See also golang/go#70081.
+func NeedsGOROOTDir(t *testing.T, dir string) {
+ gorootTest := filepath.Join(GOROOT(t), dir)
+ if _, err := os.Stat(gorootTest); os.IsNotExist(err) {
+ if _, err := os.Stat(filepath.Join(GOROOT(t), "VERSION")); err == nil {
+ t.Skipf("skipping: GOROOT/%s not present", dir)
+ }
+ }
+}
diff --git a/vendor/golang.org/x/tools/internal/typeparams/free.go b/vendor/golang.org/x/tools/internal/typeparams/free.go
index 358108268..0ade5c294 100644
--- a/vendor/golang.org/x/tools/internal/typeparams/free.go
+++ b/vendor/golang.org/x/tools/internal/typeparams/free.go
@@ -6,6 +6,8 @@ package typeparams
import (
"go/types"
+
+ "golang.org/x/tools/internal/aliases"
)
// Free is a memoization of the set of free type parameters within a
@@ -36,6 +38,18 @@ func (w *Free) Has(typ types.Type) (res bool) {
break
case *types.Alias:
+ if aliases.TypeParams(t).Len() > aliases.TypeArgs(t).Len() {
+ return true // This is an uninstantiated Alias.
+ }
+ // The expansion of an alias can have free type parameters,
+ // whether or not the alias itself has type parameters:
+ //
+ // func _[K comparable]() {
+ // type Set = map[K]bool // free(Set) = {K}
+ // type MapTo[V] = map[K]V // free(Map[foo]) = {V}
+ // }
+ //
+ // So, we must Unalias.
return w.Has(types.Unalias(t))
case *types.Array:
@@ -96,9 +110,8 @@ func (w *Free) Has(typ types.Type) (res bool) {
case *types.Named:
args := t.TypeArgs()
- // TODO(taking): this does not match go/types/infer.go. Check with rfindley.
if params := t.TypeParams(); params.Len() > args.Len() {
- return true
+ return true // this is an uninstantiated named type.
}
for i, n := 0, args.Len(); i < n; i++ {
if w.Has(args.At(i)) {
diff --git a/vendor/golang.org/x/tools/internal/typesinternal/types.go b/vendor/golang.org/x/tools/internal/typesinternal/types.go
index 839232861..df3ea5212 100644
--- a/vendor/golang.org/x/tools/internal/typesinternal/types.go
+++ b/vendor/golang.org/x/tools/internal/typesinternal/types.go
@@ -11,6 +11,8 @@ import (
"go/types"
"reflect"
"unsafe"
+
+ "golang.org/x/tools/internal/aliases"
)
func SetUsesCgo(conf *types.Config) bool {
@@ -63,3 +65,57 @@ func NameRelativeTo(pkg *types.Package) types.Qualifier {
return other.Name()
}
}
+
+// A NamedOrAlias is a [types.Type] that is named (as
+// defined by the spec) and capable of bearing type parameters: it
+// abstracts aliases ([types.Alias]) and defined types
+// ([types.Named]).
+//
+// Every type declared by an explicit "type" declaration is a
+// NamedOrAlias. (Built-in type symbols may additionally
+// have type [types.Basic], which is not a NamedOrAlias,
+// though the spec regards them as "named".)
+//
+// NamedOrAlias cannot expose the Origin method, because
+// [types.Alias.Origin] and [types.Named.Origin] have different
+// (covariant) result types; use [Origin] instead.
+type NamedOrAlias interface {
+ types.Type
+ Obj() *types.TypeName
+}
+
+// TypeParams is a light shim around t.TypeParams().
+// (go/types.Alias).TypeParams requires >= 1.23.
+func TypeParams(t NamedOrAlias) *types.TypeParamList {
+ switch t := t.(type) {
+ case *types.Alias:
+ return aliases.TypeParams(t)
+ case *types.Named:
+ return t.TypeParams()
+ }
+ return nil
+}
+
+// TypeArgs is a light shim around t.TypeArgs().
+// (go/types.Alias).TypeArgs requires >= 1.23.
+func TypeArgs(t NamedOrAlias) *types.TypeList {
+ switch t := t.(type) {
+ case *types.Alias:
+ return aliases.TypeArgs(t)
+ case *types.Named:
+ return t.TypeArgs()
+ }
+ return nil
+}
+
+// Origin returns the generic type of the Named or Alias type t if it
+// is instantiated, otherwise it returns t.
+func Origin(t NamedOrAlias) NamedOrAlias {
+ switch t := t.(type) {
+ case *types.Alias:
+ return aliases.Origin(t)
+ case *types.Named:
+ return t.Origin()
+ }
+ return t
+}
diff --git a/vendor/golang.org/x/tools/internal/versions/types.go b/vendor/golang.org/x/tools/internal/versions/types.go
index f0bb0d15f..0fc10ce4e 100644
--- a/vendor/golang.org/x/tools/internal/versions/types.go
+++ b/vendor/golang.org/x/tools/internal/versions/types.go
@@ -31,8 +31,3 @@ func FileVersion(info *types.Info, file *ast.File) string {
// This would act as a max version on what a tool can support.
return Future
}
-
-// InitFileVersions initializes info to record Go versions for Go files.
-func InitFileVersions(info *types.Info) {
- info.FileVersions = make(map[*ast.File]string)
-}
diff --git a/vendor/google.golang.org/api/bigquery/v2/bigquery-api.json b/vendor/google.golang.org/api/bigquery/v2/bigquery-api.json
index e9e70c49e..37efa8dcd 100644
--- a/vendor/google.golang.org/api/bigquery/v2/bigquery-api.json
+++ b/vendor/google.golang.org/api/bigquery/v2/bigquery-api.json
@@ -41,6 +41,11 @@
},
{
"description": "Regional Endpoint",
+ "endpointUrl": "https://bigquery.europe-west8.rep.googleapis.com/",
+ "location": "europe-west8"
+ },
+ {
+ "description": "Regional Endpoint",
"endpointUrl": "https://bigquery.europe-west9.rep.googleapis.com/",
"location": "europe-west9"
},
@@ -1935,7 +1940,7 @@
}
}
},
- "revision": "20240815",
+ "revision": "20240905",
"rootUrl": "https://bigquery.googleapis.com/",
"schemas": {
"AggregateClassificationMetrics": {
@@ -3269,12 +3274,12 @@
"additionalProperties": {
"type": "string"
},
- "description": "Optional. The [tags](/bigquery/docs/tags) attached to this dataset. Tag keys are globally unique. Tag key is expected to be in the namespaced format, for example \"123456789012/environment\" where 123456789012 is the ID of the parent organization or project resource for this tag key. Tag value is expected to be the short name, for example \"Production\". See [Tag definitions](/iam/docs/tags-access-control#definitions) for more details.",
+ "description": "Optional. The [tags](https://cloud.google.com/bigquery/docs/tags) attached to this dataset. Tag keys are globally unique. Tag key is expected to be in the namespaced format, for example \"123456789012/environment\" where 123456789012 is the ID of the parent organization or project resource for this tag key. Tag value is expected to be the short name, for example \"Production\". See [Tag definitions](https://cloud.google.com/iam/docs/tags-access-control#definitions) for more details.",
"type": "object"
},
"restrictions": {
"$ref": "RestrictionConfig",
- "description": "Optional. Output only. Restriction config for all tables and dataset. If set, restrict certain accesses on the dataset and all its tables based on the config. See [Data egress](/bigquery/docs/analytics-hub-introduction#data_egress) for more details.",
+ "description": "Optional. Output only. Restriction config for all tables and dataset. If set, restrict certain accesses on the dataset and all its tables based on the config. See [Data egress](https://cloud.google.com/bigquery/docs/analytics-hub-introduction#data_egress) for more details.",
"readOnly": true
},
"satisfiesPzi": {
@@ -4516,7 +4521,7 @@
"type": "object"
},
"HparamTuningTrial": {
- "description": "Training info of a trial in [hyperparameter tuning](/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview) models.",
+ "description": "Training info of a trial in [hyperparameter tuning](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview) models.",
"id": "HparamTuningTrial",
"properties": {
"endTimeMs": {
@@ -4614,6 +4619,7 @@
"NOT_SUPPORTED_IN_STANDARD_EDITION",
"INDEX_SUPPRESSED_BY_FUNCTION_OPTION",
"QUERY_CACHE_HIT",
+ "STALE_INDEX",
"INTERNAL_ERROR",
"OTHER_REASON"
],
@@ -4636,6 +4642,7 @@
"Indicates that search indexes can not be used for search query with STANDARD edition.",
"Indicates that an option in the search function that cannot make use of the index has been selected.",
"Indicates that the query was cached, and thus the search index was not used.",
+ "The index cannot be used in the search query because it is stale.",
"Indicates an internal error that causes the search index to be unused.",
"Indicates that the reason search indexes cannot be used in the query is not covered by any of the other IndexUnusedReason options."
],
@@ -4810,7 +4817,7 @@
},
"jobCreationReason": {
"$ref": "JobCreationReason",
- "description": "Output only. The reason why a Job was created. [Preview](/products/#product-launch-stages)",
+ "description": "Output only. The reason why a Job was created. [Preview](https://cloud.google.com/products/#product-launch-stages)",
"readOnly": true
},
"jobReference": {
@@ -5370,7 +5377,7 @@
"type": "object"
},
"JobCreationReason": {
- "description": "Reason about why a Job was created from a [`jobs.query`](https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query) method when used with `JOB_CREATION_OPTIONAL` Job creation mode. For [`jobs.insert`](https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/insert) method calls it will always be `REQUESTED`. [Preview](/products/#product-launch-stages)",
+ "description": "Reason about why a Job was created from a [`jobs.query`](https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/query) method when used with `JOB_CREATION_OPTIONAL` Job creation mode. For [`jobs.insert`](https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/insert) method calls it will always be `REQUESTED`. [Preview](https://cloud.google.com/products/#product-launch-stages)",
"id": "JobCreationReason",
"properties": {
"code": {
@@ -5854,7 +5861,7 @@
"readOnly": true
},
"statementType": {
- "description": "Output only. The type of query statement, if valid. Possible values: * `SELECT`: [`SELECT`](/bigquery/docs/reference/standard-sql/query-syntax#select_list) statement. * `ASSERT`: [`ASSERT`](/bigquery/docs/reference/standard-sql/debugging-statements#assert) statement. * `INSERT`: [`INSERT`](/bigquery/docs/reference/standard-sql/dml-syntax#insert_statement) statement. * `UPDATE`: [`UPDATE`](/bigquery/docs/reference/standard-sql/query-syntax#update_statement) statement. * `DELETE`: [`DELETE`](/bigquery/docs/reference/standard-sql/data-manipulation-language) statement. * `MERGE`: [`MERGE`](/bigquery/docs/reference/standard-sql/data-manipulation-language) statement. * `CREATE_TABLE`: [`CREATE TABLE`](/bigquery/docs/reference/standard-sql/data-definition-language#create_table_statement) statement, without `AS SELECT`. * `CREATE_TABLE_AS_SELECT`: [`CREATE TABLE AS SELECT`](/bigquery/docs/reference/standard-sql/data-definition-language#query_statement) statement. * `CREATE_VIEW`: [`CREATE VIEW`](/bigquery/docs/reference/standard-sql/data-definition-language#create_view_statement) statement. * `CREATE_MODEL`: [`CREATE MODEL`](/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-create#create_model_statement) statement. * `CREATE_MATERIALIZED_VIEW`: [`CREATE MATERIALIZED VIEW`](/bigquery/docs/reference/standard-sql/data-definition-language#create_materialized_view_statement) statement. * `CREATE_FUNCTION`: [`CREATE FUNCTION`](/bigquery/docs/reference/standard-sql/data-definition-language#create_function_statement) statement. * `CREATE_TABLE_FUNCTION`: [`CREATE TABLE FUNCTION`](/bigquery/docs/reference/standard-sql/data-definition-language#create_table_function_statement) statement. * `CREATE_PROCEDURE`: [`CREATE PROCEDURE`](/bigquery/docs/reference/standard-sql/data-definition-language#create_procedure) statement. 
* `CREATE_ROW_ACCESS_POLICY`: [`CREATE ROW ACCESS POLICY`](/bigquery/docs/reference/standard-sql/data-definition-language#create_row_access_policy_statement) statement. * `CREATE_SCHEMA`: [`CREATE SCHEMA`](/bigquery/docs/reference/standard-sql/data-definition-language#create_schema_statement) statement. * `CREATE_SNAPSHOT_TABLE`: [`CREATE SNAPSHOT TABLE`](/bigquery/docs/reference/standard-sql/data-definition-language#create_snapshot_table_statement) statement. * `CREATE_SEARCH_INDEX`: [`CREATE SEARCH INDEX`](/bigquery/docs/reference/standard-sql/data-definition-language#create_search_index_statement) statement. * `DROP_TABLE`: [`DROP TABLE`](/bigquery/docs/reference/standard-sql/data-definition-language#drop_table_statement) statement. * `DROP_EXTERNAL_TABLE`: [`DROP EXTERNAL TABLE`](/bigquery/docs/reference/standard-sql/data-definition-language#drop_external_table_statement) statement. * `DROP_VIEW`: [`DROP VIEW`](/bigquery/docs/reference/standard-sql/data-definition-language#drop_view_statement) statement. * `DROP_MODEL`: [`DROP MODEL`](/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-drop-model) statement. * `DROP_MATERIALIZED_VIEW`: [`DROP MATERIALIZED VIEW`](/bigquery/docs/reference/standard-sql/data-definition-language#drop_materialized_view_statement) statement. * `DROP_FUNCTION` : [`DROP FUNCTION`](/bigquery/docs/reference/standard-sql/data-definition-language#drop_function_statement) statement. * `DROP_TABLE_FUNCTION` : [`DROP TABLE FUNCTION`](/bigquery/docs/reference/standard-sql/data-definition-language#drop_table_function) statement. * `DROP_PROCEDURE`: [`DROP PROCEDURE`](/bigquery/docs/reference/standard-sql/data-definition-language#drop_procedure_statement) statement. * `DROP_SEARCH_INDEX`: [`DROP SEARCH INDEX`](/bigquery/docs/reference/standard-sql/data-definition-language#drop_search_index) statement. * `DROP_SCHEMA`: [`DROP SCHEMA`](/bigquery/docs/reference/standard-sql/data-definition-language#drop_schema_statement) statement. 
* `DROP_SNAPSHOT_TABLE`: [`DROP SNAPSHOT TABLE`](/bigquery/docs/reference/standard-sql/data-definition-language#drop_snapshot_table_statement) statement. * `DROP_ROW_ACCESS_POLICY`: [`DROP [ALL] ROW ACCESS POLICY|POLICIES`](/bigquery/docs/reference/standard-sql/data-definition-language#drop_row_access_policy_statement) statement. * `ALTER_TABLE`: [`ALTER TABLE`](/bigquery/docs/reference/standard-sql/data-definition-language#alter_table_set_options_statement) statement. * `ALTER_VIEW`: [`ALTER VIEW`](/bigquery/docs/reference/standard-sql/data-definition-language#alter_view_set_options_statement) statement. * `ALTER_MATERIALIZED_VIEW`: [`ALTER MATERIALIZED VIEW`](/bigquery/docs/reference/standard-sql/data-definition-language#alter_materialized_view_set_options_statement) statement. * `ALTER_SCHEMA`: [`ALTER SCHEMA`](/bigquery/docs/reference/standard-sql/data-definition-language#aalter_schema_set_options_statement) statement. * `SCRIPT`: [`SCRIPT`](/bigquery/docs/reference/standard-sql/procedural-language). * `TRUNCATE_TABLE`: [`TRUNCATE TABLE`](/bigquery/docs/reference/standard-sql/dml-syntax#truncate_table_statement) statement. * `CREATE_EXTERNAL_TABLE`: [`CREATE EXTERNAL TABLE`](/bigquery/docs/reference/standard-sql/data-definition-language#create_external_table_statement) statement. * `EXPORT_DATA`: [`EXPORT DATA`](/bigquery/docs/reference/standard-sql/other-statements#export_data_statement) statement. * `EXPORT_MODEL`: [`EXPORT MODEL`](/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-export-model) statement. * `LOAD_DATA`: [`LOAD DATA`](/bigquery/docs/reference/standard-sql/other-statements#load_data_statement) statement. * `CALL`: [`CALL`](/bigquery/docs/reference/standard-sql/procedural-language#call) statement.",
+ "description": "Output only. The type of query statement, if valid. Possible values: * `SELECT`: [`SELECT`](https://cloud.google.com/bigquery/docs/reference/standard-sql/query-syntax#select_list) statement. * `ASSERT`: [`ASSERT`](https://cloud.google.com/bigquery/docs/reference/standard-sql/debugging-statements#assert) statement. * `INSERT`: [`INSERT`](https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax#insert_statement) statement. * `UPDATE`: [`UPDATE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/query-syntax#update_statement) statement. * `DELETE`: [`DELETE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-manipulation-language) statement. * `MERGE`: [`MERGE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-manipulation-language) statement. * `CREATE_TABLE`: [`CREATE TABLE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_table_statement) statement, without `AS SELECT`. * `CREATE_TABLE_AS_SELECT`: [`CREATE TABLE AS SELECT`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#query_statement) statement. * `CREATE_VIEW`: [`CREATE VIEW`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_view_statement) statement. * `CREATE_MODEL`: [`CREATE MODEL`](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-create#create_model_statement) statement. * `CREATE_MATERIALIZED_VIEW`: [`CREATE MATERIALIZED VIEW`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_materialized_view_statement) statement. * `CREATE_FUNCTION`: [`CREATE FUNCTION`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_function_statement) statement. 
* `CREATE_TABLE_FUNCTION`: [`CREATE TABLE FUNCTION`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_table_function_statement) statement. * `CREATE_PROCEDURE`: [`CREATE PROCEDURE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_procedure) statement. * `CREATE_ROW_ACCESS_POLICY`: [`CREATE ROW ACCESS POLICY`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_row_access_policy_statement) statement. * `CREATE_SCHEMA`: [`CREATE SCHEMA`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_schema_statement) statement. * `CREATE_SNAPSHOT_TABLE`: [`CREATE SNAPSHOT TABLE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_snapshot_table_statement) statement. * `CREATE_SEARCH_INDEX`: [`CREATE SEARCH INDEX`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_search_index_statement) statement. * `DROP_TABLE`: [`DROP TABLE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_table_statement) statement. * `DROP_EXTERNAL_TABLE`: [`DROP EXTERNAL TABLE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_external_table_statement) statement. * `DROP_VIEW`: [`DROP VIEW`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_view_statement) statement. * `DROP_MODEL`: [`DROP MODEL`](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-drop-model) statement. * `DROP_MATERIALIZED_VIEW`: [`DROP MATERIALIZED VIEW`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_materialized_view_statement) statement. 
* `DROP_FUNCTION` : [`DROP FUNCTION`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_function_statement) statement. * `DROP_TABLE_FUNCTION` : [`DROP TABLE FUNCTION`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_table_function) statement. * `DROP_PROCEDURE`: [`DROP PROCEDURE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_procedure_statement) statement. * `DROP_SEARCH_INDEX`: [`DROP SEARCH INDEX`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_search_index) statement. * `DROP_SCHEMA`: [`DROP SCHEMA`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_schema_statement) statement. * `DROP_SNAPSHOT_TABLE`: [`DROP SNAPSHOT TABLE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_snapshot_table_statement) statement. * `DROP_ROW_ACCESS_POLICY`: [`DROP [ALL] ROW ACCESS POLICY|POLICIES`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_row_access_policy_statement) statement. * `ALTER_TABLE`: [`ALTER TABLE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#alter_table_set_options_statement) statement. * `ALTER_VIEW`: [`ALTER VIEW`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#alter_view_set_options_statement) statement. * `ALTER_MATERIALIZED_VIEW`: [`ALTER MATERIALIZED VIEW`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#alter_materialized_view_set_options_statement) statement. * `ALTER_SCHEMA`: [`ALTER SCHEMA`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#aalter_schema_set_options_statement) statement. * `SCRIPT`: [`SCRIPT`](https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language). 
* `TRUNCATE_TABLE`: [`TRUNCATE TABLE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax#truncate_table_statement) statement. * `CREATE_EXTERNAL_TABLE`: [`CREATE EXTERNAL TABLE`](https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_external_table_statement) statement. * `EXPORT_DATA`: [`EXPORT DATA`](https://cloud.google.com/bigquery/docs/reference/standard-sql/other-statements#export_data_statement) statement. * `EXPORT_MODEL`: [`EXPORT MODEL`](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-export-model) statement. * `LOAD_DATA`: [`LOAD DATA`](https://cloud.google.com/bigquery/docs/reference/standard-sql/other-statements#load_data_statement) statement. * `CALL`: [`CALL`](https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#call) statement.",
"readOnly": true,
"type": "string"
},
@@ -6370,7 +6377,7 @@
"id": "MlStatistics",
"properties": {
"hparamTrials": {
- "description": "Output only. Trials of a [hyperparameter tuning job](/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview) sorted by trial_id.",
+ "description": "Output only. Trials of a [hyperparameter tuning job](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview) sorted by trial_id.",
"items": {
"$ref": "HparamTuningTrial"
},
@@ -6378,7 +6385,7 @@
"type": "array"
},
"iterationResults": {
- "description": "Results for all completed iterations. Empty for [hyperparameter tuning jobs](/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview).",
+ "description": "Results for all completed iterations. Empty for [hyperparameter tuning jobs](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview).",
"items": {
"$ref": "IterationResult"
},
@@ -6459,7 +6466,7 @@
"enumDescriptions": [
"Unspecified training type.",
"Single training with fixed parameter space.",
- "[Hyperparameter tuning training](/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview)."
+ "[Hyperparameter tuning training](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview)."
],
"readOnly": true,
"type": "string"
@@ -6483,7 +6490,7 @@
"type": "string"
},
"defaultTrialId": {
- "description": "Output only. The default trial_id to use in TVFs when the trial_id is not passed in. For single-objective [hyperparameter tuning](/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview) models, this is the best trial ID. For multi-objective [hyperparameter tuning](/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview) models, this is the smallest trial ID among all Pareto optimal trials.",
+ "description": "Output only. The default trial_id to use in TVFs when the trial_id is not passed in. For single-objective [hyperparameter tuning](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview) models, this is the best trial ID. For multi-objective [hyperparameter tuning](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview) models, this is the smallest trial ID among all Pareto optimal trials.",
"format": "int64",
"readOnly": true,
"type": "string"
@@ -6524,7 +6531,7 @@
"readOnly": true
},
"hparamTrials": {
- "description": "Output only. Trials of a [hyperparameter tuning](/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview) model sorted by trial_id.",
+ "description": "Output only. Trials of a [hyperparameter tuning](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview) model sorted by trial_id.",
"items": {
"$ref": "HparamTuningTrial"
},
@@ -6621,7 +6628,7 @@
"type": "string"
},
"optimalTrialIds": {
- "description": "Output only. For single-objective [hyperparameter tuning](/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview) models, it only contains the best trial. For multi-objective [hyperparameter tuning](/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview) models, it contains all Pareto optimal trials sorted by trial_id.",
+ "description": "Output only. For single-objective [hyperparameter tuning](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview) models, it only contains the best trial. For multi-objective [hyperparameter tuning](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview) models, it contains all Pareto optimal trials sorted by trial_id.",
"items": {
"format": "int64",
"type": "string"
@@ -6688,7 +6695,7 @@
"id": "ModelExtractOptions",
"properties": {
"trialId": {
- "description": "The 1-based ID of the trial to be exported from a hyperparameter tuning model. If not specified, the trial with id = [Model](/bigquery/docs/reference/rest/v2/models#resource:-model).defaultTrialId is exported. This field is ignored for models not trained with hyperparameter tuning.",
+ "description": "The 1-based ID of the trial to be exported from a hyperparameter tuning model. If not specified, the trial with id = [Model](https://cloud.google.com/bigquery/docs/reference/rest/v2/models#resource:-model).defaultTrialId is exported. This field is ignored for models not trained with hyperparameter tuning.",
"format": "int64",
"type": "string"
}
@@ -7109,7 +7116,7 @@
"description": "Optional. Output format adjustments."
},
"jobCreationMode": {
- "description": "Optional. If not set, jobs are always required. If set, the query request will follow the behavior described JobCreationMode. [Preview](/products/#product-launch-stages)",
+ "description": "Optional. If not set, jobs are always required. If set, the query request will follow the behavior described JobCreationMode. [Preview](https://cloud.google.com/products/#product-launch-stages)",
"enum": [
"JOB_CREATION_MODE_UNSPECIFIED",
"JOB_CREATION_REQUIRED",
@@ -7216,7 +7223,7 @@
},
"jobCreationReason": {
"$ref": "JobCreationReason",
- "description": "Optional. The reason why a Job was created. Only relevant when a job_reference is present in the response. If job_reference is not present it will always be unset. [Preview](/products/#product-launch-stages)"
+ "description": "Optional. The reason why a Job was created. Only relevant when a job_reference is present in the response. If job_reference is not present it will always be unset. [Preview](https://cloud.google.com/products/#product-launch-stages)"
},
"jobReference": {
"$ref": "JobReference",
@@ -7238,7 +7245,7 @@
"type": "string"
},
"queryId": {
- "description": "Auto-generated ID for the query. [Preview](/products/#product-launch-stages)",
+ "description": "Auto-generated ID for the query. [Preview](https://cloud.google.com/products/#product-launch-stages)",
"type": "string"
},
"rows": {
@@ -7502,7 +7509,7 @@
],
"enumDescriptions": [
"Should never be used.",
- "Restrict data egress. See [Data egress](/bigquery/docs/analytics-hub-introduction#data_egress) for more details."
+ "Restrict data egress. See [Data egress](https://cloud.google.com/bigquery/docs/analytics-hub-introduction#data_egress) for more details."
],
"readOnly": true,
"type": "string"
@@ -8266,7 +8273,7 @@
"type": "string"
},
"locationUri": {
- "description": "Optional. The physical location of the table (e.g. 'gs://spark-dataproc-data/pangea-data/case_sensitive/' or 'gs://spark-dataproc-data/pangea-data/*'). The maximum length is 2056 bytes.",
+ "description": "Optional. The physical location of the table (e.g. `gs://spark-dataproc-data/pangea-data/case_sensitive/` or `gs://spark-dataproc-data/pangea-data/*`). The maximum length is 2056 bytes.",
"type": "string"
},
"outputFormat": {
@@ -8565,7 +8572,7 @@
},
"restrictions": {
"$ref": "RestrictionConfig",
- "description": "Optional. Output only. Restriction config for table. If set, restrict certain accesses on the table based on the config. See [Data egress](/bigquery/docs/analytics-hub-introduction#data_egress) for more details.",
+ "description": "Optional. Output only. Restriction config for table. If set, restrict certain accesses on the table based on the config. See [Data egress](https://cloud.google.com/bigquery/docs/analytics-hub-introduction#data_egress) for more details.",
"readOnly": true
},
"schema": {
@@ -8604,7 +8611,7 @@
"description": "If specified, configures time-based partitioning for this table."
},
"type": {
- "description": "Output only. Describes the table type. The following values are supported: * `TABLE`: A normal BigQuery table. * `VIEW`: A virtual table defined by a SQL query. * `EXTERNAL`: A table that references data stored in an external storage system, such as Google Cloud Storage. * `MATERIALIZED_VIEW`: A precomputed view defined by a SQL query. * `SNAPSHOT`: An immutable BigQuery table that preserves the contents of a base table at a particular time. See additional information on [table snapshots](/bigquery/docs/table-snapshots-intro). The default value is `TABLE`.",
+ "description": "Output only. Describes the table type. The following values are supported: * `TABLE`: A normal BigQuery table. * `VIEW`: A virtual table defined by a SQL query. * `EXTERNAL`: A table that references data stored in an external storage system, such as Google Cloud Storage. * `MATERIALIZED_VIEW`: A precomputed view defined by a SQL query. * `SNAPSHOT`: An immutable BigQuery table that preserves the contents of a base table at a particular time. See additional information on [table snapshots](https://cloud.google.com/bigquery/docs/table-snapshots-intro). The default value is `TABLE`.",
"readOnly": true,
"type": "string"
},
@@ -9035,7 +9042,7 @@
"description": "Metadata caching eligible table referenced in the query."
},
"tableType": {
- "description": "[Table type](/bigquery/docs/reference/rest/v2/tables#Table.FIELDS.type).",
+ "description": "[Table type](https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#Table.FIELDS.type).",
"type": "string"
},
"unusedReason": {
diff --git a/vendor/google.golang.org/api/bigquery/v2/bigquery-gen.go b/vendor/google.golang.org/api/bigquery/v2/bigquery-gen.go
index 82d41a7a9..d4bd7e23e 100644
--- a/vendor/google.golang.org/api/bigquery/v2/bigquery-gen.go
+++ b/vendor/google.golang.org/api/bigquery/v2/bigquery-gen.go
@@ -2078,18 +2078,21 @@ type Dataset struct {
// value can be from 48 to 168 hours (2 to 7 days). The default value is 168
// hours if this is not set.
MaxTimeTravelHours int64 `json:"maxTimeTravelHours,omitempty,string"`
- // ResourceTags: Optional. The tags (/bigquery/docs/tags) attached to this
- // dataset. Tag keys are globally unique. Tag key is expected to be in the
- // namespaced format, for example "123456789012/environment" where 123456789012
- // is the ID of the parent organization or project resource for this tag key.
- // Tag value is expected to be the short name, for example "Production". See
- // Tag definitions (/iam/docs/tags-access-control#definitions) for more
+ // ResourceTags: Optional. The tags
+ // (https://cloud.google.com/bigquery/docs/tags) attached to this dataset. Tag
+ // keys are globally unique. Tag key is expected to be in the namespaced
+ // format, for example "123456789012/environment" where 123456789012 is the ID
+ // of the parent organization or project resource for this tag key. Tag value
+ // is expected to be the short name, for example "Production". See Tag
+ // definitions
+ // (https://cloud.google.com/iam/docs/tags-access-control#definitions) for more
// details.
ResourceTags map[string]string `json:"resourceTags,omitempty"`
// Restrictions: Optional. Output only. Restriction config for all tables and
// dataset. If set, restrict certain accesses on the dataset and all its tables
// based on the config. See Data egress
- // (/bigquery/docs/analytics-hub-introduction#data_egress) for more details.
+ // (https://cloud.google.com/bigquery/docs/analytics-hub-introduction#data_egress)
+ // for more details.
Restrictions *RestrictionConfig `json:"restrictions,omitempty"`
// SatisfiesPzi: Output only. Reserved for future use.
SatisfiesPzi bool `json:"satisfiesPzi,omitempty"`
@@ -3829,8 +3832,8 @@ func (s HparamSearchSpaces) MarshalJSON() ([]byte, error) {
}
// HparamTuningTrial: Training info of a trial in hyperparameter tuning
-// (/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overvie
-// w) models.
+// (https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview)
+// models.
type HparamTuningTrial struct {
// EndTimeMs: Ending time of the trial.
EndTimeMs int64 `json:"endTimeMs,omitempty,string"`
@@ -3949,6 +3952,8 @@ type IndexUnusedReason struct {
// search function that cannot make use of the index has been selected.
// "QUERY_CACHE_HIT" - Indicates that the query was cached, and thus the
// search index was not used.
+ // "STALE_INDEX" - The index cannot be used in the search query because it is
+ // stale.
// "INTERNAL_ERROR" - Indicates an internal error that causes the search
// index to be unused.
// "OTHER_REASON" - Indicates that the reason search indexes cannot be used
@@ -4190,7 +4195,7 @@ type Job struct {
// Id: Output only. Opaque ID field of the job.
Id string `json:"id,omitempty"`
// JobCreationReason: Output only. The reason why a Job was created. Preview
- // (/products/#product-launch-stages)
+ // (https://cloud.google.com/products/#product-launch-stages)
JobCreationReason *JobCreationReason `json:"jobCreationReason,omitempty"`
// JobReference: Optional. Reference describing the unique-per-user name of the
// job.
@@ -4894,7 +4899,7 @@ func (s JobConfigurationTableCopy) MarshalJSON() ([]byte, error) {
// when used with `JOB_CREATION_OPTIONAL` Job creation mode. For `jobs.insert`
// (https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs/insert)
// method calls it will always be `REQUESTED`. Preview
-// (/products/#product-launch-stages)
+// (https://cloud.google.com/products/#product-launch-stages)
type JobCreationReason struct {
// Code: Output only. Specifies the high level reason why a Job was created.
//
@@ -5267,93 +5272,90 @@ type JobStatistics2 struct {
SparkStatistics *SparkStatistics `json:"sparkStatistics,omitempty"`
// StatementType: Output only. The type of query statement, if valid. Possible
// values: * `SELECT`: `SELECT`
- // (/bigquery/docs/reference/standard-sql/query-syntax#select_list) statement.
- // * `ASSERT`: `ASSERT`
- // (/bigquery/docs/reference/standard-sql/debugging-statements#assert)
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/query-syntax#select_list)
+ // statement. * `ASSERT`: `ASSERT`
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/debugging-statements#assert)
// statement. * `INSERT`: `INSERT`
- // (/bigquery/docs/reference/standard-sql/dml-syntax#insert_statement)
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax#insert_statement)
// statement. * `UPDATE`: `UPDATE`
- // (/bigquery/docs/reference/standard-sql/query-syntax#update_statement)
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/query-syntax#update_statement)
// statement. * `DELETE`: `DELETE`
- // (/bigquery/docs/reference/standard-sql/data-manipulation-language)
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-manipulation-language)
// statement. * `MERGE`: `MERGE`
- // (/bigquery/docs/reference/standard-sql/data-manipulation-language)
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-manipulation-language)
// statement. * `CREATE_TABLE`: `CREATE TABLE`
- // (/bigquery/docs/reference/standard-sql/data-definition-language#create_table_
- // statement) statement, without `AS SELECT`. * `CREATE_TABLE_AS_SELECT`:
- // `CREATE TABLE AS SELECT`
- // (/bigquery/docs/reference/standard-sql/data-definition-language#query_stateme
- // nt) statement. * `CREATE_VIEW`: `CREATE VIEW`
- // (/bigquery/docs/reference/standard-sql/data-definition-language#create_view_s
- // tatement) statement. * `CREATE_MODEL`: `CREATE MODEL`
- // (/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-create#create_mod
- // el_statement) statement. * `CREATE_MATERIALIZED_VIEW`: `CREATE MATERIALIZED
- // VIEW`
- // (/bigquery/docs/reference/standard-sql/data-definition-language#create_materi
- // alized_view_statement) statement. * `CREATE_FUNCTION`: `CREATE FUNCTION`
- // (/bigquery/docs/reference/standard-sql/data-definition-language#create_functi
- // on_statement) statement. * `CREATE_TABLE_FUNCTION`: `CREATE TABLE FUNCTION`
- // (/bigquery/docs/reference/standard-sql/data-definition-language#create_table_
- // function_statement) statement. * `CREATE_PROCEDURE`: `CREATE PROCEDURE`
- // (/bigquery/docs/reference/standard-sql/data-definition-language#create_proced
- // ure) statement. * `CREATE_ROW_ACCESS_POLICY`: `CREATE ROW ACCESS POLICY`
- // (/bigquery/docs/reference/standard-sql/data-definition-language#create_row_ac
- // cess_policy_statement) statement. * `CREATE_SCHEMA`: `CREATE SCHEMA`
- // (/bigquery/docs/reference/standard-sql/data-definition-language#create_schema
- // _statement) statement. * `CREATE_SNAPSHOT_TABLE`: `CREATE SNAPSHOT TABLE`
- // (/bigquery/docs/reference/standard-sql/data-definition-language#create_snapsh
- // ot_table_statement) statement. * `CREATE_SEARCH_INDEX`: `CREATE SEARCH
- // INDEX`
- // (/bigquery/docs/reference/standard-sql/data-definition-language#create_search
- // _index_statement) statement. * `DROP_TABLE`: `DROP TABLE`
- // (/bigquery/docs/reference/standard-sql/data-definition-language#drop_table_st
- // atement) statement. * `DROP_EXTERNAL_TABLE`: `DROP EXTERNAL TABLE`
- // (/bigquery/docs/reference/standard-sql/data-definition-language#drop_external
- // _table_statement) statement. * `DROP_VIEW`: `DROP VIEW`
- // (/bigquery/docs/reference/standard-sql/data-definition-language#drop_view_sta
- // tement) statement. * `DROP_MODEL`: `DROP MODEL`
- // (/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-drop-model)
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_table_statement)
+ // statement, without `AS SELECT`. * `CREATE_TABLE_AS_SELECT`: `CREATE TABLE AS
+ // SELECT`
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#query_statement)
+ // statement. * `CREATE_VIEW`: `CREATE VIEW`
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_view_statement)
+ // statement. * `CREATE_MODEL`: `CREATE MODEL`
+ // (https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-create#create_model_statement)
+ // statement. * `CREATE_MATERIALIZED_VIEW`: `CREATE MATERIALIZED VIEW`
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_materialized_view_statement)
+ // statement. * `CREATE_FUNCTION`: `CREATE FUNCTION`
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_function_statement)
+ // statement. * `CREATE_TABLE_FUNCTION`: `CREATE TABLE FUNCTION`
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_table_function_statement)
+ // statement. * `CREATE_PROCEDURE`: `CREATE PROCEDURE`
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_procedure)
+ // statement. * `CREATE_ROW_ACCESS_POLICY`: `CREATE ROW ACCESS POLICY`
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_row_access_policy_statement)
+ // statement. * `CREATE_SCHEMA`: `CREATE SCHEMA`
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_schema_statement)
+ // statement. * `CREATE_SNAPSHOT_TABLE`: `CREATE SNAPSHOT TABLE`
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_snapshot_table_statement)
+ // statement. * `CREATE_SEARCH_INDEX`: `CREATE SEARCH INDEX`
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_search_index_statement)
+ // statement. * `DROP_TABLE`: `DROP TABLE`
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_table_statement)
+ // statement. * `DROP_EXTERNAL_TABLE`: `DROP EXTERNAL TABLE`
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_external_table_statement)
+ // statement. * `DROP_VIEW`: `DROP VIEW`
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_view_statement)
+ // statement. * `DROP_MODEL`: `DROP MODEL`
+ // (https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-drop-model)
// statement. * `DROP_MATERIALIZED_VIEW`: `DROP MATERIALIZED VIEW`
- // (/bigquery/docs/reference/standard-sql/data-definition-language#drop_material
- // ized_view_statement) statement. * `DROP_FUNCTION` : `DROP FUNCTION`
- // (/bigquery/docs/reference/standard-sql/data-definition-language#drop_function
- // _statement) statement. * `DROP_TABLE_FUNCTION` : `DROP TABLE FUNCTION`
- // (/bigquery/docs/reference/standard-sql/data-definition-language#drop_table_fu
- // nction) statement. * `DROP_PROCEDURE`: `DROP PROCEDURE`
- // (/bigquery/docs/reference/standard-sql/data-definition-language#drop_procedur
- // e_statement) statement. * `DROP_SEARCH_INDEX`: `DROP SEARCH INDEX`
- // (/bigquery/docs/reference/standard-sql/data-definition-language#drop_search_i
- // ndex) statement. * `DROP_SCHEMA`: `DROP SCHEMA`
- // (/bigquery/docs/reference/standard-sql/data-definition-language#drop_schema_s
- // tatement) statement. * `DROP_SNAPSHOT_TABLE`: `DROP SNAPSHOT TABLE`
- // (/bigquery/docs/reference/standard-sql/data-definition-language#drop_snapshot
- // _table_statement) statement. * `DROP_ROW_ACCESS_POLICY`: [`DROP ALL] ROW
- // ACCESS POLICY|POLICIES`
- // (/bigquery/docs/reference/standard-sql/data-definition-language#drop_row_acce
- // ss_policy_statement) statement. * `ALTER_TABLE`: `ALTER TABLE`
- // (/bigquery/docs/reference/standard-sql/data-definition-language#alter_table_s
- // et_options_statement) statement. * `ALTER_VIEW`: `ALTER VIEW`
- // (/bigquery/docs/reference/standard-sql/data-definition-language#alter_view_se
- // t_options_statement) statement. * `ALTER_MATERIALIZED_VIEW`: `ALTER
- // MATERIALIZED VIEW`
- // (/bigquery/docs/reference/standard-sql/data-definition-language#alter_materia
- // lized_view_set_options_statement) statement. * `ALTER_SCHEMA`: `ALTER
- // SCHEMA`
- // (/bigquery/docs/reference/standard-sql/data-definition-language#aalter_schema
- // _set_options_statement) statement. * `SCRIPT`: `SCRIPT`
- // (/bigquery/docs/reference/standard-sql/procedural-language). *
- // `TRUNCATE_TABLE`: `TRUNCATE TABLE`
- // (/bigquery/docs/reference/standard-sql/dml-syntax#truncate_table_statement)
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_materialized_view_statement)
+ // statement. * `DROP_FUNCTION` : `DROP FUNCTION`
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_function_statement)
+ // statement. * `DROP_TABLE_FUNCTION` : `DROP TABLE FUNCTION`
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_table_function)
+ // statement. * `DROP_PROCEDURE`: `DROP PROCEDURE`
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_procedure_statement)
+ // statement. * `DROP_SEARCH_INDEX`: `DROP SEARCH INDEX`
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_search_index)
+ // statement. * `DROP_SCHEMA`: `DROP SCHEMA`
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_schema_statement)
+ // statement. * `DROP_SNAPSHOT_TABLE`: `DROP SNAPSHOT TABLE`
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_snapshot_table_statement)
+ // statement. * `DROP_ROW_ACCESS_POLICY`: [`DROP ALL] ROW ACCESS
+ // POLICY|POLICIES`
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#drop_row_access_policy_statement)
+ // statement. * `ALTER_TABLE`: `ALTER TABLE`
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#alter_table_set_options_statement)
+ // statement. * `ALTER_VIEW`: `ALTER VIEW`
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#alter_view_set_options_statement)
+ // statement. * `ALTER_MATERIALIZED_VIEW`: `ALTER MATERIALIZED VIEW`
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#alter_materialized_view_set_options_statement)
+ // statement. * `ALTER_SCHEMA`: `ALTER SCHEMA`
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#aalter_schema_set_options_statement)
+ // statement. * `SCRIPT`: `SCRIPT`
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language).
+ // * `TRUNCATE_TABLE`: `TRUNCATE TABLE`
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/dml-syntax#truncate_table_statement)
// statement. * `CREATE_EXTERNAL_TABLE`: `CREATE EXTERNAL TABLE`
- // (/bigquery/docs/reference/standard-sql/data-definition-language#create_extern
- // al_table_statement) statement. * `EXPORT_DATA`: `EXPORT DATA`
- // (/bigquery/docs/reference/standard-sql/other-statements#export_data_statement
- // ) statement. * `EXPORT_MODEL`: `EXPORT MODEL`
- // (/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-export-model)
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#create_external_table_statement)
+ // statement. * `EXPORT_DATA`: `EXPORT DATA`
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/other-statements#export_data_statement)
+ // statement. * `EXPORT_MODEL`: `EXPORT MODEL`
+ // (https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-export-model)
// statement. * `LOAD_DATA`: `LOAD DATA`
- // (/bigquery/docs/reference/standard-sql/other-statements#load_data_statement)
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/other-statements#load_data_statement)
// statement. * `CALL`: `CALL`
- // (/bigquery/docs/reference/standard-sql/procedural-language#call) statement.
+ // (https://cloud.google.com/bigquery/docs/reference/standard-sql/procedural-language#call)
+ // statement.
StatementType string `json:"statementType,omitempty"`
// Timeline: Output only. Describes a timeline of job execution.
Timeline []*QueryTimelineSample `json:"timeline,omitempty"`
@@ -6009,13 +6011,12 @@ func (s MetadataCacheStatistics) MarshalJSON() ([]byte, error) {
// MlStatistics: Job statistics specific to a BigQuery ML training job.
type MlStatistics struct {
// HparamTrials: Output only. Trials of a hyperparameter tuning job
- // (/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overvie
- // w) sorted by trial_id.
+ // (https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview)
+ // sorted by trial_id.
HparamTrials []*HparamTuningTrial `json:"hparamTrials,omitempty"`
// IterationResults: Results for all completed iterations. Empty for
// hyperparameter tuning jobs
- // (/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overvie
- // w).
+ // (https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview).
IterationResults []*IterationResult `json:"iterationResults,omitempty"`
// MaxIterations: Output only. Maximum number of iterations specified as
// max_iterations in the 'CREATE MODEL' query. The actual number of iterations
@@ -6057,8 +6058,8 @@ type MlStatistics struct {
// "TRAINING_TYPE_UNSPECIFIED" - Unspecified training type.
// "SINGLE_TRAINING" - Single training with fixed parameter space.
// "HPARAM_TUNING" - [Hyperparameter tuning
- // training](/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tunin
- // g-overview).
+ // training](https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bi
+ // gqueryml-syntax-hp-tuning-overview).
TrainingType string `json:"trainingType,omitempty"`
// ForceSendFields is a list of field names (e.g. "HparamTrials") to
// unconditionally include in API requests. By default, fields with empty or
@@ -6086,11 +6087,10 @@ type Model struct {
CreationTime int64 `json:"creationTime,omitempty,string"`
// DefaultTrialId: Output only. The default trial_id to use in TVFs when the
// trial_id is not passed in. For single-objective hyperparameter tuning
- // (/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overvie
- // w) models, this is the best trial ID. For multi-objective hyperparameter
- // tuning
- // (/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overvie
- // w) models, this is the smallest trial ID among all Pareto optimal trials.
+ // (https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview)
+ // models, this is the best trial ID. For multi-objective hyperparameter tuning
+ // (https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview)
+ // models, this is the smallest trial ID among all Pareto optimal trials.
DefaultTrialId int64 `json:"defaultTrialId,omitempty,string"`
// Description: Optional. A user-friendly description of this model.
Description string `json:"description,omitempty"`
@@ -6117,8 +6117,8 @@ type Model struct {
// model.
HparamSearchSpaces *HparamSearchSpaces `json:"hparamSearchSpaces,omitempty"`
// HparamTrials: Output only. Trials of a hyperparameter tuning
- // (/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overvie
- // w) model sorted by trial_id.
+ // (https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview)
+ // model sorted by trial_id.
HparamTrials []*HparamTuningTrial `json:"hparamTrials,omitempty"`
// LabelColumns: Output only. Label columns that were used to train this model.
// The output of the model will have a "predicted_" prefix to these columns.
@@ -6169,11 +6169,11 @@ type Model struct {
// clause along with statistics useful for ML analytic functions.
ModelType string `json:"modelType,omitempty"`
// OptimalTrialIds: Output only. For single-objective hyperparameter tuning
- // (/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overvie
- // w) models, it only contains the best trial. For multi-objective
- // hyperparameter tuning
- // (/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overvie
- // w) models, it contains all Pareto optimal trials sorted by trial_id.
+ // (https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview)
+ // models, it only contains the best trial. For multi-objective hyperparameter
+ // tuning
+ // (https://cloud.google.com/bigquery-ml/docs/reference/standard-sql/bigqueryml-syntax-hp-tuning-overview)
+ // models, it contains all Pareto optimal trials sorted by trial_id.
OptimalTrialIds googleapi.Int64s `json:"optimalTrialIds,omitempty"`
// RemoteModelInfo: Output only. Remote model info
RemoteModelInfo *RemoteModelInfo `json:"remoteModelInfo,omitempty"`
@@ -6256,9 +6256,9 @@ func (s ModelDefinitionModelOptions) MarshalJSON() ([]byte, error) {
type ModelExtractOptions struct {
// TrialId: The 1-based ID of the trial to be exported from a hyperparameter
// tuning model. If not specified, the trial with id = Model
- // (/bigquery/docs/reference/rest/v2/models#resource:-model).defaultTrialId is
- // exported. This field is ignored for models not trained with hyperparameter
- // tuning.
+ // (https://cloud.google.com/bigquery/docs/reference/rest/v2/models#resource:-model).defaultTrialId
+ // is exported. This field is ignored for models not trained with
+ // hyperparameter tuning.
TrialId int64 `json:"trialId,omitempty,string"`
// ForceSendFields is a list of field names (e.g. "TrialId") to unconditionally
// include in API requests. By default, fields with empty or default values are
@@ -6900,7 +6900,7 @@ type QueryRequest struct {
FormatOptions *DataFormatOptions `json:"formatOptions,omitempty"`
// JobCreationMode: Optional. If not set, jobs are always required. If set, the
// query request will follow the behavior described JobCreationMode. Preview
- // (/products/#product-launch-stages)
+ // (https://cloud.google.com/products/#product-launch-stages)
//
// Possible values:
// "JOB_CREATION_MODE_UNSPECIFIED" - If unspecified JOB_CREATION_REQUIRED is
@@ -7029,7 +7029,8 @@ type QueryResponse struct {
JobComplete bool `json:"jobComplete,omitempty"`
// JobCreationReason: Optional. The reason why a Job was created. Only relevant
// when a job_reference is present in the response. If job_reference is not
- // present it will always be unset. Preview (/products/#product-launch-stages)
+ // present it will always be unset. Preview
+ // (https://cloud.google.com/products/#product-launch-stages)
JobCreationReason *JobCreationReason `json:"jobCreationReason,omitempty"`
// JobReference: Reference to the Job that was created to run the query. This
// field will be present even if the original request timed out, in which case
@@ -7052,7 +7053,7 @@ type QueryResponse struct {
// (https://cloud.google.com/bigquery/docs/paging-results).
PageToken string `json:"pageToken,omitempty"`
// QueryId: Auto-generated ID for the query. Preview
- // (/products/#product-launch-stages)
+ // (https://cloud.google.com/products/#product-launch-stages)
QueryId string `json:"queryId,omitempty"`
// Rows: An object with as many results as can be contained within the maximum
// permitted reply size. To get any additional rows, you can call
@@ -7419,8 +7420,8 @@ type RestrictionConfig struct {
// Possible values:
// "RESTRICTION_TYPE_UNSPECIFIED" - Should never be used.
// "RESTRICTED_DATA_EGRESS" - Restrict data egress. See [Data
- // egress](/bigquery/docs/analytics-hub-introduction#data_egress) for more
- // details.
+ // egress](https://cloud.google.com/bigquery/docs/analytics-hub-introduction#dat
+ // a_egress) for more details.
Type string `json:"type,omitempty"`
// ForceSendFields is a list of field names (e.g. "Type") to unconditionally
// include in API requests. By default, fields with empty or default values are
@@ -8346,8 +8347,8 @@ type StorageDescriptor struct {
// maximum length is 128 characters.
InputFormat string `json:"inputFormat,omitempty"`
// LocationUri: Optional. The physical location of the table (e.g.
- // 'gs://spark-dataproc-data/pangea-data/case_sensitive/' or
- // 'gs://spark-dataproc-data/pangea-data/*'). The maximum length is 2056 bytes.
+ // `gs://spark-dataproc-data/pangea-data/case_sensitive/` or
+ // `gs://spark-dataproc-data/pangea-data/*`). The maximum length is 2056 bytes.
LocationUri string `json:"locationUri,omitempty"`
// OutputFormat: Optional. Specifies the fully qualified class name of the
// OutputFormat (e.g. "org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat"). The
@@ -8610,7 +8611,8 @@ type Table struct {
ResourceTags map[string]string `json:"resourceTags,omitempty"`
// Restrictions: Optional. Output only. Restriction config for table. If set,
// restrict certain accesses on the table based on the config. See Data egress
- // (/bigquery/docs/analytics-hub-introduction#data_egress) for more details.
+ // (https://cloud.google.com/bigquery/docs/analytics-hub-introduction#data_egress)
+ // for more details.
Restrictions *RestrictionConfig `json:"restrictions,omitempty"`
// Schema: Optional. Describes the schema of this table.
Schema *TableSchema `json:"schema,omitempty"`
@@ -8640,8 +8642,8 @@ type Table struct {
// `MATERIALIZED_VIEW`: A precomputed view defined by a SQL query. *
// `SNAPSHOT`: An immutable BigQuery table that preserves the contents of a
// base table at a particular time. See additional information on table
- // snapshots (/bigquery/docs/table-snapshots-intro). The default value is
- // `TABLE`.
+ // snapshots (https://cloud.google.com/bigquery/docs/table-snapshots-intro).
+ // The default value is `TABLE`.
Type string `json:"type,omitempty"`
// View: Optional. The view definition.
View *ViewDefinition `json:"view,omitempty"`
@@ -9266,7 +9268,7 @@ type TableMetadataCacheUsage struct {
// TableReference: Metadata caching eligible table referenced in the query.
TableReference *TableReference `json:"tableReference,omitempty"`
// TableType: Table type
- // (/bigquery/docs/reference/rest/v2/tables#Table.FIELDS.type).
+ // (https://cloud.google.com/bigquery/docs/reference/rest/v2/tables#Table.FIELDS.type).
TableType string `json:"tableType,omitempty"`
// UnusedReason: Reason for not using metadata caching for the table.
//
diff --git a/vendor/google.golang.org/api/compute/v1/compute-api.json b/vendor/google.golang.org/api/compute/v1/compute-api.json
index 70bd1cc81..fcf10a73c 100644
--- a/vendor/google.golang.org/api/compute/v1/compute-api.json
+++ b/vendor/google.golang.org/api/compute/v1/compute-api.json
@@ -1915,7 +1915,7 @@
]
},
"listUsable": {
- "description": "Retrieves an aggregated list of all usable backend services in the specified project.",
+ "description": "Retrieves a list of all usable backend services in the specified project.",
"flatPath": "projects/{project}/global/backendServices/listUsable",
"httpMethod": "GET",
"id": "compute.backendServices.listUsable",
@@ -20638,7 +20638,7 @@
]
},
"listUsable": {
- "description": "Retrieves an aggregated list of all usable backend services in the specified project in the given region.",
+ "description": "Retrieves a list of all usable backend services in the specified project in the given region.",
"flatPath": "projects/{project}/regions/{region}/backendServices/listUsable",
"httpMethod": "GET",
"id": "compute.regionBackendServices.listUsable",
@@ -37779,7 +37779,7 @@
}
}
},
- "revision": "20240827",
+ "revision": "20240903",
"rootUrl": "https://compute.googleapis.com/",
"schemas": {
"AWSV4Signature": {
@@ -40488,6 +40488,13 @@
"selfLink": {
"description": "[Output Only] Server-defined URL for the resource.",
"type": "string"
+ },
+ "usedBy": {
+ "description": "[Output Only] List of resources referencing that backend bucket.",
+ "items": {
+ "$ref": "BackendBucketUsedBy"
+ },
+ "type": "array"
}
},
"type": "object"
@@ -40774,6 +40781,16 @@
},
"type": "object"
},
+ "BackendBucketUsedBy": {
+ "id": "BackendBucketUsedBy",
+ "properties": {
+ "reference": {
+ "description": "[Output Only] Server-defined URL for UrlMaps referencing that BackendBucket.",
+ "type": "string"
+ }
+ },
+ "type": "object"
+ },
"BackendService": {
"description": "Represents a Backend Service resource. A backend service defines how Google Cloud load balancers distribute traffic. The backend service configuration contains a set of values, such as the protocol used to connect to backends, various distribution and session settings, health checks, and timeouts. These settings provide fine-grained control over how your load balancer behaves. Most of the settings have default values that allow for easy configuration if you need to get started quickly. Backend services in Google Compute Engine can be either regionally or globally scoped. * [Global](https://cloud.google.com/compute/docs/reference/rest/v1/backendServices) * [Regional](https://cloud.google.com/compute/docs/reference/rest/v1/regionBackendServices) For more information, see Backend Services.",
"id": "BackendService",
@@ -60665,6 +60682,11 @@
"description": "[Output Only] Address allocated from given subnetwork for PSC. This IP address acts as a VIP for a PSC NEG, allowing it to act as an endpoint in L7 PSC-XLB.",
"type": "string"
},
+ "producerPort": {
+ "description": "The psc producer port is used to connect PSC NEG with specific port on the PSC Producer side; should only be used for the PRIVATE_SERVICE_CONNECT NEG type",
+ "format": "int32",
+ "type": "integer"
+ },
"pscConnectionId": {
"description": "[Output Only] The PSC connection id of the PSC Network Endpoint Group Consumer.",
"format": "uint64",
@@ -78077,7 +78099,7 @@
"type": "string"
},
"internalIpv6Prefix": {
- "description": "[Output Only] The internal IPv6 address range that is assigned to this subnetwork.",
+ "description": "The internal IPv6 address range that is owned by this subnetwork.",
"type": "string"
},
"ipCidrRange": {
diff --git a/vendor/google.golang.org/api/compute/v1/compute-gen.go b/vendor/google.golang.org/api/compute/v1/compute-gen.go
index ca8f1ffd0..1a0dde8f2 100644
--- a/vendor/google.golang.org/api/compute/v1/compute-gen.go
+++ b/vendor/google.golang.org/api/compute/v1/compute-gen.go
@@ -4492,6 +4492,8 @@ type BackendBucket struct {
Name string `json:"name,omitempty"`
// SelfLink: [Output Only] Server-defined URL for the resource.
SelfLink string `json:"selfLink,omitempty"`
+ // UsedBy: [Output Only] List of resources referencing that backend bucket.
+ UsedBy []*BackendBucketUsedBy `json:"usedBy,omitempty"`
// ServerResponse contains the HTTP response code and headers from the server.
googleapi.ServerResponse `json:"-"`
@@ -4881,6 +4883,28 @@ func (s BackendBucketListWarningData) MarshalJSON() ([]byte, error) {
return gensupport.MarshalJSON(NoMethod(s), s.ForceSendFields, s.NullFields)
}
+type BackendBucketUsedBy struct {
+ // Reference: [Output Only] Server-defined URL for UrlMaps referencing that
+ // BackendBucket.
+ Reference string `json:"reference,omitempty"`
+ // ForceSendFields is a list of field names (e.g. "Reference") to
+ // unconditionally include in API requests. By default, fields with empty or
+ // default values are omitted from API requests. See
+ // https://pkg.go.dev/google.golang.org/api#hdr-ForceSendFields for more
+ // details.
+ ForceSendFields []string `json:"-"`
+ // NullFields is a list of field names (e.g. "Reference") to include in API
+ // requests with the JSON null value. By default, fields with empty values are
+ // omitted from API requests. See
+ // https://pkg.go.dev/google.golang.org/api#hdr-NullFields for more details.
+ NullFields []string `json:"-"`
+}
+
+func (s BackendBucketUsedBy) MarshalJSON() ([]byte, error) {
+ type NoMethod BackendBucketUsedBy
+ return gensupport.MarshalJSON(NoMethod(s), s.ForceSendFields, s.NullFields)
+}
+
// BackendService: Represents a Backend Service resource. A backend service
// defines how Google Cloud load balancers distribute traffic. The backend
// service configuration contains a set of values, such as the protocol used to
@@ -27686,6 +27710,10 @@ type NetworkEndpointGroupPscData struct {
// for PSC. This IP address acts as a VIP for a PSC NEG, allowing it to act as
// an endpoint in L7 PSC-XLB.
ConsumerPscAddress string `json:"consumerPscAddress,omitempty"`
+ // ProducerPort: The psc producer port is used to connect PSC NEG with specific
+ // port on the PSC Producer side; should only be used for the
+ // PRIVATE_SERVICE_CONNECT NEG type
+ ProducerPort int64 `json:"producerPort,omitempty"`
// PscConnectionId: [Output Only] The PSC connection id of the PSC Network
// Endpoint Group Consumer.
PscConnectionId uint64 `json:"pscConnectionId,omitempty,string"`
@@ -46388,8 +46416,8 @@ type Subnetwork struct {
// Id: [Output Only] The unique identifier for the resource. This identifier is
// defined by the server.
Id uint64 `json:"id,omitempty,string"`
- // InternalIpv6Prefix: [Output Only] The internal IPv6 address range that is
- // assigned to this subnetwork.
+ // InternalIpv6Prefix: The internal IPv6 address range that is owned by this
+ // subnetwork.
InternalIpv6Prefix string `json:"internalIpv6Prefix,omitempty"`
// IpCidrRange: The range of internal addresses that are owned by this
// subnetwork. Provide this property when you create the subnetwork. For
diff --git a/vendor/google.golang.org/api/compute/v1/compute2-gen.go b/vendor/google.golang.org/api/compute/v1/compute2-gen.go
index 8b9c7f13e..74f8dee4b 100644
--- a/vendor/google.golang.org/api/compute/v1/compute2-gen.go
+++ b/vendor/google.golang.org/api/compute/v1/compute2-gen.go
@@ -5413,8 +5413,8 @@ type BackendServicesListUsableCall struct {
header_ http.Header
}
-// ListUsable: Retrieves an aggregated list of all usable backend services in
-// the specified project.
+// ListUsable: Retrieves a list of all usable backend services in the specified
+// project.
//
// - project: Project ID for this request.
func (r *BackendServicesService) ListUsable(project string) *BackendServicesListUsableCall {
diff --git a/vendor/google.golang.org/api/compute/v1/compute3-gen.go b/vendor/google.golang.org/api/compute/v1/compute3-gen.go
index 71248bab0..dc8c4d4ff 100644
--- a/vendor/google.golang.org/api/compute/v1/compute3-gen.go
+++ b/vendor/google.golang.org/api/compute/v1/compute3-gen.go
@@ -1650,8 +1650,8 @@ type RegionBackendServicesListUsableCall struct {
header_ http.Header
}
-// ListUsable: Retrieves an aggregated list of all usable backend services in
-// the specified project in the given region.
+// ListUsable: Retrieves a list of all usable backend services in the specified
+// project in the given region.
//
// - project: Project ID for this request.
// - region: Name of the region scoping this request. It must be a string that
diff --git a/vendor/google.golang.org/api/internal/version.go b/vendor/google.golang.org/api/internal/version.go
index 86152a19f..ed0987b42 100644
--- a/vendor/google.golang.org/api/internal/version.go
+++ b/vendor/google.golang.org/api/internal/version.go
@@ -5,4 +5,4 @@
package internal
// Version is the current tagged release of the library.
-const Version = "0.197.0"
+const Version = "0.198.0"
diff --git a/vendor/modules.txt b/vendor/modules.txt
index 080e0caa2..8e884b7cf 100644
--- a/vendor/modules.txt
+++ b/vendor/modules.txt
@@ -25,7 +25,7 @@ cloud.google.com/go/internal/version
cloud.google.com/go/ai/generativelanguage/apiv1beta
cloud.google.com/go/ai/generativelanguage/apiv1beta/generativelanguagepb
cloud.google.com/go/ai/internal
-# cloud.google.com/go/auth v0.9.3
+# cloud.google.com/go/auth v0.9.4
## explicit; go 1.21
cloud.google.com/go/auth
cloud.google.com/go/auth/credentials
@@ -107,17 +107,17 @@ cloud.google.com/go/storage/internal/apiv2/storagepb
# github.com/4meepo/tagalign v1.3.4
## explicit; go 1.19
github.com/4meepo/tagalign
-# github.com/Abirdcfly/dupword v0.1.1
-## explicit; go 1.20
+# github.com/Abirdcfly/dupword v0.1.3
+## explicit; go 1.22.0
github.com/Abirdcfly/dupword
-# github.com/Antonboom/errname v0.1.13
-## explicit; go 1.20
+# github.com/Antonboom/errname v1.0.0
+## explicit; go 1.22.1
github.com/Antonboom/errname/pkg/analyzer
-# github.com/Antonboom/nilnil v0.1.9
-## explicit; go 1.20
+# github.com/Antonboom/nilnil v1.0.0
+## explicit; go 1.22.0
github.com/Antonboom/nilnil/pkg/analyzer
-# github.com/Antonboom/testifylint v1.4.3
-## explicit; go 1.20
+# github.com/Antonboom/testifylint v1.5.0
+## explicit; go 1.22.1
github.com/Antonboom/testifylint/analyzer
github.com/Antonboom/testifylint/internal/analysisutil
github.com/Antonboom/testifylint/internal/checkers
@@ -155,10 +155,10 @@ github.com/OpenPeeDeeP/depguard/v2/internal/utils
# github.com/VividCortex/gohistogram v1.0.0
## explicit
github.com/VividCortex/gohistogram
-# github.com/alecthomas/go-check-sumtype v0.1.4
+# github.com/alecthomas/go-check-sumtype v0.2.0
## explicit; go 1.18
github.com/alecthomas/go-check-sumtype
-# github.com/alexkohler/nakedret/v2 v2.0.4
+# github.com/alexkohler/nakedret/v2 v2.0.5
## explicit; go 1.21
github.com/alexkohler/nakedret/v2
# github.com/alexkohler/prealloc v1.0.0
@@ -199,8 +199,8 @@ github.com/ashanbrown/makezero/makezero
# github.com/beorn7/perks v1.0.1
## explicit; go 1.11
github.com/beorn7/perks/quantile
-# github.com/bkielbasa/cyclop v1.2.1
-## explicit; go 1.20
+# github.com/bkielbasa/cyclop v1.2.3
+## explicit; go 1.22.0
github.com/bkielbasa/cyclop/pkg/analyzer
# github.com/blizzy78/varnamelen v0.8.0
## explicit; go 1.16
@@ -208,11 +208,11 @@ github.com/blizzy78/varnamelen
# github.com/bombsimon/wsl/v4 v4.4.1
## explicit; go 1.21
github.com/bombsimon/wsl/v4
-# github.com/breml/bidichk v0.2.7
-## explicit; go 1.20
+# github.com/breml/bidichk v0.3.2
+## explicit; go 1.22.0
github.com/breml/bidichk/pkg/bidichk
-# github.com/breml/errchkjson v0.3.6
-## explicit; go 1.20
+# github.com/breml/errchkjson v0.4.0
+## explicit; go 1.22.0
github.com/breml/errchkjson
# github.com/butuzov/ireturn v0.3.0
## explicit; go 1.18
@@ -253,7 +253,7 @@ github.com/chavacava/garif
# github.com/chigopher/pathlib v0.19.1
## explicit; go 1.21
github.com/chigopher/pathlib
-# github.com/ckaznocha/intrange v0.2.0
+# github.com/ckaznocha/intrange v0.2.1
## explicit; go 1.22
github.com/ckaznocha/intrange
# github.com/cncf/xds/go v0.0.0-20240423153145-555b57ec207b
@@ -339,7 +339,7 @@ github.com/envoyproxy/protoc-gen-validate/validate
# github.com/ettle/strcase v0.2.0
## explicit; go 1.12
github.com/ettle/strcase
-# github.com/fatih/color v1.17.0
+# github.com/fatih/color v1.18.0
## explicit; go 1.17
github.com/fatih/color
# github.com/fatih/structtag v1.2.0
@@ -357,10 +357,10 @@ github.com/fsnotify/fsnotify
# github.com/fzipp/gocyclo v0.6.0
## explicit; go 1.18
github.com/fzipp/gocyclo
-# github.com/ghostiam/protogetter v0.3.6
-## explicit; go 1.19
+# github.com/ghostiam/protogetter v0.3.8
+## explicit; go 1.22.0
github.com/ghostiam/protogetter
-# github.com/go-critic/go-critic v0.11.4
+# github.com/go-critic/go-critic v0.11.5
## explicit; go 1.18
github.com/go-critic/go-critic/checkers
github.com/go-critic/go-critic/checkers/internal/astwalk
@@ -395,7 +395,7 @@ github.com/go-toolsmith/strparse
# github.com/go-toolsmith/typep v1.1.0
## explicit; go 1.16
github.com/go-toolsmith/typep
-# github.com/go-viper/mapstructure/v2 v2.1.0
+# github.com/go-viper/mapstructure/v2 v2.2.1
## explicit; go 1.18
github.com/go-viper/mapstructure/v2
github.com/go-viper/mapstructure/v2/internal/errors
@@ -440,19 +440,23 @@ github.com/golangci/dupl/printer
github.com/golangci/dupl/suffixtree
github.com/golangci/dupl/syntax
github.com/golangci/dupl/syntax/golang
+# github.com/golangci/go-printf-func-name v0.1.0
+## explicit; go 1.22.0
+github.com/golangci/go-printf-func-name/pkg/analyzer
# github.com/golangci/gofmt v0.0.0-20240816233607-d8596aa466a9
## explicit; go 1.22
github.com/golangci/gofmt/gofmt
github.com/golangci/gofmt/gofmt/internal/diff
github.com/golangci/gofmt/goimports
-# github.com/golangci/golangci-lint v1.61.0
+# github.com/golangci/golangci-lint v1.62.0
## explicit; go 1.22.1
github.com/golangci/golangci-lint/cmd/golangci-lint
github.com/golangci/golangci-lint/internal/cache
github.com/golangci/golangci-lint/internal/errorutil
-github.com/golangci/golangci-lint/internal/pkgcache
-github.com/golangci/golangci-lint/internal/renameio
-github.com/golangci/golangci-lint/internal/robustio
+github.com/golangci/golangci-lint/internal/go/cache
+github.com/golangci/golangci-lint/internal/go/mmap
+github.com/golangci/golangci-lint/internal/go/quoted
+github.com/golangci/golangci-lint/internal/go/robustio
github.com/golangci/golangci-lint/pkg/commands
github.com/golangci/golangci-lint/pkg/commands/internal
github.com/golangci/golangci-lint/pkg/config
@@ -482,7 +486,6 @@ github.com/golangci/golangci-lint/pkg/golinters/errcheck
github.com/golangci/golangci-lint/pkg/golinters/errchkjson
github.com/golangci/golangci-lint/pkg/golinters/errname
github.com/golangci/golangci-lint/pkg/golinters/errorlint
-github.com/golangci/golangci-lint/pkg/golinters/execinquery
github.com/golangci/golangci-lint/pkg/golinters/exhaustive
github.com/golangci/golangci-lint/pkg/golinters/exhaustruct
github.com/golangci/golangci-lint/pkg/golinters/exportloopref
@@ -514,6 +517,7 @@ github.com/golangci/golangci-lint/pkg/golinters/gosimple
github.com/golangci/golangci-lint/pkg/golinters/gosmopolitan
github.com/golangci/golangci-lint/pkg/golinters/govet
github.com/golangci/golangci-lint/pkg/golinters/grouper
+github.com/golangci/golangci-lint/pkg/golinters/iface
github.com/golangci/golangci-lint/pkg/golinters/importas
github.com/golangci/golangci-lint/pkg/golinters/inamedparam
github.com/golangci/golangci-lint/pkg/golinters/ineffassign
@@ -546,6 +550,7 @@ github.com/golangci/golangci-lint/pkg/golinters/predeclared
github.com/golangci/golangci-lint/pkg/golinters/promlinter
github.com/golangci/golangci-lint/pkg/golinters/protogetter
github.com/golangci/golangci-lint/pkg/golinters/reassign
+github.com/golangci/golangci-lint/pkg/golinters/recvcheck
github.com/golangci/golangci-lint/pkg/golinters/revive
github.com/golangci/golangci-lint/pkg/golinters/rowserrcheck
github.com/golangci/golangci-lint/pkg/golinters/sloglint
@@ -721,9 +726,6 @@ github.com/jingyugao/rowserrcheck/passes/rowserr
# github.com/jinzhu/copier v0.3.5
## explicit; go 1.13
github.com/jinzhu/copier
-# github.com/jirfag/go-printf-func-name v0.0.0-20200119135958-7558a9eaa5af
-## explicit; go 1.13
-github.com/jirfag/go-printf-func-name/pkg/analyzer
# github.com/jjti/go-spancheck v0.6.2
## explicit; go 1.20
github.com/jjti/go-spancheck
@@ -733,7 +735,7 @@ github.com/julz/importas
# github.com/karamaru-alpha/copyloopvar v1.1.0
## explicit; go 1.21
github.com/karamaru-alpha/copyloopvar
-# github.com/kisielk/errcheck v1.7.0
+# github.com/kisielk/errcheck v1.8.0
## explicit; go 1.18
github.com/kisielk/errcheck/errcheck
# github.com/kkHAIKE/contextcheck v1.1.5
@@ -760,8 +762,8 @@ github.com/kunwardeep/paralleltest/pkg/paralleltest
# github.com/kyoh86/exportloopref v0.1.11
## explicit; go 1.18
github.com/kyoh86/exportloopref
-# github.com/lasiar/canonicalheader v1.1.1
-## explicit; go 1.21
+# github.com/lasiar/canonicalheader v1.1.2
+## explicit; go 1.22.0
github.com/lasiar/canonicalheader
# github.com/ldez/gomoddirectives v0.2.4
## explicit; go 1.21
@@ -777,9 +779,6 @@ github.com/leonklingele/grouper/pkg/analyzer/globals
github.com/leonklingele/grouper/pkg/analyzer/imports
github.com/leonklingele/grouper/pkg/analyzer/types
github.com/leonklingele/grouper/pkg/analyzer/vars
-# github.com/lufeee/execinquery v1.2.1
-## explicit; go 1.17
-github.com/lufeee/execinquery
# github.com/macabu/inamedparam v0.1.3
## explicit; go 1.20
github.com/macabu/inamedparam
@@ -801,11 +800,11 @@ github.com/mattn/go-colorable
# github.com/mattn/go-isatty v0.0.20
## explicit; go 1.15
github.com/mattn/go-isatty
-# github.com/mattn/go-runewidth v0.0.14
+# github.com/mattn/go-runewidth v0.0.16
## explicit; go 1.9
github.com/mattn/go-runewidth
-# github.com/mgechev/revive v1.3.9
-## explicit; go 1.21
+# github.com/mgechev/revive v1.5.0
+## explicit; go 1.22.1
github.com/mgechev/revive/config
github.com/mgechev/revive/formatter
github.com/mgechev/revive/internal/ifelse
@@ -835,15 +834,22 @@ github.com/nishanths/exhaustive
# github.com/nishanths/predeclared v0.2.2
## explicit; go 1.14
github.com/nishanths/predeclared/passes/predeclared
-# github.com/nunnatsa/ginkgolinter v0.16.2
-## explicit; go 1.21
+# github.com/nunnatsa/ginkgolinter v0.18.0
+## explicit; go 1.22.0
github.com/nunnatsa/ginkgolinter
+github.com/nunnatsa/ginkgolinter/internal/expression
+github.com/nunnatsa/ginkgolinter/internal/expression/actual
+github.com/nunnatsa/ginkgolinter/internal/expression/matcher
+github.com/nunnatsa/ginkgolinter/internal/expression/value
+github.com/nunnatsa/ginkgolinter/internal/formatter
github.com/nunnatsa/ginkgolinter/internal/ginkgohandler
github.com/nunnatsa/ginkgolinter/internal/gomegahandler
+github.com/nunnatsa/ginkgolinter/internal/gomegainfo
github.com/nunnatsa/ginkgolinter/internal/interfaces
github.com/nunnatsa/ginkgolinter/internal/intervals
github.com/nunnatsa/ginkgolinter/internal/reports
github.com/nunnatsa/ginkgolinter/internal/reverseassertion
+github.com/nunnatsa/ginkgolinter/internal/rules
github.com/nunnatsa/ginkgolinter/linter
github.com/nunnatsa/ginkgolinter/types
github.com/nunnatsa/ginkgolinter/version
@@ -932,9 +938,18 @@ github.com/quasilyte/regex/syntax
# github.com/quasilyte/stdinfo v0.0.0-20220114132959-f7386bf02567
## explicit; go 1.17
github.com/quasilyte/stdinfo
-# github.com/rivo/uniseg v0.4.4
+# github.com/raeperd/recvcheck v0.1.2
+## explicit; go 1.22.0
+github.com/raeperd/recvcheck
+# github.com/rivo/uniseg v0.4.7
## explicit; go 1.18
github.com/rivo/uniseg
+# github.com/rogpeppe/go-internal v1.13.1
+## explicit; go 1.22
+github.com/rogpeppe/go-internal/internal/syscall/windows
+github.com/rogpeppe/go-internal/internal/syscall/windows/sysdll
+github.com/rogpeppe/go-internal/lockedfile
+github.com/rogpeppe/go-internal/lockedfile/internal/filelock
# github.com/rs/zerolog v1.29.0
## explicit; go 1.15
github.com/rs/zerolog
@@ -961,8 +976,8 @@ github.com/sashamelentyev/interfacebloat/pkg/analyzer
## explicit; go 1.20
github.com/sashamelentyev/usestdlibvars/pkg/analyzer
github.com/sashamelentyev/usestdlibvars/pkg/analyzer/internal/mapping
-# github.com/securego/gosec/v2 v2.21.2
-## explicit; go 1.22
+# github.com/securego/gosec/v2 v2.21.4
+## explicit; go 1.22.0
github.com/securego/gosec/v2
github.com/securego/gosec/v2/analyzers
github.com/securego/gosec/v2/cwe
@@ -980,11 +995,11 @@ github.com/sirupsen/logrus
# github.com/sivchari/containedctx v1.0.3
## explicit; go 1.17
github.com/sivchari/containedctx
-# github.com/sivchari/tenv v1.10.0
-## explicit; go 1.21.0
+# github.com/sivchari/tenv v1.12.1
+## explicit; go 1.22.0
github.com/sivchari/tenv
-# github.com/sonatard/noctx v0.0.2
-## explicit; go 1.20
+# github.com/sonatard/noctx v0.1.0
+## explicit; go 1.22.0
github.com/sonatard/noctx
github.com/sonatard/noctx/ngfunc
github.com/sonatard/noctx/reqwithoutctx
@@ -1041,16 +1056,15 @@ github.com/subosito/gotenv
# github.com/tdakkota/asciicheck v0.2.0
## explicit; go 1.18
github.com/tdakkota/asciicheck
-# github.com/tetafro/godot v1.4.17
+# github.com/tetafro/godot v1.4.18
## explicit; go 1.20
github.com/tetafro/godot
# github.com/timakin/bodyclose v0.0.0-20230421092635-574207250966
## explicit; go 1.12
github.com/timakin/bodyclose/passes/bodyclose
-# github.com/timonwong/loggercheck v0.9.4
-## explicit; go 1.18
+# github.com/timonwong/loggercheck v0.10.1
+## explicit; go 1.22.0
github.com/timonwong/loggercheck
-github.com/timonwong/loggercheck/internal/bytebufferpool
github.com/timonwong/loggercheck/internal/checkers
github.com/timonwong/loggercheck/internal/checkers/printf
github.com/timonwong/loggercheck/internal/rules
@@ -1079,6 +1093,12 @@ github.com/ultraware/whitespace
# github.com/uudashr/gocognit v1.1.3
## explicit; go 1.18
github.com/uudashr/gocognit
+# github.com/uudashr/iface v1.2.0
+## explicit; go 1.21.0
+github.com/uudashr/iface/identical
+github.com/uudashr/iface/internal/directive
+github.com/uudashr/iface/opaque
+github.com/uudashr/iface/unused
# github.com/vektra/mockery/v2 v2.45.1
## explicit; go 1.22
github.com/vektra/mockery/v2
@@ -1107,7 +1127,7 @@ github.com/zeebo/xxh3
# gitlab.com/bosi/decorder v0.4.2
## explicit; go 1.20
gitlab.com/bosi/decorder
-# go-simpler.org/musttag v0.12.2
+# go-simpler.org/musttag v0.13.0
## explicit; go 1.20
go-simpler.org/musttag
# go-simpler.org/sloglint v0.7.2
@@ -1168,8 +1188,8 @@ go.opentelemetry.io/otel/trace/embedded
# go.uber.org/atomic v1.10.0
## explicit; go 1.18
go.uber.org/atomic
-# go.uber.org/automaxprocs v1.5.3
-## explicit; go 1.18
+# go.uber.org/automaxprocs v1.6.0
+## explicit; go 1.20
go.uber.org/automaxprocs/internal/cgroups
go.uber.org/automaxprocs/internal/runtime
go.uber.org/automaxprocs/maxprocs
@@ -1185,7 +1205,7 @@ go.uber.org/zap/internal/bufferpool
go.uber.org/zap/internal/color
go.uber.org/zap/internal/exit
go.uber.org/zap/zapcore
-# golang.org/x/crypto v0.28.0
+# golang.org/x/crypto v0.29.0
## explicit; go 1.20
golang.org/x/crypto/chacha20
golang.org/x/crypto/chacha20poly1305
@@ -1194,21 +1214,20 @@ golang.org/x/crypto/cryptobyte/asn1
golang.org/x/crypto/hkdf
golang.org/x/crypto/internal/alias
golang.org/x/crypto/internal/poly1305
-# golang.org/x/exp v0.0.0-20240904232852-e7e105dedf7e
-## explicit; go 1.20
+# golang.org/x/exp v0.0.0-20240909161429-701f63a606c0
+## explicit; go 1.22.0
golang.org/x/exp/constraints
golang.org/x/exp/maps
-golang.org/x/exp/slices
-# golang.org/x/exp/typeparams v0.0.0-20240314144324-c7f7c6466f7f
+# golang.org/x/exp/typeparams v0.0.0-20240909161429-701f63a606c0
## explicit; go 1.18
golang.org/x/exp/typeparams
-# golang.org/x/mod v0.21.0
+# golang.org/x/mod v0.22.0
## explicit; go 1.22.0
golang.org/x/mod/internal/lazyregexp
golang.org/x/mod/modfile
golang.org/x/mod/module
golang.org/x/mod/semver
-# golang.org/x/net v0.30.0
+# golang.org/x/net v0.31.0
## explicit; go 1.18
golang.org/x/net/http/httpguts
golang.org/x/net/http2
@@ -1233,7 +1252,7 @@ golang.org/x/oauth2/jwt
golang.org/x/perf/benchstat
golang.org/x/perf/internal/stats
golang.org/x/perf/storage/benchfmt
-# golang.org/x/sync v0.8.0
+# golang.org/x/sync v0.9.0
## explicit; go 1.18
golang.org/x/sync/errgroup
golang.org/x/sync/semaphore
@@ -1244,10 +1263,10 @@ golang.org/x/sys/plan9
golang.org/x/sys/unix
golang.org/x/sys/windows
golang.org/x/sys/windows/registry
-# golang.org/x/term v0.25.0
+# golang.org/x/term v0.26.0
## explicit; go 1.18
golang.org/x/term
-# golang.org/x/text v0.19.0
+# golang.org/x/text v0.20.0
## explicit; go 1.18
golang.org/x/text/runes
golang.org/x/text/secure/bidirule
@@ -1259,7 +1278,7 @@ golang.org/x/text/width
# golang.org/x/time v0.6.0
## explicit; go 1.18
golang.org/x/time/rate
-# golang.org/x/tools v0.26.0
+# golang.org/x/tools v0.27.0
## explicit; go 1.22.0
golang.org/x/tools/cmd/goimports
golang.org/x/tools/cmd/stringer
@@ -1352,7 +1371,7 @@ golang.org/x/tools/txtar
## explicit; go 1.18
golang.org/x/xerrors
golang.org/x/xerrors/internal
-# google.golang.org/api v0.197.0
+# google.golang.org/api v0.198.0
## explicit; go 1.21
google.golang.org/api/bigquery/v2
google.golang.org/api/compute/v1